diff --git a/.gitignore b/.gitignore
index 264d7caf87d2..d816f14d803d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,6 +15,7 @@ lucene/**/*.iml
 parent.iml
 *.ipr
 *.iws
+/*.iml
 /.project
 /.classpath
 /.settings
diff --git a/build.gradle b/build.gradle
index 58afa841dbc9..5fc36093668a 100644
--- a/build.gradle
+++ b/build.gradle
@@ -21,7 +21,7 @@ import java.time.format.DateTimeFormatter
 plugins {
   id "base"
   id "com.palantir.consistent-versions" version "1.14.0"
-  id 'de.thetaphi.forbiddenapis' version '3.0' apply false
+  id 'de.thetaphi.forbiddenapis' version '3.0.1' apply false
   id "org.owasp.dependencycheck" version "5.3.0"
   id "de.undercouch.download" version "4.0.2" apply false
 }
@@ -29,6 +29,9 @@ plugins {
 // Project version.
 version = "9.0.0-SNAPSHOT"
 
+// General metadata.
+description = 'Grandparent project for Apache Lucene Core and Apache Solr'
+
 // Propagate version and derived properties across projects.
 allprojects {
   version = rootProject.version
@@ -43,6 +46,16 @@ ext {
     }
     return m[0][1]
   }()
+  // "majorVersion" is an integer with just the major version. Compute it.
+  majorVersion = {
+    def m = (version =~ /^(\d+)\.\d+\.\d+(-(.+))?/)
+    if (!m) {
+      throw new GradleException("Can't strip version to just major version: " + rootProject.version)
+    }
+    return m[0][1] as int
+  }()
+  // snapshot build marker used in scripts.
+  snapshotBuild = version.contains("SNAPSHOT")
 
   // Build timestamp.
   def tstamp = ZonedDateTime.now()
@@ -58,6 +71,7 @@ ext {
     "javacc": "5.0",
     "jflex": "1.7.0",
     "jgit": "5.3.0.201903130848-r",
+    "flexmark": "0.61.24",
   ]
 }
 
@@ -77,7 +91,6 @@ apply from: file('gradle/ant-compat/folder-layout.gradle')
 // (java, tests)
 apply from: file('gradle/defaults.gradle')
 apply from: file('gradle/defaults-java.gradle')
-apply from: file('gradle/render-javadoc.gradle')
 apply from: file('gradle/testing/defaults-tests.gradle')
 apply from: file('gradle/testing/randomization.gradle')
 apply from: file('gradle/testing/fail-on-no-tests.gradle')
@@ -104,6 +117,7 @@ apply from: file('gradle/validation/ecj-lint.gradle')
 apply from: file('gradle/validation/gradlew-scripts-tweaked.gradle')
 apply from: file('gradle/validation/missing-docs-check.gradle')
 apply from: file('gradle/validation/validate-log-calls.gradle')
+apply from: file('gradle/validation/check-broken-links.gradle')
 
 // Source or data regeneration tasks
 apply from: file('gradle/generation/jflex.gradle')
@@ -134,3 +148,5 @@ apply from: file('gradle/ant-compat/forbidden-api-rules-in-sync.gradle')
 
 apply from: file('gradle/documentation/documentation.gradle')
 apply from: file('gradle/documentation/changes-to-html.gradle')
+apply from: file('gradle/documentation/markdown.gradle')
+apply from: file('gradle/render-javadoc.gradle')
diff --git a/dev-tools/doap/lucene.rdf b/dev-tools/doap/lucene.rdf
index 378562c88ff0..1cdf5521900c 100644
--- a/dev-tools/doap/lucene.rdf
+++ b/dev-tools/doap/lucene.rdf
@@ -67,6 +67,13 @@
+    <release>
+      <Version>
+        <name>lucene-8.5.2</name>
+        <created>2020-05-26</created>
+        <revision>8.5.2</revision>
+      </Version>
+    </release>
     <release>
       <Version>
         <name>lucene-8.5.1</name>
@@ -137,6 +144,13 @@
         <revision>8.0.0</revision>
       </Version>
     </release>
+    <release>
+      <Version>
+        <name>lucene-7.7.3</name>
+        <created>2020-04-28</created>
+        <revision>7.7.3</revision>
+      </Version>
+    </release>
     <release>
       <Version>
         <name>lucene-7.7.2</name>
diff --git a/dev-tools/doap/solr.rdf b/dev-tools/doap/solr.rdf
index e25a578631fb..d3e097f98549 100644
--- a/dev-tools/doap/solr.rdf
+++ b/dev-tools/doap/solr.rdf
@@ -67,6 +67,13 @@
+    <release>
+      <Version>
+        <name>solr-8.5.2</name>
+        <created>2020-05-26</created>
+        <revision>8.5.2</revision>
+      </Version>
+    </release>
     <release>
       <Version>
         <name>solr-8.5.1</name>
@@ -137,6 +144,13 @@
         <revision>8.0.0</revision>
       </Version>
     </release>
+    <release>
+      <Version>
+        <name>solr-7.7.3</name>
+        <created>2020-04-28</created>
+        <revision>7.7.3</revision>
+      </Version>
+    </release>
     <release>
       <Version>
         <name>solr-7.7.2</name>
diff --git a/dev-tools/maven/pom.xml.template b/dev-tools/maven/pom.xml.template
index b5bd2df1144f..b5e98b190ec1 100644
--- a/dev-tools/maven/pom.xml.template
+++ b/dev-tools/maven/pom.xml.template
@@ -159,7 +159,7 @@
           <groupId>de.thetaphi</groupId>
           <artifactId>forbiddenapis</artifactId>
-          <version>2.7</version>
+          <version>3.0.1</version>
diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene60/package.html b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene60/package.html
new file mode 100644
--- /dev/null
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene60/package.html
@@ -0,0 +1,25 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<html>
+<head>
+  <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+</head>
+<body>
+Lucene 6.0 file format.
+</body>
+</html>
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70SegmentInfoFormat.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/Lucene70SegmentInfoFormat.java
similarity index 63%
rename from lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70SegmentInfoFormat.java
rename to lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/Lucene70SegmentInfoFormat.java
index ed5577011564..ab54012eb20c 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70SegmentInfoFormat.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/Lucene70SegmentInfoFormat.java
@@ -24,9 +24,9 @@
 import org.apache.lucene.codecs.SegmentInfoFormat;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.IndexFileNames;
-import org.apache.lucene.index.IndexWriter; // javadocs
-import org.apache.lucene.index.SegmentInfo; // javadocs
-import org.apache.lucene.index.SegmentInfos; // javadocs
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.SegmentInfo;
+import org.apache.lucene.index.SegmentInfos;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.SortedNumericSelector;
@@ -34,10 +34,9 @@
 import org.apache.lucene.search.SortedSetSelector;
 import org.apache.lucene.search.SortedSetSortField;
 import org.apache.lucene.store.ChecksumIndexInput;
-import org.apache.lucene.store.DataOutput; // javadocs
+import org.apache.lucene.store.DataOutput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.Version;
 
 /**
@@ -271,164 +270,7 @@ public SegmentInfo read(Directory dir, String segment, byte[] segmentID, IOConte
 
   @Override
   public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
-    final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene70SegmentInfoFormat.SI_EXTENSION);
-
-    try (IndexOutput output = dir.createOutput(fileName, ioContext)) {
-      // Only add the file once we've successfully created it, else IFD assert can trip:
-      si.addFile(fileName);
-      CodecUtil.writeIndexHeader(output,
-                                   Lucene70SegmentInfoFormat.CODEC_NAME,
-                                   Lucene70SegmentInfoFormat.VERSION_CURRENT,
-                                   si.getId(),
-                                   "");
-      Version version = si.getVersion();
-      if (version.major < 7) {
-        throw new IllegalArgumentException("invalid major version: should be >= 7 but got: " + version.major + " segment=" + si);
-      }
-      // Write the Lucene version that created this segment, since 3.1
-      output.writeInt(version.major);
-      output.writeInt(version.minor);
-      output.writeInt(version.bugfix);
-
-      // Write the min Lucene version that contributed docs to the segment, since 7.0
-      if (si.getMinVersion() != null) {
-        output.writeByte((byte) 1);
-        Version minVersion = si.getMinVersion();
-        output.writeInt(minVersion.major);
-        output.writeInt(minVersion.minor);
-        output.writeInt(minVersion.bugfix);
-      } else {
-        output.writeByte((byte) 0);
-      }
-
-      assert version.prerelease == 0;
-      output.writeInt(si.maxDoc());
-
-      output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
-      output.writeMapOfStrings(si.getDiagnostics());
-      Set<String> files = si.files();
-      for (String file : files) {
-        if (!IndexFileNames.parseSegmentName(file).equals(si.name)) {
-          throw new IllegalArgumentException("invalid files: expected segment=" + si.name + ", got=" + files);
-        }
-      }
-      output.writeSetOfStrings(files);
-      output.writeMapOfStrings(si.getAttributes());
-
-      Sort indexSort = si.getIndexSort();
-      int numSortFields = indexSort == null ? 0 : indexSort.getSort().length;
-      output.writeVInt(numSortFields);
-      for (int i = 0; i < numSortFields; ++i) {
-        SortField sortField = indexSort.getSort()[i];
-        SortField.Type sortType = sortField.getType();
-        output.writeString(sortField.getField());
-        int sortTypeID;
-        switch (sortField.getType()) {
-          case STRING:
-            sortTypeID = 0;
-            break;
-          case LONG:
-            sortTypeID = 1;
-            break;
-          case INT:
-            sortTypeID = 2;
-            break;
-          case DOUBLE:
-            sortTypeID = 3;
-            break;
-          case FLOAT:
-            sortTypeID = 4;
-            break;
-          case CUSTOM:
-            if (sortField instanceof SortedSetSortField) {
-              sortTypeID = 5;
-              sortType = SortField.Type.STRING;
-            } else if (sortField instanceof SortedNumericSortField) {
-              sortTypeID = 6;
-              sortType = ((SortedNumericSortField) sortField).getNumericType();
-            } else {
-              throw new IllegalStateException("Unexpected SortedNumericSortField " + sortField);
-            }
-            break;
-          default:
-            throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
-        }
-        output.writeVInt(sortTypeID);
-        if (sortTypeID == 5) {
-          SortedSetSortField ssf = (SortedSetSortField) sortField;
-          if (ssf.getSelector() == SortedSetSelector.Type.MIN) {
-            output.writeByte((byte) 0);
-          } else if (ssf.getSelector() == SortedSetSelector.Type.MAX) {
-            output.writeByte((byte) 1);
-          } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MIN) {
-            output.writeByte((byte) 2);
-          } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MAX) {
-            output.writeByte((byte) 3);
-          } else {
-            throw new IllegalStateException("Unexpected SortedSetSelector type: " + ssf.getSelector());
-          }
-        } else if (sortTypeID == 6) {
-          SortedNumericSortField snsf = (SortedNumericSortField) sortField;
-          if (snsf.getNumericType() == SortField.Type.LONG) {
-            output.writeByte((byte) 0);
-          } else if (snsf.getNumericType() == SortField.Type.INT) {
-            output.writeByte((byte) 1);
-          } else if (snsf.getNumericType() == SortField.Type.DOUBLE) {
-            output.writeByte((byte) 2);
-          } else if (snsf.getNumericType() == SortField.Type.FLOAT) {
-            output.writeByte((byte) 3);
-          } else {
-            throw new IllegalStateException("Unexpected SortedNumericSelector type: " + snsf.getNumericType());
-          }
-          if (snsf.getSelector() == SortedNumericSelector.Type.MIN) {
-            output.writeByte((byte) 0);
-          } else if (snsf.getSelector() == SortedNumericSelector.Type.MAX) {
-            output.writeByte((byte) 1);
-          } else {
-            throw new IllegalStateException("Unexpected sorted numeric selector type: " + snsf.getSelector());
-          }
-        }
-        output.writeByte((byte) (sortField.getReverse() ? 0 : 1));
-
-        // write missing value
-        Object missingValue = sortField.getMissingValue();
-        if (missingValue == null) {
-          output.writeByte((byte) 0);
-        } else {
-          switch(sortType) {
-            case STRING:
-              if (missingValue == SortField.STRING_LAST) {
-                output.writeByte((byte) 1);
-              } else if (missingValue == SortField.STRING_FIRST) {
-                output.writeByte((byte) 2);
-              } else {
-                throw new AssertionError("unrecognized missing value for STRING field \"" + sortField.getField() + "\": " + missingValue);
-              }
-              break;
-            case LONG:
-              output.writeByte((byte) 1);
-              output.writeLong(((Long) missingValue).longValue());
-              break;
-            case INT:
-              output.writeByte((byte) 1);
-              output.writeInt(((Integer) missingValue).intValue());
-              break;
-            case DOUBLE:
-              output.writeByte((byte) 1);
-              output.writeLong(Double.doubleToLongBits(((Double) missingValue).doubleValue()));
-              break;
-            case FLOAT:
-              output.writeByte((byte) 1);
-              output.writeInt(Float.floatToIntBits(((Float) missingValue).floatValue()));
-              break;
-            default:
-              throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
-          }
-        }
-      }
-
-      CodecUtil.writeFooter(output);
-    }
+    throw new UnsupportedOperationException("Old formats can't be used for writing");
   }
 
   /** File extension used to store {@link SegmentInfo}. */
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/package-info.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/package-info.java
similarity index 96%
rename from lucene/core/src/java/org/apache/lucene/codecs/lucene70/package-info.java
rename to lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/package-info.java
index e1913a0c4965..6bbf70c31d8e 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/package-info.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/package-info.java
@@ -16,7 +16,7 @@
  */
 
 /**
- * Components from the Lucene 7.0 index format. See {@link org.apache.lucene.codecs.lucene80}
+ * Components from the Lucene 7.0 index format. See {@link org.apache.lucene.codecs.lucene86}
 * for an overview of the current index format.
 */
 package org.apache.lucene.codecs.lucene70;
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene84/Lucene84Codec.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene84/Lucene84Codec.java
similarity index 94%
rename from lucene/core/src/java/org/apache/lucene/codecs/lucene84/Lucene84Codec.java
rename to lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene84/Lucene84Codec.java
index e3f061ad27c0..bef563301bab 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene84/Lucene84Codec.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene84/Lucene84Codec.java
@@ -60,31 +60,31 @@ public class Lucene84Codec extends Codec {
   private final LiveDocsFormat liveDocsFormat = new Lucene50LiveDocsFormat();
   private final CompoundFormat compoundFormat = new Lucene50CompoundFormat();
   private final PostingsFormat defaultFormat;
-  
+
   private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() {
     @Override
     public PostingsFormat getPostingsFormatForField(String field) {
       return Lucene84Codec.this.getPostingsFormatForField(field);
     }
   };
-  
+
   private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() {
     @Override
     public DocValuesFormat getDocValuesFormatForField(String field) {
       return Lucene84Codec.this.getDocValuesFormatForField(field);
     }
   };
-  
+
   private final StoredFieldsFormat storedFieldsFormat;
 
-  /** 
+  /**
    * Instantiates a new codec.
    */
   public Lucene84Codec() {
     this(Mode.BEST_SPEED);
   }
-  
-  /** 
+
+  /**
    * Instantiates a new codec, specifying the stored fields compression
    * mode to use.
    * @param mode stored fields compression mode to use for newly
@@ -95,12 +95,12 @@ public Lucene84Codec(Mode mode) {
     this.storedFieldsFormat = new Lucene50StoredFieldsFormat(Objects.requireNonNull(mode));
     this.defaultFormat = new Lucene84PostingsFormat();
   }
-  
+
   @Override
   public final StoredFieldsFormat storedFieldsFormat() {
     return storedFieldsFormat;
   }
-  
+
   @Override
   public final TermVectorsFormat termVectorsFormat() {
     return vectorsFormat;
@@ -110,17 +110,17 @@ public final TermVectorsFormat termVectorsFormat() {
   public final PostingsFormat postingsFormat() {
     return postingsFormat;
   }
-  
+
   @Override
   public final FieldInfosFormat fieldInfosFormat() {
     return fieldInfosFormat;
   }
-  
+
   @Override
-  public final SegmentInfoFormat segmentInfoFormat() {
+  public SegmentInfoFormat segmentInfoFormat() {
     return segmentInfosFormat;
   }
-  
+
   @Override
   public final LiveDocsFormat liveDocsFormat() {
     return liveDocsFormat;
@@ -132,36 +132,36 @@ public final CompoundFormat compoundFormat() {
   }
 
   @Override
-  public final PointsFormat pointsFormat() {
+  public PointsFormat pointsFormat() {
     return new Lucene60PointsFormat();
   }
 
-  /** Returns the postings format that should be used for writing 
+  /** Returns the postings format that should be used for writing
    * new segments of <code>field</code>.
-   * 
+   *
    * The default implementation always returns "Lucene84".
    * <p>
-   * <b>WARNING:</b> if you subclass, you are responsible for index 
-   * backwards compatibility: future version of Lucene are only 
-   * guaranteed to be able to read the default implementation. 
+   * <b>WARNING:</b> if you subclass, you are responsible for index
+   * backwards compatibility: future version of Lucene are only
+   * guaranteed to be able to read the default implementation.
    */
   public PostingsFormat getPostingsFormatForField(String field) {
     return defaultFormat;
   }
-  
-  /** Returns the docvalues format that should be used for writing 
+
+  /** Returns the docvalues format that should be used for writing
    * new segments of <code>field</code>.
-   * 
+   *
    * The default implementation always returns "Lucene80".
    * <p>
-   * <b>WARNING:</b> if you subclass, you are responsible for index 
-   * backwards compatibility: future version of Lucene are only 
-   * guaranteed to be able to read the default implementation. 
+   * <b>WARNING:</b> if you subclass, you are responsible for index
+   * backwards compatibility: future version of Lucene are only
+   * guaranteed to be able to read the default implementation.
   */
   public DocValuesFormat getDocValuesFormatForField(String field) {
     return defaultDVFormat;
   }
-  
+
   @Override
   public final DocValuesFormat docValuesFormat() {
     return docValuesFormat;
diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene84/package.html b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene84/package.html
new file mode 100644
index 000000000000..d0ba893dfad3
--- /dev/null
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene84/package.html
@@ -0,0 +1,25 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<html>
+<head>
+  <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+</head>
+<body>
+Lucene 8.4 file format.
+</body>
+</html>
diff --git a/lucene/backward-codecs/src/resources/META-INF/services/org.apache.lucene.codecs.Codec b/lucene/backward-codecs/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
index a818e355d1c4..cf7a945e1338 100644
--- a/lucene/backward-codecs/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
+++ b/lucene/backward-codecs/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
@@ -14,3 +14,4 @@
 #  limitations under the License.
 
 org.apache.lucene.codecs.lucene80.Lucene80Codec
+org.apache.lucene.codecs.lucene84.Lucene84Codec
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60PointsWriter.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene60/Lucene60PointsWriter.java
similarity index 94%
rename from lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60PointsWriter.java
rename to lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene60/Lucene60PointsWriter.java
index c73a9b18f59a..06e965368d89 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60PointsWriter.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene60/Lucene60PointsWriter.java
@@ -101,9 +101,10 @@ public void writeField(FieldInfo fieldInfo, PointsReader reader) throws IOExcept
                                           values.size())) {
 
       if (values instanceof MutablePointValues) {
-        final long fp = writer.writeField(dataOut, fieldInfo.name, (MutablePointValues) values);
-        if (fp != -1) {
-          indexFPs.put(fieldInfo.name, fp);
+        Runnable finalizer = writer.writeField(dataOut, dataOut, dataOut, fieldInfo.name, (MutablePointValues) values);
+        if (finalizer != null) {
+          indexFPs.put(fieldInfo.name, dataOut.getFilePointer());
+          finalizer.run();
         }
         return;
       }
@@ -125,8 +126,10 @@ public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
       });
 
       // We could have 0 points on merge since all docs with dimensional fields may be deleted:
-      if (writer.getPointCount() > 0) {
-        indexFPs.put(fieldInfo.name, writer.finish(dataOut));
+      Runnable finalizer = writer.finish(dataOut, dataOut, dataOut);
+      if (finalizer != null) {
+        indexFPs.put(fieldInfo.name, dataOut.getFilePointer());
+        finalizer.run();
       }
     }
   }
@@ -210,9 +213,10 @@ public void merge(MergeState mergeState) throws IOException {
             }
           }
 
-          long fp = writer.merge(dataOut, docMaps, bkdReaders);
-          if (fp != -1) {
-            indexFPs.put(fieldInfo.name, fp);
+          Runnable finalizer = writer.merge(dataOut, dataOut, dataOut, docMaps, bkdReaders);
+          if (finalizer != null) {
+            indexFPs.put(fieldInfo.name, dataOut.getFilePointer());
+            finalizer.run();
           }
         }
       } else {
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene60/Lucene60RWPointsFormat.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene60/Lucene60RWPointsFormat.java
new file mode 100644
index 000000000000..6f5127f070b6
--- /dev/null
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene60/Lucene60RWPointsFormat.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.codecs.lucene60;
+
+import java.io.IOException;
+
+import org.apache.lucene.codecs.PointsWriter;
+import org.apache.lucene.index.SegmentWriteState;
+
+/** RW variant of Lucene60PointsFormat */
+public class Lucene60RWPointsFormat extends Lucene60PointsFormat {
+
+  /** Sole constructor. */
+  public Lucene60RWPointsFormat() {}
+
+  @Override
+  public PointsWriter fieldsWriter(SegmentWriteState state) throws IOException {
+    return new Lucene60PointsWriter(state);
+  }
+
+}
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java
similarity index 85%
rename from lucene/core/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java
rename to lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java
index 4487ed012b6c..f6130bddff6a 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java
@@ -21,10 +21,7 @@
 import java.util.Arrays;
 
 import org.apache.lucene.codecs.Codec;
-import org.apache.lucene.codecs.FilterCodec;
-import org.apache.lucene.codecs.PointsFormat;
-import org.apache.lucene.codecs.PointsReader;
-import org.apache.lucene.codecs.PointsWriter;
+import org.apache.lucene.codecs.lucene84.Lucene84RWCodec;
 import org.apache.lucene.document.BinaryPoint;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.BasePointsFormatTestCase;
@@ -35,8 +32,6 @@
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.MockRandomMergePolicy;
 import org.apache.lucene.index.PointValues;
-import org.apache.lucene.index.SegmentReadState;
-import org.apache.lucene.index.SegmentWriteState;
 import org.apache.lucene.index.PointValues.IntersectVisitor;
 import org.apache.lucene.index.PointValues.Relation;
 import org.apache.lucene.store.Directory;
@@ -51,38 +46,8 @@ public class TestLucene60PointsFormat extends BasePointsFormatTestCase {
   private final int maxPointsInLeafNode;
 
   public TestLucene60PointsFormat() {
-    // standard issue
-    Codec defaultCodec = TestUtil.getDefaultCodec();
-    if (random().nextBoolean()) {
-      // randomize parameters
-      maxPointsInLeafNode = TestUtil.nextInt(random(), 50, 500);
-      double maxMBSortInHeap = 3.0 + (3*random().nextDouble());
-      if (VERBOSE) {
-        System.out.println("TEST: using Lucene60PointsFormat with maxPointsInLeafNode=" + maxPointsInLeafNode + " and maxMBSortInHeap=" + maxMBSortInHeap);
-      }
-
-      // sneaky impersonation!
-      codec = new FilterCodec(defaultCodec.getName(), defaultCodec) {
-        @Override
-        public PointsFormat pointsFormat() {
-          return new PointsFormat() {
-            @Override
-            public PointsWriter fieldsWriter(SegmentWriteState writeState) throws IOException {
-              return new Lucene60PointsWriter(writeState, maxPointsInLeafNode, maxMBSortInHeap);
-            }
-
-            @Override
-            public PointsReader fieldsReader(SegmentReadState readState) throws IOException {
-              return new Lucene60PointsReader(readState);
-            }
-          };
-        }
-      };
-    } else {
-      // standard issue
-      codec = defaultCodec;
-      maxPointsInLeafNode = BKDWriter.DEFAULT_MAX_POINTS_IN_LEAF_NODE;
-    }
+    codec = new Lucene84RWCodec();
+    maxPointsInLeafNode = BKDWriter.DEFAULT_MAX_POINTS_IN_LEAF_NODE;
   }
 
   @Override
@@ -90,12 +55,6 @@ protected Codec getCodec() {
     return codec;
   }
 
-  @Override
-  public void testMergeStability() throws Exception {
-    assumeFalse("TODO: mess with the parameters and test gets angry!", codec instanceof FilterCodec);
-    super.testMergeStability();
-  }
-
   public void testEstimatePointCount() throws IOException {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig();
@@ -239,12 +198,6 @@ public void testEstimatePointCount2Dims() throws IOException {
     final LeafReader lr = getOnlyLeafReader(r);
     PointValues points = lr.getPointValues("f");
 
-    // With >1 dims, the tree is balanced
-    long actualMaxPointsInLeafNode = points.size();
-    while (actualMaxPointsInLeafNode > maxPointsInLeafNode) {
-      actualMaxPointsInLeafNode = (actualMaxPointsInLeafNode + 1) / 2;
-    }
-
     IntersectVisitor allPointsVisitor = new IntersectVisitor() {
       @Override
       public void visit(int docID, byte[] packedValue) throws IOException {}
@@ -259,9 +212,9 @@ public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
     };
 
     // If all points match, then the point count is numLeaves * maxPointsInLeafNode
-    final int numLeaves = (int) Math.max(Long.highestOneBit( ((points.size() - 1) / actualMaxPointsInLeafNode)) << 1, 1);
+    final int numLeaves = (int) Math.ceil((double) points.size() / maxPointsInLeafNode);
 
-    assertEquals(numLeaves * actualMaxPointsInLeafNode, points.estimatePointCount(allPointsVisitor));
+    assertEquals(numLeaves * maxPointsInLeafNode, points.estimatePointCount(allPointsVisitor));
     assertEquals(numDocs, points.estimateDocCount(allPointsVisitor));
 
     IntersectVisitor noPointsVisitor = new IntersectVisitor() {
@@ -302,7 +255,7 @@ public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
     final long pointCount = points.estimatePointCount(onePointMatchVisitor);
     // The number of matches needs to be multiple of count per leaf
-    final long countPerLeaf = (actualMaxPointsInLeafNode + 1) / 2;
+    final long countPerLeaf = (maxPointsInLeafNode + 1) / 2;
     assertTrue(""+pointCount, pointCount % countPerLeaf == 0);
     // in extreme cases, a point can be be shared by 4 leaves
     assertTrue(""+pointCount, pointCount / countPerLeaf <= 4 && pointCount / countPerLeaf >= 1);
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene70/Lucene70RWSegmentInfoFormat.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene70/Lucene70RWSegmentInfoFormat.java
new file mode 100644
index 000000000000..75f31c294029
--- /dev/null
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene70/Lucene70RWSegmentInfoFormat.java
@@ -0,0 +1,204 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.codecs.lucene70;
+
+import java.io.IOException;
+import java.util.Set;
+
+import org.apache.lucene.codecs.CodecUtil;
+import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.SegmentInfo;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.SortedNumericSelector;
+import org.apache.lucene.search.SortedNumericSortField;
+import org.apache.lucene.search.SortedSetSelector;
+import org.apache.lucene.search.SortedSetSortField;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.util.Version;
+
+/**
+ * Writable version of Lucene70SegmentInfoFormat for testing
+ */
+public class Lucene70RWSegmentInfoFormat extends Lucene70SegmentInfoFormat {
+
+  @Override
+  public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
+    final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene70SegmentInfoFormat.SI_EXTENSION);
+
+    try (IndexOutput output = dir.createOutput(fileName, ioContext)) {
+      // Only add the file once we've successfully created it, else IFD assert can trip:
+      si.addFile(fileName);
+      CodecUtil.writeIndexHeader(output,
+                                   Lucene70SegmentInfoFormat.CODEC_NAME,
+                                   Lucene70SegmentInfoFormat.VERSION_CURRENT,
+                                   si.getId(),
+                                   "");
+      Version version = si.getVersion();
+      if (version.major < 7) {
+        throw new IllegalArgumentException("invalid major version: should be >= 7 but got: " + version.major + " segment=" + si);
+      }
+      // Write the Lucene version that created this segment, since 3.1
+      output.writeInt(version.major);
+      output.writeInt(version.minor);
+      output.writeInt(version.bugfix);
+
+      // Write the min Lucene version that contributed docs to the segment, since 7.0
+      if (si.getMinVersion() != null) {
+        output.writeByte((byte) 1);
+        Version minVersion = si.getMinVersion();
+        output.writeInt(minVersion.major);
+        output.writeInt(minVersion.minor);
+        output.writeInt(minVersion.bugfix);
+      } else {
+        output.writeByte((byte) 0);
+      }
+
+      assert version.prerelease == 0;
+      output.writeInt(si.maxDoc());
+
+      output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
+      output.writeMapOfStrings(si.getDiagnostics());
+      Set<String> files = si.files();
+      for (String file : files) {
+        if (!IndexFileNames.parseSegmentName(file).equals(si.name)) {
+          throw new IllegalArgumentException("invalid files: expected segment=" + si.name + ", got=" + files);
+        }
+      }
+      output.writeSetOfStrings(files);
+      output.writeMapOfStrings(si.getAttributes());
+
+      Sort indexSort = si.getIndexSort();
+      int numSortFields = indexSort == null ? 0 : indexSort.getSort().length;
+      output.writeVInt(numSortFields);
+      for (int i = 0; i < numSortFields; ++i) {
+        SortField sortField = indexSort.getSort()[i];
+        SortField.Type sortType = sortField.getType();
+        output.writeString(sortField.getField());
+        int sortTypeID;
+        switch (sortField.getType()) {
+          case STRING:
+            sortTypeID = 0;
+            break;
+          case LONG:
+            sortTypeID = 1;
+            break;
+          case INT:
+            sortTypeID = 2;
+            break;
+          case DOUBLE:
+            sortTypeID = 3;
+            break;
+          case FLOAT:
+            sortTypeID = 4;
+            break;
+          case CUSTOM:
+            if (sortField instanceof SortedSetSortField) {
+              sortTypeID = 5;
+              sortType = SortField.Type.STRING;
+            } else if (sortField instanceof SortedNumericSortField) {
+              sortTypeID = 6;
+              sortType = ((SortedNumericSortField) sortField).getNumericType();
+            } else {
+              throw new IllegalStateException("Unexpected SortedNumericSortField " + sortField);
+            }
+            break;
+          default:
+            throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
+        }
+        output.writeVInt(sortTypeID);
+        if (sortTypeID == 5) {
+          SortedSetSortField ssf = (SortedSetSortField) sortField;
+          if (ssf.getSelector() == SortedSetSelector.Type.MIN) {
+            output.writeByte((byte) 0);
+          } else if (ssf.getSelector() == SortedSetSelector.Type.MAX) {
+            output.writeByte((byte) 1);
+          } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MIN) {
+            output.writeByte((byte) 2);
+          } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MAX) {
+            output.writeByte((byte) 3);
+          } else {
+            throw new IllegalStateException("Unexpected SortedSetSelector type: " + ssf.getSelector());
+          }
+        } else if (sortTypeID == 6) {
+          SortedNumericSortField snsf = (SortedNumericSortField) sortField;
+          if (snsf.getNumericType() == SortField.Type.LONG) {
+            output.writeByte((byte) 0);
+          } else if (snsf.getNumericType() == SortField.Type.INT) {
+            output.writeByte((byte) 1);
+          } else if (snsf.getNumericType() == SortField.Type.DOUBLE) {
+            output.writeByte((byte) 2);
+          } else if (snsf.getNumericType() == SortField.Type.FLOAT) {
+            output.writeByte((byte) 3);
+          } else {
+            throw new IllegalStateException("Unexpected SortedNumericSelector type: " + snsf.getNumericType());
+          }
+          if (snsf.getSelector() == SortedNumericSelector.Type.MIN) {
+            output.writeByte((byte) 0);
+          } else if (snsf.getSelector() == SortedNumericSelector.Type.MAX) {
+            output.writeByte((byte) 1);
+          } else {
+            throw new IllegalStateException("Unexpected sorted numeric selector type: " + snsf.getSelector());
+          }
+        }
+        output.writeByte((byte) (sortField.getReverse() ? 0 : 1));
+
+        // write missing value
+        Object missingValue = sortField.getMissingValue();
+        if (missingValue == null) {
+          output.writeByte((byte) 0);
+        } else {
+          switch(sortType) {
+            case STRING:
+              if (missingValue == SortField.STRING_LAST) {
+                output.writeByte((byte) 1);
+              } else if (missingValue == SortField.STRING_FIRST) {
+                output.writeByte((byte) 2);
+              } else {
+                throw new AssertionError("unrecognized missing value for STRING field \"" + sortField.getField() + "\": " + missingValue);
+              }
+              break;
+            case LONG:
+              output.writeByte((byte) 1);
+              output.writeLong(((Long) missingValue).longValue());
+              break;
+            case INT:
+              output.writeByte((byte) 1);
+              output.writeInt(((Integer) missingValue).intValue());
+              break;
+            case DOUBLE:
+              output.writeByte((byte) 1);
+              output.writeLong(Double.doubleToLongBits(((Double) missingValue).doubleValue()));
+              break;
+            case FLOAT:
+              output.writeByte((byte) 1);
+              output.writeInt(Float.floatToIntBits(((Float) missingValue).floatValue()));
+              break;
+            default:
+              throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
+          }
+        }
+      }
+
+      CodecUtil.writeFooter(output);
+    }
+  }
+
+}
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java
similarity index 77%
rename from lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java
rename to lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java
index 3bf6a18c28aa..ac516a121ef0 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java
@@ -14,22 +14,29 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.lucene.codecs.lucene70;
 
 import org.apache.lucene.codecs.Codec;
+import org.apache.lucene.codecs.FilterCodec;
+import org.apache.lucene.codecs.SegmentInfoFormat;
 import org.apache.lucene.index.BaseSegmentInfoFormatTestCase;
-import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.Version;
 
 public class TestLucene70SegmentInfoFormat extends BaseSegmentInfoFormatTestCase {
 
   @Override
   protected Version[] getVersions() {
-    return new Version[] { Version.LATEST };
+    return new Version[] { Version.LUCENE_8_4_0 };
   }
 
   @Override
   protected Codec getCodec() {
-    return TestUtil.getDefaultCodec();
+    return new FilterCodec("Lucene84", Codec.forName("Lucene84")) {
+      @Override
+      public SegmentInfoFormat segmentInfoFormat() {
+        return new Lucene70RWSegmentInfoFormat();
+      }
+    };
   }
 }
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene84/Lucene84RWCodec.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene84/Lucene84RWCodec.java
new file mode 100644
index 000000000000..c1fd4677f928
--- /dev/null
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene84/Lucene84RWCodec.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.codecs.lucene84;
+
+import org.apache.lucene.codecs.PointsFormat;
+import org.apache.lucene.codecs.SegmentInfoFormat;
+import org.apache.lucene.codecs.lucene60.Lucene60RWPointsFormat;
+import org.apache.lucene.codecs.lucene70.Lucene70RWSegmentInfoFormat;
+
+/**
+ * RW impersonation of {@link Lucene84Codec}.
+ */
+public class Lucene84RWCodec extends Lucene84Codec {
+
+  @Override
+  public PointsFormat pointsFormat() {
+    return new Lucene60RWPointsFormat();
+  }
+
+  @Override
+  public SegmentInfoFormat segmentInfoFormat() {
+    return new Lucene70RWSegmentInfoFormat();
+  }
+
+}
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
index 245cef1b2a09..c14919462056 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
@@ -305,7 +305,9 @@ public void testCreateEmptyIndex() throws Exception {
     "8.5.0-cfs",
     "8.5.0-nocfs",
     "8.5.1-cfs",
-    "8.5.1-nocfs"
+    "8.5.1-nocfs",
+    "8.5.2-cfs",
+    "8.5.2-nocfs"
   };
 
   public static String[] getOldNames() {
@@ -322,7 +324,8 @@ public static String[] getOldNames() {
     "sorted.8.4.0",
     "sorted.8.4.1",
     "sorted.8.5.0",
-    "sorted.8.5.1"
+    "sorted.8.5.1",
+    "sorted.8.5.2"
   };
 
   public static String[] getOldSortedNames() {
@@ -524,7 +527,9 @@ public static String[] getOldSortedNames() {
     "7.7.1-cfs",
     "7.7.1-nocfs",
     "7.7.2-cfs",
-    "7.7.2-nocfs"
+    "7.7.2-nocfs",
+    "7.7.3-cfs",
+    "7.7.3-nocfs"
   };
 
   // TODO: on 6.0.0 release, gen the single segment indices and add here:
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.8.5.2-cfs.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.8.5.2-cfs.zip
new file mode 100644
index 000000000000..06ef027031e8
Binary files /dev/null and b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.8.5.2-cfs.zip differ
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.8.5.2-nocfs.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.8.5.2-nocfs.zip
new file mode 100644
index 000000000000..dabe2d4ca0b4
Binary files /dev/null and b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.8.5.2-nocfs.zip differ
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/sorted.8.5.2.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/sorted.8.5.2.zip
new file mode 100644
index 000000000000..738f1db9938b
Binary files /dev/null and b/lucene/backward-codecs/src/test/org/apache/lucene/index/sorted.8.5.2.zip differ
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/unsupported.7.7.3-cfs.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/unsupported.7.7.3-cfs.zip
new file mode 100644
index 000000000000..03f5d64bb4af
Binary files /dev/null and b/lucene/backward-codecs/src/test/org/apache/lucene/index/unsupported.7.7.3-cfs.zip differ
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/unsupported.7.7.3-nocfs.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/unsupported.7.7.3-nocfs.zip
new file mode 100644
index 000000000000..94aaa74815c1
Binary files /dev/null and b/lucene/backward-codecs/src/test/org/apache/lucene/index/unsupported.7.7.3-nocfs.zip differ
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/unsupported.sorted.7.7.3.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/unsupported.sorted.7.7.3.zip
new file mode 100644
index 000000000000..3468e8b942ea
Binary files /dev/null and b/lucene/backward-codecs/src/test/org/apache/lucene/index/unsupported.sorted.7.7.3.zip differ
diff --git a/lucene/benchmark/build.gradle b/lucene/benchmark/build.gradle
index 52b54d9fe692..4b34ccefdd9c 100644
--- a/lucene/benchmark/build.gradle
+++ b/lucene/benchmark/build.gradle
@@ -15,11 +15,13 @@
  * limitations under the License.
  */
 
+apply plugin: 'java'
+// NOT a 'java-library'.  Maybe 'application' but seems too limiting.
 
-apply plugin: 'java-library'
+description = 'System for benchmarking Lucene'
 
 dependencies {
-  api project(':lucene:core')
+  implementation project(':lucene:core')
 
   implementation project(':lucene:analysis:common')
   implementation project(':lucene:facet')
@@ -35,5 +37,120 @@ dependencies {
     exclude module: "xml-apis"
   })
 
+  runtimeOnly project(':lucene:analysis:icu')
+
   testImplementation project(':lucene:test-framework')
 }
+
+def tempDir = file("temp")
+def workDir = file("work")
+
+task run(type: JavaExec) {
+  description "Run a perf test (optional: -PtaskAlg=conf/your-algorithm-file -PmaxHeapSize=1G)"
+  main 'org.apache.lucene.benchmark.byTask.Benchmark'
+  classpath sourceSets.main.runtimeClasspath
+  // allow these to be specified on the CLI via -PtaskAlg=  for example
+  args = [propertyOrDefault('taskAlg', 'conf/micro-standard.alg')]
+
+  maxHeapSize = propertyOrDefault('maxHeapSize', '1G')
+
+  String stdOutStr = propertyOrDefault('standardOutput', null)
+  if (stdOutStr != null) {
+    standardOutput = new File(stdOutStr).newOutputStream()
+  }
+
+  debugOptions {
+    enabled = false
+    port = 5005
+    suspend = true
+  }
+}
+
+/* Old "collation" Ant target:
+gradle getTop100kWikiWordFiles run -PtaskAlg=conf/collation.alg -PstandardOutput=work/collation.benchmark.output.txt
+perl -CSD scripts/collation.bm2jira.pl work/collation.benchmark.output.txt
+ */
+
+/* Old "shingle" Ant target:
+gradle getReuters run -PtaskAlg=conf/shingle.alg -PstandardOutput=work/shingle.benchmark.output.txt
+perl -CSD scripts/shingle.bm2jira.pl work/shingle.benchmark.output.txt
+ */
+
+// The remaining tasks just get / extract / prepare data
+
+task getEnWiki(type: Download) {
+  def finalName = "enwiki-20070527-pages-articles.xml"
+  src "https://home.apache.org/~dsmiley/data/" + finalName + ".bz2"
+  dest file("$tempDir/" + finalName + ".bz2")
+  overwrite false
+  compress false
+
+  doLast {
+    ant.bunzip2(src: dest, dest: tempDir)
+  }
+  outputs.file file("$tempDir/$finalName")
+}
+
+task getGeoNames(type: Download) {
+  // note: latest data is at: https://download.geonames.org/export/dump/allCountries.zip
+  //   and then randomize with: gsort -R -S 1500M file.txt > file_random.txt
+  //   and then compress with: bzip2 -9 -k file_random.txt
+  def finalName = "geonames_20130921_randomOrder_allCountries.txt"
+  src "https://home.apache.org/~dsmiley/data/" + finalName + ".bz2"
+  dest file("$tempDir/" + finalName + ".bz2")
+  overwrite false
+  compress false
+
+  doLast {
+    ant.bunzip2(src: dest, dest: tempDir) // will chop off .bz2
+  }
+  outputs.file file("$tempDir/$finalName")
+}
+
+task getTop100kWikiWordFiles(type: Download) {
+  src "https://home.apache.org/~rmuir/wikipedia/top.100k.words.de.en.fr.uk.wikipedia.2009-11.tar.bz2"
+  dest file("$tempDir/${src.file.split('/').last()}")
+  overwrite false
+  compress false
+
+  def finalPath = file("$workDir/top100k-out")
+
+  doLast {
+    project.sync {
+      from tarTree(dest) // defined above. Will decompress on the fly
+      into finalPath
+    }
+  }
+  outputs.dir finalPath
+}
+
+task getReuters(type: Download) {
+  // note: there is no HTTPS url and we don't care because this is merely test/perf data
+  src "http://www.daviddlewis.com/resources/testcollections/reuters21578/reuters21578.tar.gz"
+  dest file("$tempDir/${src.file.split('/').last()}")
+  overwrite false
+  compress false
+
+  def untarPath = file("$workDir/reuters")
+  def finalPath = file("$workDir/reuters-out")
+  dependsOn sourceSets.main.runtimeClasspath
+
+  doLast {
+    project.sync {
+      from(tarTree(dest)) { // defined above. Will decompress on the fly
+        exclude '*.txt'
+      }
+      into untarPath
+    }
+    println "Extracting reuters to $finalPath"
+    finalPath.deleteDir() // necessary
+    // TODO consider porting ExtractReuters to groovy?
+    project.javaexec {
+      main = 'org.apache.lucene.benchmark.utils.ExtractReuters'
+      classpath = sourceSets.main.runtimeClasspath
+      maxHeapSize = '1G'
+      args = [untarPath, finalPath]
+    }
+  }
+  outputs.dir finalPath
+}
\ No newline at end of file
diff --git a/lucene/benchmark/scripts/collation.bm2jira.pl b/lucene/benchmark/scripts/collation.bm2jira.pl
index b423f75ee8a8..41f67491bff6 100644
--- a/lucene/benchmark/scripts/collation.bm2jira.pl
+++ b/lucene/benchmark/scripts/collation.bm2jira.pl
@@ -40,17 +40,17 @@
 }
 
 # Print out platform info
-print "JAVA:\n", `java -version 2>&1`, "\nOS:\n";
-if ($^O =~ /win/i) {
-  print "$^O\n";
-  eval {
-    require Win32;
-    print Win32::GetOSName(), "\n", Win32::GetOSVersion(), "\n";
-  };
-  die "Error loading Win32: $@" if ($@);
-} else {
-  print `uname -a 2>&1`;
-}
+#print "JAVA:\n", `java -version 2>&1`, "\nOS:\n";
+#if ($^O =~ /win/i) {
+#  print "$^O\n";
+#  eval {
+#    require Win32;
+#    print Win32::GetOSName(), "\n", Win32::GetOSVersion(), "\n";
+#  };
+#  die "Error loading Win32: $@" if ($@);
+#} else {
+#  print `uname -a 2>&1`;
+#}
 
 print "\n||Language||java.text||ICU4J||KeywordAnalyzer||ICU4J Improvement||\n";
diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java
index 55103284d43b..db64781cff70 100644
--- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java
+++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java
@@ -29,7 +29,7 @@
 import org.apache.lucene.benchmark.byTask.utils.Config;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.codecs.lucene84.Lucene84Codec;
+import org.apache.lucene.codecs.lucene86.Lucene86Codec;
 import org.apache.lucene.index.ConcurrentMergeScheduler;
 import org.apache.lucene.index.IndexCommit;
 import org.apache.lucene.index.IndexDeletionPolicy;
@@ -138,7 +138,7 @@ public static IndexWriterConfig createWriterConfig(Config config, PerfRunData ru
     if (defaultCodec == null && postingsFormat != null) {
       try {
         final PostingsFormat postingsFormatChosen = PostingsFormat.forName(postingsFormat);
-        iwConf.setCodec(new Lucene84Codec() {
+        iwConf.setCodec(new Lucene86Codec() {
           @Override
           public PostingsFormat getPostingsFormatForField(String field) {
             return postingsFormatChosen;
diff --git a/lucene/build.gradle b/lucene/build.gradle
index 1efd5f4d6a03..1c71edb190b7 100644
--- a/lucene/build.gradle
+++ b/lucene/build.gradle
@@ -15,6 +15,8 @@
  * limitations under the License.
  */
 
+description = 'Parent project for Apache Lucene Core'
+
 subprojects {
   group "org.apache.lucene"
 }
\ No newline at end of file
diff --git a/lucene/classification/build.gradle b/lucene/classification/build.gradle
index 19c9ae7cc209..736dfb305a78 100644
--- a/lucene/classification/build.gradle
+++ b/lucene/classification/build.gradle
@@ -17,6 +17,8 @@
 
 apply plugin: 'java-library'
 
+description = 'Classification module for Lucene'
+
 dependencies {
   api project(':lucene:core')
 
diff --git a/lucene/classification/src/test/org/apache/lucene/classification/document/DocumentClassificationTestBase.java b/lucene/classification/src/test/org/apache/lucene/classification/document/DocumentClassificationTestBase.java
index 3848151c5f87..bdf3ed8b2493 100644
--- a/lucene/classification/src/test/org/apache/lucene/classification/document/DocumentClassificationTestBase.java
+++ b/lucene/classification/src/test/org/apache/lucene/classification/document/DocumentClassificationTestBase.java
@@ -36,7 +36,7 @@
 /**
  * Base class for testing {@link org.apache.lucene.classification.Classifier}s
  */
-public abstract class DocumentClassificationTestBase<T> extends ClassificationTestBase<T> {
+public abstract class DocumentClassificationTestBase<T> extends ClassificationTestBase<T>{
 
   protected static final BytesRef VIDEOGAME_RESULT = new BytesRef("videogames");
   protected static final BytesRef VIDEOGAME_ANALYZED_RESULT = new BytesRef("videogam");
diff --git a/lucene/codecs/build.gradle b/lucene/codecs/build.gradle
index e39f2724af4c..ad26aae2d941 100644
--- a/lucene/codecs/build.gradle
+++ b/lucene/codecs/build.gradle
@@ -17,6 +17,8 @@
 
 apply plugin: 'java-library'
 
+description = 'Lucene codecs and postings formats'
+
 dependencies {
   implementation project(':lucene:core')
   testImplementation project(':lucene:test-framework')
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/blockterms/VariableGapTermsIndexReader.java b/lucene/codecs/src/java/org/apache/lucene/codecs/blockterms/VariableGapTermsIndexReader.java
index f3d373ebc793..a67f2ddc7cec 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/blockterms/VariableGapTermsIndexReader.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/blockterms/VariableGapTermsIndexReader.java
@@ -148,7 +148,7 @@ private final class FieldIndexData implements Accountable {
     public FieldIndexData(IndexInput in, FieldInfo fieldInfo, long indexStart) throws IOException {
       IndexInput clone = in.clone();
       clone.seek(indexStart);
-      fst = new FST<>(clone, fstOutputs);
+      fst = new FST<>(clone, clone, fstOutputs);
       clone.close();
 
       /*
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/blockterms/VariableGapTermsIndexWriter.java b/lucene/codecs/src/java/org/apache/lucene/codecs/blockterms/VariableGapTermsIndexWriter.java
index b8785050b90b..dd327a05f1f8 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/blockterms/VariableGapTermsIndexWriter.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/blockterms/VariableGapTermsIndexWriter.java
@@ -280,7 +280,7 @@ public void add(BytesRef text, TermStats stats, long termsFilePointer) throws IO
     public void finish(long termsFilePointer) throws IOException {
       fst = fstCompiler.compile();
       if (fst != null) {
-        fst.save(out);
+        fst.save(out, out);
       }
     }
   }
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/OrdsBlockTreeTermsWriter.java b/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/OrdsBlockTreeTermsWriter.java
index b04977567ad3..e3bd3cad062d 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/OrdsBlockTreeTermsWriter.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/OrdsBlockTreeTermsWriter.java
@@ -832,7 +832,7 @@ public void finish() throws IOException {
 
         // Write FST to index
         indexStartFP = indexOut.getFilePointer();
-        root.index.save(indexOut);
+        root.index.save(indexOut, indexOut);
         //System.out.println("  write FST " + indexStartFP + " field=" + fieldInfo.name);
 
         // if (SAVE_DOT_FILES || DEBUG) {
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/OrdsFieldReader.java b/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/OrdsFieldReader.java
index 54954e85d3d2..e9772fb6f063 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/OrdsFieldReader.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/OrdsFieldReader.java
@@ -78,7 +78,7 @@ final class OrdsFieldReader extends Terms implements Accountable {
       final IndexInput clone = indexIn.clone();
       //System.out.println("start=" + indexStartFP + " field=" + fieldInfo.name);
       clone.seek(indexStartFP);
-      index = new FST<>(clone, OrdsBlockTreeTermsWriter.FST_OUTPUTS);
+      index = new FST<>(clone, clone, OrdsBlockTreeTermsWriter.FST_OUTPUTS);
 
       /*
       if (true) {
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsReader.java b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsReader.java
index 8c232fa8d486..4cbaffce4894 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsReader.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsReader.java
@@ -176,7 +176,7 @@ final class TermsReader extends Terms implements Accountable {
       this.sumTotalTermFreq = sumTotalTermFreq;
       this.sumDocFreq = sumDocFreq;
       this.docCount = docCount;
-      this.dict = new FST<>(in, new FSTTermOutputs(fieldInfo));
+      this.dict = new FST<>(in, in, new FSTTermOutputs(fieldInfo));
     }
 
     @Override
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsWriter.java b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsWriter.java
index 751f3097e534..c16c2349b1f5 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsWriter.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsWriter.java
@@ -209,7 +209,7 @@ public void close() throws IOException {
         }
         out.writeVLong(field.sumDocFreq);
         out.writeVInt(field.docCount);
-        field.dict.save(out);
+        field.dict.save(out, out);
       }
       writeTrailer(out, dirStart);
       CodecUtil.writeFooter(out);
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfoFormat.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfoFormat.java
index 5f22f6252c21..2acfe01618d1 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfoFormat.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfoFormat.java
@@ -29,17 +29,16 @@
 import org.apache.lucene.codecs.SegmentInfoFormat;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.IndexSorter;
 import org.apache.lucene.index.SegmentInfo;
+import org.apache.lucene.index.SortFieldProvider;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.SortedNumericSelector;
-import org.apache.lucene.search.SortedNumericSortField;
-import org.apache.lucene.search.SortedSetSelector;
-import org.apache.lucene.search.SortedSetSortField;
+import org.apache.lucene.store.ByteArrayDataInput;
 import org.apache.lucene.store.ChecksumIndexInput;
+import org.apache.lucene.store.DataOutput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.BytesRef;
@@ -68,11 +67,9 @@ public class SimpleTextSegmentInfoFormat extends SegmentInfoFormat {
   final static BytesRef SI_FILE = new BytesRef(" file ");
   final static BytesRef SI_ID = new BytesRef(" id ");
   final static BytesRef SI_SORT = new BytesRef(" sort ");
-  final static BytesRef SI_SORT_FIELD = new BytesRef(" field ");
   final static BytesRef SI_SORT_TYPE = new BytesRef(" type ");
-  final static BytesRef SI_SELECTOR_TYPE = new BytesRef(" selector ");
-  final static BytesRef SI_SORT_REVERSE = new BytesRef(" reverse ");
-  final static BytesRef SI_SORT_MISSING = new BytesRef(" missing ");
+  final static BytesRef SI_SORT_NAME = new BytesRef(" name ");
+  final static BytesRef SI_SORT_BYTES = new BytesRef(" bytes ");
 
   public static final String SI_EXTENSION = "si";
 
@@ -171,133 +168,18 @@ public SegmentInfo read(Directory directory, String segmentName, byte[] segmentI
       SortField[] sortField = new SortField[numSortFields];
       for (int i = 0; i < numSortFields; ++i) {
         SimpleTextUtil.readLine(input, scratch);
-        assert StringHelper.startsWith(scratch.get(), SI_SORT_FIELD);
-        final String field = readString(SI_SORT_FIELD.length, scratch);
+        assert StringHelper.startsWith(scratch.get(), SI_SORT_NAME);
+        final String provider = readString(SI_SORT_NAME.length, scratch);
 
         SimpleTextUtil.readLine(input, scratch);
         assert StringHelper.startsWith(scratch.get(), SI_SORT_TYPE);
-        final String typeAsString = readString(SI_SORT_TYPE.length, scratch);
-
-        final SortField.Type type;
-        SortedSetSelector.Type selectorSet = null;
-        SortedNumericSelector.Type selectorNumeric = null;
-        switch (typeAsString) {
-          case "string":
-            type = SortField.Type.STRING;
-            break;
-          case "long":
-            type = SortField.Type.LONG;
-            break;
-          case "int":
-            type = SortField.Type.INT;
-            break;
-          case "double":
-            type = SortField.Type.DOUBLE;
-            break;
-          case "float":
-            type = SortField.Type.FLOAT;
-            break;
-          case "multi_valued_string":
-            type = SortField.Type.STRING;
-            selectorSet = readSetSelector(input, scratch);
-            break;
-          case "multi_valued_long":
-            type = SortField.Type.LONG;
-            selectorNumeric = readNumericSelector(input, scratch);
-            break;
-          case "multi_valued_int":
-            type = SortField.Type.INT;
-            selectorNumeric = readNumericSelector(input, scratch);
-            break;
-          case "multi_valued_double":
-            type = SortField.Type.DOUBLE;
-            selectorNumeric = readNumericSelector(input, scratch);
-            break;
-          case "multi_valued_float":
-            type = SortField.Type.FLOAT;
-            selectorNumeric = readNumericSelector(input, scratch);
-            break;
-          default:
-            throw new CorruptIndexException("unable to parse sort type string: " + typeAsString, input);
-        }
 
         SimpleTextUtil.readLine(input, scratch);
-        assert StringHelper.startsWith(scratch.get(), SI_SORT_REVERSE);
-        final boolean reverse = Boolean.parseBoolean(readString(SI_SORT_REVERSE.length, scratch));
-
-        SimpleTextUtil.readLine(input, scratch);
-        assert StringHelper.startsWith(scratch.get(), SI_SORT_MISSING);
-        final String missingLastAsString = readString(SI_SORT_MISSING.length, scratch);
-        final Object missingValue;
-        switch (type) {
-          case STRING:
-            switch (missingLastAsString) {
-              case "null":
-                missingValue = null;
-                break;
-              case "first":
-                missingValue = SortField.STRING_FIRST;
-                break;
-              case "last":
-                missingValue = SortField.STRING_LAST;
-                break;
-              default:
-                throw new CorruptIndexException("unable to parse missing string: " + typeAsString, input);
-            }
-            break;
-          case LONG:
-            switch (missingLastAsString) {
-              case "null":
-                missingValue = null;
-                break;
-              default:
-                missingValue = Long.parseLong(missingLastAsString);
-                break;
-            }
-            break;
-          case INT:
-            switch (missingLastAsString) {
-              case "null":
-                missingValue = null;
-                break;
-              default:
-                missingValue = Integer.parseInt(missingLastAsString);
-                break;
-            }
-            break;
-          case DOUBLE:
-            switch (missingLastAsString) {
-              case "null":
-                missingValue = null;
-                break;
-              default:
-                missingValue = Double.parseDouble(missingLastAsString);
-                break;
-            }
-            break;
-          case FLOAT:
-            switch (missingLastAsString) {
-              case "null":
-                missingValue = null;
-                break;
-              default:
-                missingValue = Float.parseFloat(missingLastAsString);
-                break;
-            }
-            break;
-          default:
-            throw new AssertionError();
-        }
-        if (selectorSet != null) {
-          sortField[i] = new SortedSetSortField(field, reverse);
-        } else if (selectorNumeric != null) {
-          sortField[i] = new SortedNumericSortField(field, type, reverse);
-        } else {
-          sortField[i] = new SortField(field, type, reverse);
-        }
-        if (missingValue != null) {
-          sortField[i].setMissingValue(missingValue);
-        }
+        assert StringHelper.startsWith(scratch.get(), SI_SORT_BYTES);
+        BytesRef serializedSort = SimpleTextUtil.fromBytesRefString(readString(SI_SORT_BYTES.length, scratch));
+        final ByteArrayDataInput bytes = new ByteArrayDataInput(serializedSort.bytes, serializedSort.offset, serializedSort.length);
+        sortField[i] = SortFieldProvider.forName(provider).readSortField(bytes);
+        assert bytes.eof();
       }
       Sort indexSort = sortField.length == 0 ? null : new Sort(sortField);
@@ -313,38 +195,6 @@ public SegmentInfo read(Directory directory, String segmentName, byte[] segmentI
   private String readString(int offset, BytesRefBuilder scratch) {
     return new String(scratch.bytes(), offset, scratch.length()-offset, StandardCharsets.UTF_8);
   }
-
-  private SortedSetSelector.Type readSetSelector(IndexInput input, BytesRefBuilder scratch) throws IOException {
-    SimpleTextUtil.readLine(input, scratch);
-    assert StringHelper.startsWith(scratch.get(), SI_SELECTOR_TYPE);
-    final String selectorAsString = readString(SI_SELECTOR_TYPE.length, scratch);
-    switch (selectorAsString) {
-      case "min":
-        return SortedSetSelector.Type.MIN;
-      case "middle_min":
-        return SortedSetSelector.Type.MIDDLE_MIN;
-      case "middle_max":
-        return SortedSetSelector.Type.MIDDLE_MAX;
-      case "max":
-        return SortedSetSelector.Type.MAX;
-      default:
-        throw new CorruptIndexException("unable to parse SortedSetSelector type: " + selectorAsString, input);
-    }
-  }
-
-  private SortedNumericSelector.Type readNumericSelector(IndexInput input, BytesRefBuilder scratch) throws IOException {
-    SimpleTextUtil.readLine(input, scratch);
-    assert StringHelper.startsWith(scratch.get(), SI_SELECTOR_TYPE);
-    final String selectorAsString = readString(SI_SELECTOR_TYPE.length, scratch);
-    switch (selectorAsString) {
-      case "min":
-        return SortedNumericSelector.Type.MIN;
-      case "max":
-        return SortedNumericSelector.Type.MAX;
-      default:
-        throw new CorruptIndexException("unable to parse SortedNumericSelector type: " + selectorAsString, input);
-    }
-  }
 
   @Override
   public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
@@ -434,120 +284,42 @@ public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOE
       SimpleTextUtil.writeNewline(output);
 
       for (int i = 0; i < numSortFields; ++i) {
         final SortField sortField = indexSort.getSort()[i];
+        IndexSorter sorter = sortField.getIndexSorter();
+        if (sorter == null) {
+          throw new IllegalStateException("Cannot serialize sort " + sortField);
+        }
 
-        SimpleTextUtil.write(output, SI_SORT_FIELD);
-        SimpleTextUtil.write(output, sortField.getField(), scratch);
+        SimpleTextUtil.write(output, SI_SORT_NAME);
+        SimpleTextUtil.write(output, sorter.getProviderName(), scratch);
         SimpleTextUtil.writeNewline(output);
 
         SimpleTextUtil.write(output, SI_SORT_TYPE);
-        final String sortTypeString;
-        final SortField.Type sortType;
-        final boolean multiValued;
-        if (sortField instanceof SortedSetSortField) {
-          sortType = SortField.Type.STRING;
-          multiValued = true;
-        } else if (sortField instanceof SortedNumericSortField) {
-          sortType = ((SortedNumericSortField) sortField).getNumericType();
-          multiValued = true;
-        } else {
-          sortType = sortField.getType();
-          multiValued = false;
-        }
-        switch (sortType) {
-          case STRING:
-            if (multiValued) {
-              sortTypeString = "multi_valued_string";
-            } else {
-              sortTypeString = "string";
-            }
-            break;
-          case LONG:
-            if (multiValued) {
-              sortTypeString = "multi_valued_long";
-            } else {
-              sortTypeString = "long";
-            }
-            break;
-          case INT:
-            if (multiValued) {
-              sortTypeString = "multi_valued_int";
-            } else {
-              sortTypeString = "int";
-            }
-            break;
-          case DOUBLE:
-            if (multiValued) {
-              sortTypeString = "multi_valued_double";
-            } else {
-              sortTypeString = "double";
-            }
-            break;
-          case FLOAT:
-            if (multiValued) {
-              sortTypeString = "multi_valued_float";
-            } else {
-              sortTypeString = "float";
-            }
-            break;
-          default:
-            throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
-        }
-
SimpleTextUtil.write(output, sortTypeString, scratch); - SimpleTextUtil.writeNewline(output); - - if (sortField instanceof SortedSetSortField) { - SortedSetSelector.Type selector = ((SortedSetSortField) sortField).getSelector(); - final String selectorString; - if (selector == SortedSetSelector.Type.MIN) { - selectorString = "min"; - } else if (selector == SortedSetSelector.Type.MIDDLE_MIN) { - selectorString = "middle_min"; - } else if (selector == SortedSetSelector.Type.MIDDLE_MAX) { - selectorString = "middle_max"; - } else if (selector == SortedSetSelector.Type.MAX) { - selectorString = "max"; - } else { - throw new IllegalStateException("Unexpected SortedSetSelector type selector: " + selector); - } - SimpleTextUtil.write(output, SI_SELECTOR_TYPE); - SimpleTextUtil.write(output, selectorString, scratch); - SimpleTextUtil.writeNewline(output); - } else if (sortField instanceof SortedNumericSortField) { - SortedNumericSelector.Type selector = ((SortedNumericSortField) sortField).getSelector(); - final String selectorString; - if (selector == SortedNumericSelector.Type.MIN) { - selectorString = "min"; - } else if (selector == SortedNumericSelector.Type.MAX) { - selectorString = "max"; - } else { - throw new IllegalStateException("Unexpected SortedNumericSelector type selector: " + selector); - } - SimpleTextUtil.write(output, SI_SELECTOR_TYPE); - SimpleTextUtil.write(output, selectorString, scratch); - SimpleTextUtil.writeNewline(output); - } - - SimpleTextUtil.write(output, SI_SORT_REVERSE); - SimpleTextUtil.write(output, Boolean.toString(sortField.getReverse()), scratch); + SimpleTextUtil.write(output, sortField.toString(), scratch); SimpleTextUtil.writeNewline(output); - SimpleTextUtil.write(output, SI_SORT_MISSING); - final Object missingValue = sortField.getMissingValue(); - final String missing; - if (missingValue == null) { - missing = "null"; - } else if (missingValue == SortField.STRING_FIRST) { - missing = "first"; - } else if (missingValue == SortField.STRING_LAST) { - missing = "last"; - } else { - missing = missingValue.toString(); - } - SimpleTextUtil.write(output, missing, scratch); + SimpleTextUtil.write(output, SI_SORT_BYTES); + BytesRefOutput b = new BytesRefOutput(); + SortFieldProvider.write(sortField, b); + SimpleTextUtil.write(output, b.bytes.get().toString(), scratch); SimpleTextUtil.writeNewline(output); } SimpleTextUtil.writeChecksum(output, scratch); } } + + static class BytesRefOutput extends DataOutput { + + final BytesRefBuilder bytes = new BytesRefBuilder(); + + @Override + public void writeByte(byte b) { + bytes.append(b); + } + + @Override + public void writeBytes(byte[] b, int offset, int length) { + bytes.append(b, offset, length); + } + } } diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/FSTDictionary.java b/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/FSTDictionary.java index 026e8724f315..191799c252fa 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/FSTDictionary.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/FSTDictionary.java @@ -71,10 +71,10 @@ public long ramBytesUsed() { @Override public void write(DataOutput output, BlockEncoder blockEncoder) throws IOException { if (blockEncoder == null) { - fst.save(output); + fst.save(output, output); } else { ByteBuffersDataOutput bytesDataOutput = ByteBuffersDataOutput.newResettableInstance(); - fst.save(bytesDataOutput); + fst.save(bytesDataOutput, bytesDataOutput); BlockEncoder.WritableBytes encodedBytes = 
blockEncoder.encode(bytesDataOutput.toDataInput(), bytesDataOutput.size()); output.writeVLong(encodedBytes.size()); encodedBytes.writeTo(output); @@ -98,8 +98,8 @@ protected static FSTDictionary read(DataInput input, BlockDecoder blockDecoder, isFSTOnHeap = true; } PositiveIntOutputs fstOutputs = PositiveIntOutputs.getSingleton(); - FST fst = isFSTOnHeap ? new FST<>(fstDataInput, fstOutputs) - : new FST<>(fstDataInput, fstOutputs, new OffHeapFSTStore()); + FST fst = isFSTOnHeap ? new FST<>(fstDataInput, fstDataInput, fstOutputs) + : new FST<>(fstDataInput, fstDataInput, fstOutputs, new OffHeapFSTStore()); return new FSTDictionary(fst); } diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/UniformSplitPostingsFormat.java b/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/UniformSplitPostingsFormat.java index f982ed3ad2eb..a58a1de7400a 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/UniformSplitPostingsFormat.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/UniformSplitPostingsFormat.java @@ -47,7 +47,9 @@ public class UniformSplitPostingsFormat extends PostingsFormat { */ public static final String TERMS_BLOCKS_EXTENSION = "ustb"; - public static final int VERSION_CURRENT = 0; + public static final int VERSION_START = 0; + public static final int VERSION_ENCODABLE_FIELDS_METADATA = 1; + public static final int VERSION_CURRENT = VERSION_ENCODABLE_FIELDS_METADATA; public static final String NAME = "UniformSplit"; @@ -74,10 +76,10 @@ public UniformSplitPostingsFormat() { * Must be greater than or equal to 0 and strictly less than {@code targetNumBlockLines}. * The block size will be {@code targetNumBlockLines}+-{@code deltaNumLines}. * The block size must always be less than or equal to {@link UniformSplitTermsWriter#MAX_NUM_BLOCK_LINES}. - * @param blockEncoder Optional block encoder, may be null if none. - * It can be used for compression or encryption. - * @param blockDecoder Optional block decoder, may be null if none. - * It can be used for compression or encryption. + * @param blockEncoder Optional block encoder, may be null if none. If present, it is used to encode all terms + * blocks, as well as the FST dictionary and the fields metadata. + * @param blockDecoder Optional block decoder, may be null if none. If present, it is used to decode all terms + * blocks, as well as the FST dictionary and the fields metadata. * @param dictionaryOnHeap Whether to force loading the terms dictionary on-heap. By default it is kept off-heap without * impact on performance. If block encoding/decoding is used, then the dictionary is always * loaded on-heap whatever this parameter value is. 
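The reworked @param docs above pin down the contract: a single blockEncoder/blockDecoder pair now covers the terms blocks, the FST dictionary and, from VERSION_ENCODABLE_FIELDS_METADATA on, the fields metadata. Below is a minimal sketch of such a pair, assuming only the shapes visible in this patch: encode(DataInput, long) returning a BlockEncoder.WritableBytes, and decode(DataInput, long) returning a BytesRef. The RotatingBlockCodec name and the byte-rotation scheme are invented for illustration (loosely echoing the Rot13 test format exercised later in this change); a real implementation would plug compression or encryption in here.

```java
import java.io.IOException;

import org.apache.lucene.codecs.uniformsplit.BlockDecoder;
import org.apache.lucene.codecs.uniformsplit.BlockEncoder;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.util.BytesRef;

/** Toy reversible codec: rotate every byte on encode, rotate back on decode. */
public final class RotatingBlockCodec implements BlockEncoder, BlockDecoder {

  private static final int OFFSET = 13; // arbitrary rotation, invented for this sketch

  @Override
  public WritableBytes encode(DataInput blockInput, long length) throws IOException {
    byte[] encoded = new byte[Math.toIntExact(length)];
    blockInput.readBytes(encoded, 0, encoded.length);
    for (int i = 0; i < encoded.length; i++) {
      encoded[i] = (byte) (encoded[i] + OFFSET);
    }
    return new WritableBytes() {
      @Override
      public long size() {
        return encoded.length; // the caller writes this as a VLong before the block
      }

      @Override
      public void writeTo(DataOutput output) throws IOException {
        output.writeBytes(encoded, 0, encoded.length);
      }
    };
  }

  @Override
  public BytesRef decode(DataInput blockInput, long length) throws IOException {
    byte[] decoded = new byte[Math.toIntExact(length)];
    blockInput.readBytes(decoded, 0, decoded.length);
    for (int i = 0; i < decoded.length; i++) {
      decoded[i] = (byte) (decoded[i] - OFFSET);
    }
    return new BytesRef(decoded);
  }
}
```

Because the two operations are exact inverses, the same instance can be handed to the writer as blockEncoder and to the reader as blockDecoder.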
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/UniformSplitTermsReader.java b/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/UniformSplitTermsReader.java index 9b2552b5017f..377919dc81b7 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/UniformSplitTermsReader.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/UniformSplitTermsReader.java @@ -34,14 +34,14 @@ import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.Terms; +import org.apache.lucene.store.ByteArrayDataInput; +import org.apache.lucene.store.DataInput; import org.apache.lucene.store.IndexInput; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.RamUsageEstimator; -import static org.apache.lucene.codecs.uniformsplit.UniformSplitPostingsFormat.NAME; -import static org.apache.lucene.codecs.uniformsplit.UniformSplitPostingsFormat.TERMS_BLOCKS_EXTENSION; -import static org.apache.lucene.codecs.uniformsplit.UniformSplitPostingsFormat.TERMS_DICTIONARY_EXTENSION; -import static org.apache.lucene.codecs.uniformsplit.UniformSplitPostingsFormat.VERSION_CURRENT; +import static org.apache.lucene.codecs.uniformsplit.UniformSplitPostingsFormat.*; /** * A block-based terms index and dictionary based on the Uniform Split technique. @@ -51,12 +51,11 @@ */ public class UniformSplitTermsReader extends FieldsProducer { - protected static final int VERSION_START = 0; - private static final long BASE_RAM_USAGE = RamUsageEstimator.shallowSizeOfInstance(UniformSplitTermsReader.class) + RamUsageEstimator.shallowSizeOfInstance(IndexInput.class) * 2; protected final PostingsReaderBase postingsReader; + protected final int version; protected final IndexInput blockInput; protected final IndexInput dictionaryInput; @@ -93,7 +92,7 @@ protected UniformSplitTermsReader(PostingsReaderBase postingsReader, SegmentRead String termsName = IndexFileNames.segmentFileName(segmentName, state.segmentSuffix, termsBlocksExtension); blockInput = state.directory.openInput(termsName, state.context); - int version = CodecUtil.checkIndexHeader(blockInput, codecName, versionStart, + version = CodecUtil.checkIndexHeader(blockInput, codecName, versionStart, versionCurrent, state.segmentInfo.getId(), state.segmentSuffix); String indexName = IndexFileNames.segmentFileName(segmentName, state.segmentSuffix, dictionaryExtension); dictionaryInput = state.directory.openInput(indexName, state.context); @@ -105,7 +104,8 @@ protected UniformSplitTermsReader(PostingsReaderBase postingsReader, SegmentRead CodecUtil.retrieveChecksum(blockInput); seekFieldsMetadata(blockInput); - Collection fieldMetadataCollection = parseFieldsMetadata(blockInput, state.fieldInfos, fieldMetadataReader, state.segmentInfo.maxDoc()); + Collection fieldMetadataCollection = + readFieldsMetadata(blockInput, blockDecoder, state.fieldInfos, fieldMetadataReader, state.segmentInfo.maxDoc()); fieldToTermsMap = new HashMap<>(); this.blockInput = blockInput; @@ -143,16 +143,36 @@ protected IndexDictionary.BrowserSupplier createDictionaryBrowserSupplier(Segmen /** * @param indexInput {@link IndexInput} must be positioned to the fields metadata * details by calling {@link #seekFieldsMetadata(IndexInput)} before this call. + * @param blockDecoder Optional block decoder, may be null if none. 
*/ - protected static Collection parseFieldsMetadata(IndexInput indexInput, FieldInfos fieldInfos, - FieldMetadata.Serializer fieldMetadataReader, int maxNumDocs) throws IOException { + protected Collection readFieldsMetadata(IndexInput indexInput, BlockDecoder blockDecoder, FieldInfos fieldInfos, + FieldMetadata.Serializer fieldMetadataReader, int maxNumDocs) throws IOException { int numFields = indexInput.readVInt(); if (numFields < 0) { throw new CorruptIndexException("Illegal number of fields= " + numFields, indexInput); } + return (blockDecoder != null && version >= VERSION_ENCODABLE_FIELDS_METADATA) ? + readEncodedFieldsMetadata(numFields, indexInput, blockDecoder, fieldInfos, fieldMetadataReader, maxNumDocs) + : readUnencodedFieldsMetadata(numFields, indexInput, fieldInfos, fieldMetadataReader, maxNumDocs); + } + + protected Collection readEncodedFieldsMetadata(int numFields, DataInput metadataInput, BlockDecoder blockDecoder, + FieldInfos fieldInfos, FieldMetadata.Serializer fieldMetadataReader, + int maxNumDocs) throws IOException { + long encodedLength = metadataInput.readVLong(); + if (encodedLength < 0) { + throw new CorruptIndexException("Illegal encoded length: " + encodedLength, metadataInput); + } + BytesRef decodedBytes = blockDecoder.decode(metadataInput, encodedLength); + DataInput decodedMetadataInput = new ByteArrayDataInput(decodedBytes.bytes, 0, decodedBytes.length); + return readUnencodedFieldsMetadata(numFields, decodedMetadataInput, fieldInfos, fieldMetadataReader, maxNumDocs); + } + + protected Collection readUnencodedFieldsMetadata(int numFields, DataInput metadataInput, FieldInfos fieldInfos, + FieldMetadata.Serializer fieldMetadataReader, int maxNumDocs) throws IOException { Collection fieldMetadataCollection = new ArrayList<>(numFields); for (int i = 0; i < numFields; i++) { - fieldMetadataCollection.add(fieldMetadataReader.read(indexInput, fieldInfos, maxNumDocs)); + fieldMetadataCollection.add(fieldMetadataReader.read(metadataInput, fieldInfos, maxNumDocs)); } return fieldMetadataCollection; } @@ -212,7 +232,7 @@ protected long getTermsRamBytesUsed() { /** * Positions the given {@link IndexInput} at the beginning of the fields metadata. */ - protected static void seekFieldsMetadata(IndexInput indexInput) throws IOException { + protected void seekFieldsMetadata(IndexInput indexInput) throws IOException { indexInput.seek(indexInput.length() - CodecUtil.footerLength() - 8); indexInput.seek(indexInput.readLong()); } diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/UniformSplitTermsWriter.java b/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/UniformSplitTermsWriter.java index 101b6b5942f0..c4e089f56274 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/UniformSplitTermsWriter.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/UniformSplitTermsWriter.java @@ -249,11 +249,26 @@ public void write(Fields fields, NormsProducer normsProducer) throws IOException protected void writeFieldsMetadata(int fieldsNumber, ByteBuffersDataOutput fieldsOutput) throws IOException { long fieldsStartPosition = blockOutput.getFilePointer(); blockOutput.writeVInt(fieldsNumber); - fieldsOutput.copyTo(blockOutput); + if (blockEncoder == null) { + writeUnencodedFieldsMetadata(fieldsOutput); + } else { + writeEncodedFieldsMetadata(fieldsOutput); + } + // Must be a fixed length. Read by UniformSplitTermsReader when seeking fields metadata. 
blockOutput.writeLong(fieldsStartPosition); CodecUtil.writeFooter(blockOutput); } + protected void writeUnencodedFieldsMetadata(ByteBuffersDataOutput fieldsOutput) throws IOException { + fieldsOutput.copyTo(blockOutput); + } + + protected void writeEncodedFieldsMetadata(ByteBuffersDataOutput fieldsOutput) throws IOException { + BlockEncoder.WritableBytes encodedBytes = blockEncoder.encode(fieldsOutput.toDataInput(), fieldsOutput.size()); + blockOutput.writeVLong(encodedBytes.size()); + encodedBytes.writeTo(blockOutput); + } + /** * @return 1 if the field was written; 0 otherwise. */ diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/sharedterms/STUniformSplitPostingsFormat.java b/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/sharedterms/STUniformSplitPostingsFormat.java index 57c154099045..730728ba7f45 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/sharedterms/STUniformSplitPostingsFormat.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/sharedterms/STUniformSplitPostingsFormat.java @@ -54,7 +54,7 @@ public class STUniformSplitPostingsFormat extends UniformSplitPostingsFormat { */ public static final String TERMS_BLOCKS_EXTENSION = "stustb"; - public static final int VERSION_CURRENT = 0; + public static final int VERSION_CURRENT = UniformSplitPostingsFormat.VERSION_CURRENT; public static final String NAME = "SharedTermsUniformSplit"; diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/sharedterms/STUniformSplitTermsReader.java b/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/sharedterms/STUniformSplitTermsReader.java index cc25a30cef6b..5c2b24b5fca4 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/sharedterms/STUniformSplitTermsReader.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/sharedterms/STUniformSplitTermsReader.java @@ -30,10 +30,7 @@ import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.store.IndexInput; -import static org.apache.lucene.codecs.uniformsplit.sharedterms.STUniformSplitPostingsFormat.NAME; -import static org.apache.lucene.codecs.uniformsplit.sharedterms.STUniformSplitPostingsFormat.TERMS_BLOCKS_EXTENSION; -import static org.apache.lucene.codecs.uniformsplit.sharedterms.STUniformSplitPostingsFormat.TERMS_DICTIONARY_EXTENSION; -import static org.apache.lucene.codecs.uniformsplit.sharedterms.STUniformSplitPostingsFormat.VERSION_CURRENT; +import static org.apache.lucene.codecs.uniformsplit.sharedterms.STUniformSplitPostingsFormat.*; /** * A block-based terms index and dictionary based on the Uniform Split technique, diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/sharedterms/UnionFieldMetadataBuilder.java b/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/sharedterms/UnionFieldMetadataBuilder.java index 85b6a27fd3bb..4cf5c2623ae2 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/sharedterms/UnionFieldMetadataBuilder.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/uniformsplit/sharedterms/UnionFieldMetadataBuilder.java @@ -33,15 +33,9 @@ public class UnionFieldMetadataBuilder { private BytesRef maxLastTerm; public UnionFieldMetadataBuilder() { - reset(); - } - - public UnionFieldMetadataBuilder reset() { dictionaryStartFP = -1; minStartBlockFP = Long.MAX_VALUE; maxEndBlockFP = Long.MIN_VALUE; - maxLastTerm = null; - return this; } public UnionFieldMetadataBuilder addFieldMetadata(FieldMetadata fieldMetadata) { 
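The comment added to writeFieldsMetadata ("Must be a fixed length...") is the crux of the layout: the start position of the (possibly encoded) fields metadata is written as a fixed 8-byte long immediately before the codec footer, so the reader can locate the metadata from the file length alone. A standalone sketch of that back-pointer pattern, with invented class and method names and assuming the file also carries the usual codec header (the real logic lives in UniformSplitTermsWriter#writeFieldsMetadata and UniformSplitTermsReader#seekFieldsMetadata):

```java
import java.io.IOException;

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;

/** Sketch of the fixed-width back-pointer layout used by the terms-blocks file. */
final class BackPointerLayout {

  /** Writer side: payload, then an 8-byte pointer to it, then the codec footer. */
  static void writeWithBackPointer(IndexOutput out, byte[] payload) throws IOException {
    long payloadStart = out.getFilePointer();
    out.writeBytes(payload, 0, payload.length); // variable-length payload
    out.writeLong(payloadStart);                // fixed 8 bytes, just before the footer
    CodecUtil.writeFooter(out);
  }

  /** Reader side: mirrors UniformSplitTermsReader#seekFieldsMetadata. */
  static void seekPayload(IndexInput in) throws IOException {
    in.seek(in.length() - CodecUtil.footerLength() - 8); // jump to the back pointer
    in.seek(in.readLong());                              // follow it to the payload
  }
}
```

Keeping the pointer fixed-width is what makes the seek computable from the file length; a VLong there could not be located without scanning.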
diff --git a/lucene/codecs/src/test/org/apache/lucene/codecs/uniformsplit/TestUniformSplitPostingFormat.java b/lucene/codecs/src/test/org/apache/lucene/codecs/uniformsplit/TestUniformSplitPostingFormat.java index db1d6c12e61a..9a68a14c21a2 100644 --- a/lucene/codecs/src/test/org/apache/lucene/codecs/uniformsplit/TestUniformSplitPostingFormat.java +++ b/lucene/codecs/src/test/org/apache/lucene/codecs/uniformsplit/TestUniformSplitPostingFormat.java @@ -51,17 +51,26 @@ protected Codec getCodec() { @Before public void initialize() { + initializeInner(); + } + + protected void initializeInner() { UniformSplitRot13PostingsFormat.resetEncodingFlags(); } @After public void checkEncodingCalled() { if (checkEncoding) { - assertTrue(UniformSplitRot13PostingsFormat.blocksEncoded); - assertTrue(UniformSplitRot13PostingsFormat.dictionaryEncoded); - if (shouldCheckDecoderWasCalled) { - assertTrue(UniformSplitRot13PostingsFormat.decoderCalled); - } + checkEncodingCalledInner(); + } + } + + protected void checkEncodingCalledInner() { + assertTrue(UniformSplitRot13PostingsFormat.blocksEncoded); + assertTrue(UniformSplitRot13PostingsFormat.fieldsMetadataEncoded); + assertTrue(UniformSplitRot13PostingsFormat.dictionaryEncoded); + if (shouldCheckDecoderWasCalled) { + assertTrue(UniformSplitRot13PostingsFormat.decoderCalled); } } diff --git a/lucene/codecs/src/test/org/apache/lucene/codecs/uniformsplit/sharedterms/STBlockReaderTest.java b/lucene/codecs/src/test/org/apache/lucene/codecs/uniformsplit/sharedterms/TestSTBlockReader.java similarity index 98% rename from lucene/codecs/src/test/org/apache/lucene/codecs/uniformsplit/sharedterms/STBlockReaderTest.java rename to lucene/codecs/src/test/org/apache/lucene/codecs/uniformsplit/sharedterms/TestSTBlockReader.java index 6d09fe36e16b..5707fb4f6a03 100644 --- a/lucene/codecs/src/test/org/apache/lucene/codecs/uniformsplit/sharedterms/STBlockReaderTest.java +++ b/lucene/codecs/src/test/org/apache/lucene/codecs/uniformsplit/sharedterms/TestSTBlockReader.java @@ -51,9 +51,9 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LuceneTestCase; -public class STBlockReaderTest extends LuceneTestCase { +public class TestSTBlockReader extends LuceneTestCase { - private static final String MOCK_BLOCK_OUTPUT_NAME = "STBlockReaderTest.tmp"; + private static final String MOCK_BLOCK_OUTPUT_NAME = "TestSTBlockReader.tmp"; private FieldInfos fieldInfos; private List blockLines; diff --git a/lucene/common-build.xml b/lucene/common-build.xml index e7fc4174de81..7bb6e55081e2 100644 --- a/lucene/common-build.xml +++ b/lucene/common-build.xml @@ -2342,7 +2342,7 @@ ${ant.project.name}.test.dependencies=${test.classpath.list} - diff --git a/lucene/core/build.gradle b/lucene/core/build.gradle index f5609bdb7037..989c57f09357 100644 --- a/lucene/core/build.gradle +++ b/lucene/core/build.gradle @@ -15,9 +15,10 @@ * limitations under the License. */ - apply plugin: 'java-library' +description = 'Lucene core library' + dependencies { testImplementation project(':lucene:codecs') testImplementation project(':lucene:test-framework') diff --git a/lucene/core/src/java/org/apache/lucene/codecs/Codec.java b/lucene/core/src/java/org/apache/lucene/codecs/Codec.java index 07797c6f95d0..8b5ca14ff898 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/Codec.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/Codec.java @@ -57,7 +57,7 @@ static NamedSPILoader getLoader() { } // TODO: should we use this, or maybe a system property is better? 
- static Codec defaultCodec = LOADER.lookup("Lucene84"); + static Codec defaultCodec = LOADER.lookup("Lucene86"); } private final String name; diff --git a/lucene/core/src/java/org/apache/lucene/codecs/CodecUtil.java b/lucene/core/src/java/org/apache/lucene/codecs/CodecUtil.java index c49946b7ffbf..8c40e2adcd62 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/CodecUtil.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/CodecUtil.java @@ -448,24 +448,27 @@ public static void checkFooter(ChecksumIndexInput in, Throwable priorException) checkFooter(in); } else { try { + // If we have evidence of corruption then we return the corruption as the + // main exception and the prior exception gets suppressed. Otherwise we + // return the prior exception with a suppressed exception that notifies + // the user that checksums matched. long remaining = in.length() - in.getFilePointer(); if (remaining < footerLength()) { // corruption caused us to read into the checksum footer already: we can't proceed - priorException.addSuppressed(new CorruptIndexException("checksum status indeterminate: remaining=" + remaining + - ", please run checkindex for more details", in)); + throw new CorruptIndexException("checksum status indeterminate: remaining=" + remaining + + "; please run checkindex for more details", in); } else { // otherwise, skip any unread bytes. in.skipBytes(remaining - footerLength()); // now check the footer - try { - long checksum = checkFooter(in); - priorException.addSuppressed(new CorruptIndexException("checksum passed (" + Long.toHexString(checksum) + - "). possibly transient resource issue, or a Lucene or JVM bug", in)); - } catch (CorruptIndexException t) { - priorException.addSuppressed(t); - } + long checksum = checkFooter(in); + priorException.addSuppressed(new CorruptIndexException("checksum passed (" + Long.toHexString(checksum) + + "). possibly transient resource issue, or a Lucene or JVM bug", in)); } + } catch (CorruptIndexException corruptException) { + corruptException.addSuppressed(priorException); + throw corruptException; } catch (Throwable t) { // catch-all for things that shouldn't go wrong (e.g. OOM during readInt) but could... priorException.addSuppressed(new CorruptIndexException("checksum status indeterminate: unexpected exception", in, t)); @@ -487,7 +490,25 @@ public static long retrieveChecksum(IndexInput in) throws IOException { validateFooter(in); return readCRC(in); } - + + /** + * Returns (but does not validate) the checksum previously written by {@link #checkFooter}. 
+ * @return actual checksum value + * @throws IOException if the footer is invalid + */ + public static long retrieveChecksum(IndexInput in, long expectedLength) throws IOException { + if (expectedLength < footerLength()) { + throw new IllegalArgumentException("expectedLength cannot be less than the footer length"); + } + if (in.length() < expectedLength) { + throw new CorruptIndexException("truncated file: length=" + in.length() + " but expectedLength==" + expectedLength, in); + } else if (in.length() > expectedLength) { + throw new CorruptIndexException("file too long: length=" + in.length() + " but expectedLength==" + expectedLength, in); + } + + return retrieveChecksum(in); + } + private static void validateFooter(IndexInput in) throws IOException { long remaining = in.length() - in.getFilePointer(); long expected = footerLength(); diff --git a/lucene/core/src/java/org/apache/lucene/codecs/DocValuesConsumer.java b/lucene/core/src/java/org/apache/lucene/codecs/DocValuesConsumer.java index 408d97814785..c4bae5c0b18d 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/DocValuesConsumer.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/DocValuesConsumer.java @@ -393,7 +393,7 @@ public SortedNumericDocValues getSortedNumeric(FieldInfo fieldInfo) throws IOExc } } if (values == null) { - values = DocValues.emptySortedNumeric(mergeState.maxDocs[i]); + values = DocValues.emptySortedNumeric(); } cost += values.cost(); subs.add(new SortedNumericDocValuesSub(mergeState.docMaps[i], values)); diff --git a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsReader.java b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsReader.java index 4847017ca80a..bee914ba0d02 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsReader.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsReader.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.codecs.blocktree; - import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -35,6 +34,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.Terms; +import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.IndexInput; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; @@ -97,13 +97,20 @@ public final class BlockTreeTermsReader extends FieldsProducer { /** Suffixes are compressed to save space. */ public static final int VERSION_COMPRESSED_SUFFIXES = 5; + /** Metadata is written to its own file. */ + public static final int VERSION_META_FILE = 6; + /** Current terms format. 
*/ - public static final int VERSION_CURRENT = VERSION_COMPRESSED_SUFFIXES; + public static final int VERSION_CURRENT = VERSION_META_FILE; /** Extension of terms index file */ static final String TERMS_INDEX_EXTENSION = "tip"; final static String TERMS_INDEX_CODEC_NAME = "BlockTreeTermsIndex"; + /** Extension of terms meta file */ + static final String TERMS_META_EXTENSION = "tmd"; + final static String TERMS_META_CODEC_NAME = "BlockTreeTermsMeta"; + // Open input to the main terms dict file (_X.tib) final IndexInput termsIn; // Open input to the terms index file (_X.tip) @@ -128,9 +135,9 @@ public BlockTreeTermsReader(PostingsReaderBase postingsReader, SegmentReadState this.postingsReader = postingsReader; this.segment = state.segmentInfo.name; - - String termsName = IndexFileNames.segmentFileName(segment, state.segmentSuffix, TERMS_EXTENSION); + try { + String termsName = IndexFileNames.segmentFileName(segment, state.segmentSuffix, TERMS_EXTENSION); termsIn = state.directory.openInput(termsName, state.context); version = CodecUtil.checkIndexHeader(termsIn, TERMS_CODEC_NAME, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix); @@ -138,66 +145,106 @@ public BlockTreeTermsReader(PostingsReaderBase postingsReader, SegmentReadState indexIn = state.directory.openInput(indexName, state.context); CodecUtil.checkIndexHeader(indexIn, TERMS_INDEX_CODEC_NAME, version, version, state.segmentInfo.getId(), state.segmentSuffix); - // Have PostingsReader init itself - postingsReader.init(termsIn, state); + if (version < VERSION_META_FILE) { + // Have PostingsReader init itself + postingsReader.init(termsIn, state); - // Verifying the checksum against all bytes would be too costly, but for now we at least - // verify proper structure of the checksum footer. This is cheap and can detect some forms - // of corruption such as file truncation. - CodecUtil.retrieveChecksum(indexIn); - CodecUtil.retrieveChecksum(termsIn); + // Verifying the checksum against all bytes would be too costly, but for now we at least + // verify proper structure of the checksum footer. This is cheap and can detect some forms + // of corruption such as file truncation. + CodecUtil.retrieveChecksum(indexIn); + CodecUtil.retrieveChecksum(termsIn); + } // Read per-field details - seekDir(termsIn); - seekDir(indexIn); + String metaName = IndexFileNames.segmentFileName(segment, state.segmentSuffix, TERMS_META_EXTENSION); + Map fieldMap = null; + Throwable priorE = null; + long indexLength = -1, termsLength = -1; + try (ChecksumIndexInput metaIn = version >= VERSION_META_FILE ? 
state.directory.openChecksumInput(metaName, state.context) : null) { + try { + final IndexInput indexMetaIn, termsMetaIn; + if (version >= VERSION_META_FILE) { + CodecUtil.checkIndexHeader(metaIn, TERMS_META_CODEC_NAME, version, version, state.segmentInfo.getId(), state.segmentSuffix); + indexMetaIn = termsMetaIn = metaIn; + postingsReader.init(metaIn, state); + } else { + seekDir(termsIn); + seekDir(indexIn); + indexMetaIn = indexIn; + termsMetaIn = termsIn; + } - final int numFields = termsIn.readVInt(); - if (numFields < 0) { - throw new CorruptIndexException("invalid numFields: " + numFields, termsIn); - } - fieldMap = new HashMap<>((int) (numFields / 0.75f) + 1); - for (int i = 0; i < numFields; ++i) { - final int field = termsIn.readVInt(); - final long numTerms = termsIn.readVLong(); - if (numTerms <= 0) { - throw new CorruptIndexException("Illegal numTerms for field number: " + field, termsIn); - } - final BytesRef rootCode = readBytesRef(termsIn); - final FieldInfo fieldInfo = state.fieldInfos.fieldInfo(field); - if (fieldInfo == null) { - throw new CorruptIndexException("invalid field number: " + field, termsIn); - } - final long sumTotalTermFreq = termsIn.readVLong(); - // when frequencies are omitted, sumDocFreq=sumTotalTermFreq and only one value is written. - final long sumDocFreq = fieldInfo.getIndexOptions() == IndexOptions.DOCS ? sumTotalTermFreq : termsIn.readVLong(); - final int docCount = termsIn.readVInt(); - if (version < VERSION_META_LONGS_REMOVED) { - final int longsSize = termsIn.readVInt(); - if (longsSize < 0) { - throw new CorruptIndexException("invalid longsSize for field: " + fieldInfo.name + ", longsSize=" + longsSize, termsIn); + final int numFields = termsMetaIn.readVInt(); + if (numFields < 0) { + throw new CorruptIndexException("invalid numFields: " + numFields, termsMetaIn); + } + fieldMap = new HashMap<>((int) (numFields / 0.75f) + 1); + for (int i = 0; i < numFields; ++i) { + final int field = termsMetaIn.readVInt(); + final long numTerms = termsMetaIn.readVLong(); + if (numTerms <= 0) { + throw new CorruptIndexException("Illegal numTerms for field number: " + field, termsMetaIn); + } + final BytesRef rootCode = readBytesRef(termsMetaIn); + final FieldInfo fieldInfo = state.fieldInfos.fieldInfo(field); + if (fieldInfo == null) { + throw new CorruptIndexException("invalid field number: " + field, termsMetaIn); + } + final long sumTotalTermFreq = termsMetaIn.readVLong(); + // when frequencies are omitted, sumDocFreq=sumTotalTermFreq and only one value is written. + final long sumDocFreq = fieldInfo.getIndexOptions() == IndexOptions.DOCS ? 
sumTotalTermFreq : termsMetaIn.readVLong(); + final int docCount = termsMetaIn.readVInt(); + if (version < VERSION_META_LONGS_REMOVED) { + final int longsSize = termsMetaIn.readVInt(); + if (longsSize < 0) { + throw new CorruptIndexException("invalid longsSize for field: " + fieldInfo.name + ", longsSize=" + longsSize, termsMetaIn); + } + } + BytesRef minTerm = readBytesRef(termsMetaIn); + BytesRef maxTerm = readBytesRef(termsMetaIn); + if (docCount < 0 || docCount > state.segmentInfo.maxDoc()) { // #docs with field must be <= #docs + throw new CorruptIndexException("invalid docCount: " + docCount + " maxDoc: " + state.segmentInfo.maxDoc(), termsMetaIn); + } + if (sumDocFreq < docCount) { // #postings must be >= #docs with field + throw new CorruptIndexException("invalid sumDocFreq: " + sumDocFreq + " docCount: " + docCount, termsMetaIn); + } + if (sumTotalTermFreq < sumDocFreq) { // #positions must be >= #postings + throw new CorruptIndexException("invalid sumTotalTermFreq: " + sumTotalTermFreq + " sumDocFreq: " + sumDocFreq, termsMetaIn); + } + final long indexStartFP = indexMetaIn.readVLong(); + FieldReader previous = fieldMap.put(fieldInfo.name, + new FieldReader(this, fieldInfo, numTerms, rootCode, sumTotalTermFreq, sumDocFreq, docCount, + indexStartFP, indexMetaIn, indexIn, minTerm, maxTerm)); + if (previous != null) { + throw new CorruptIndexException("duplicate field: " + fieldInfo.name, termsMetaIn); + } + } + if (version >= VERSION_META_FILE) { + indexLength = metaIn.readLong(); + termsLength = metaIn.readLong(); + } + } catch (Throwable exception) { + priorE = exception; + } finally { + if (metaIn != null) { + CodecUtil.checkFooter(metaIn, priorE); + } else if (priorE != null) { + IOUtils.rethrowAlways(priorE); } } - BytesRef minTerm = readBytesRef(termsIn); - BytesRef maxTerm = readBytesRef(termsIn); - if (docCount < 0 || docCount > state.segmentInfo.maxDoc()) { // #docs with field must be <= #docs - throw new CorruptIndexException("invalid docCount: " + docCount + " maxDoc: " + state.segmentInfo.maxDoc(), termsIn); - } - if (sumDocFreq < docCount) { // #postings must be >= #docs with field - throw new CorruptIndexException("invalid sumDocFreq: " + sumDocFreq + " docCount: " + docCount, termsIn); - } - if (sumTotalTermFreq < sumDocFreq) { // #positions must be >= #postings - throw new CorruptIndexException("invalid sumTotalTermFreq: " + sumTotalTermFreq + " sumDocFreq: " + sumDocFreq, termsIn); - } - final long indexStartFP = indexIn.readVLong(); - FieldReader previous = fieldMap.put(fieldInfo.name, - new FieldReader(this, fieldInfo, numTerms, rootCode, sumTotalTermFreq, sumDocFreq, docCount, - indexStartFP, indexIn, minTerm, maxTerm)); - if (previous != null) { - throw new CorruptIndexException("duplicate field: " + fieldInfo.name, termsIn); - } + } + if (version >= VERSION_META_FILE) { + // At this point the checksum of the meta file has been verified so the lengths are likely correct + CodecUtil.retrieveChecksum(indexIn, indexLength); + CodecUtil.retrieveChecksum(termsIn, termsLength); + } else { + assert indexLength == -1 : indexLength; + assert termsLength == -1 : termsLength; } List fieldList = new ArrayList<>(fieldMap.keySet()); fieldList.sort(null); + this.fieldMap = fieldMap; this.fieldList = Collections.unmodifiableList(fieldList); success = true; } finally { diff --git a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsWriter.java index d56a45e9a87e..bb7df7b11617 
100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsWriter.java @@ -211,6 +211,7 @@ public final class BlockTreeTermsWriter extends FieldsConsumer { //private final static boolean SAVE_DOT_FILES = false; + private final IndexOutput metaOut; private final IndexOutput termsOut; private final IndexOutput indexOut; final int maxDoc; @@ -220,34 +221,7 @@ public final class BlockTreeTermsWriter extends FieldsConsumer { final PostingsWriterBase postingsWriter; final FieldInfos fieldInfos; - private static class FieldMetaData { - public final FieldInfo fieldInfo; - public final BytesRef rootCode; - public final long numTerms; - public final long indexStartFP; - public final long sumTotalTermFreq; - public final long sumDocFreq; - public final int docCount; - public final BytesRef minTerm; - public final BytesRef maxTerm; - - public FieldMetaData(FieldInfo fieldInfo, BytesRef rootCode, long numTerms, long indexStartFP, long sumTotalTermFreq, long sumDocFreq, int docCount, - BytesRef minTerm, BytesRef maxTerm) { - assert numTerms > 0; - this.fieldInfo = fieldInfo; - assert rootCode != null: "field=" + fieldInfo.name + " numTerms=" + numTerms; - this.rootCode = rootCode; - this.indexStartFP = indexStartFP; - this.numTerms = numTerms; - this.sumTotalTermFreq = sumTotalTermFreq; - this.sumDocFreq = sumDocFreq; - this.docCount = docCount; - this.minTerm = minTerm; - this.maxTerm = maxTerm; - } - } - - private final List fields = new ArrayList<>(); + private final List fields = new ArrayList<>(); /** Create a new writer. The number of items (terms or * sub-blocks) per block will aim to be between @@ -272,7 +246,7 @@ public BlockTreeTermsWriter(SegmentWriteState state, final String termsName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, BlockTreeTermsReader.TERMS_EXTENSION); termsOut = state.directory.createOutput(termsName, state.context); boolean success = false; - IndexOutput indexOut = null; + IndexOutput metaOut = null, indexOut = null; try { CodecUtil.writeIndexHeader(termsOut, BlockTreeTermsReader.TERMS_CODEC_NAME, BlockTreeTermsReader.VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix); @@ -283,27 +257,23 @@ public BlockTreeTermsWriter(SegmentWriteState state, state.segmentInfo.getId(), state.segmentSuffix); //segment = state.segmentInfo.name; - postingsWriter.init(termsOut, state); // have consumer write its format/header - + final String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, BlockTreeTermsReader.TERMS_META_EXTENSION); + metaOut = state.directory.createOutput(metaName, state.context); + CodecUtil.writeIndexHeader(metaOut, BlockTreeTermsReader.TERMS_META_CODEC_NAME, BlockTreeTermsReader.VERSION_CURRENT, + state.segmentInfo.getId(), state.segmentSuffix); + + postingsWriter.init(metaOut, state); // have consumer write its format/header + + this.metaOut = metaOut; this.indexOut = indexOut; success = true; } finally { if (!success) { - IOUtils.closeWhileHandlingException(termsOut, indexOut); + IOUtils.closeWhileHandlingException(metaOut, termsOut, indexOut); } } } - /** Writes the terms file trailer. */ - private void writeTrailer(IndexOutput out, long dirStart) throws IOException { - out.writeLong(dirStart); - } - - /** Writes the index file trailer. 
*/ - private void writeIndexTrailer(IndexOutput indexOut, long dirStart) throws IOException { - indexOut.writeLong(dirStart); - } - /** Throws {@code IllegalArgumentException} if any of these settings * is invalid. */ public static void validateSettings(int minItemsInBlock, int maxItemsInBlock) { @@ -548,7 +518,6 @@ class TermsWriter { final FixedBitSet docsSeen; long sumTotalTermFreq; long sumDocFreq; - long indexStartFP; // Records index into pending where the current prefix at that // length "started"; for example, if current term starts with 't', @@ -1006,11 +975,27 @@ public void finish() throws IOException { assert pending.size() == 1 && !pending.get(0).isTerm: "pending.size()=" + pending.size() + " pending=" + pending; final PendingBlock root = (PendingBlock) pending.get(0); assert root.prefix.length == 0; - assert root.index.getEmptyOutput() != null; - + final BytesRef rootCode = root.index.getEmptyOutput(); + assert rootCode != null; + + ByteBuffersDataOutput metaOut = new ByteBuffersDataOutput(); + fields.add(metaOut); + + metaOut.writeVInt(fieldInfo.number); + metaOut.writeVLong(numTerms); + metaOut.writeVInt(rootCode.length); + metaOut.writeBytes(rootCode.bytes, rootCode.offset, rootCode.length); + assert fieldInfo.getIndexOptions() != IndexOptions.NONE; + if (fieldInfo.getIndexOptions() != IndexOptions.DOCS) { + metaOut.writeVLong(sumTotalTermFreq); + } + metaOut.writeVLong(sumDocFreq); + metaOut.writeVInt(docsSeen.cardinality()); + writeBytesRef(metaOut, new BytesRef(firstPendingTerm.termBytes)); + writeBytesRef(metaOut, new BytesRef(lastPendingTerm.termBytes)); + metaOut.writeVLong(indexOut.getFilePointer()); // Write FST to index - indexStartFP = indexOut.getFilePointer(); - root.index.save(indexOut); + root.index.save(metaOut, indexOut); //System.out.println(" write FST " + indexStartFP + " field=" + fieldInfo.name); /* @@ -1022,20 +1007,7 @@ public void finish() throws IOException { w.close(); } */ - assert firstPendingTerm != null; - BytesRef minTerm = new BytesRef(firstPendingTerm.termBytes); - - assert lastPendingTerm != null; - BytesRef maxTerm = new BytesRef(lastPendingTerm.termBytes); - - fields.add(new FieldMetaData(fieldInfo, - ((PendingBlock) pending.get(0)).index.getEmptyOutput(), - numTerms, - indexStartFP, - sumTotalTermFreq, - sumDocFreq, - docsSeen.cardinality(), - minTerm, maxTerm)); + } else { assert sumTotalTermFreq == 0 || fieldInfo.getIndexOptions() == IndexOptions.DOCS && sumTotalTermFreq == -1; assert sumDocFreq == 0; @@ -1060,47 +1032,29 @@ public void close() throws IOException { return; } closed = true; - + boolean success = false; try { - - final long dirStart = termsOut.getFilePointer(); - final long indexDirStart = indexOut.getFilePointer(); - - termsOut.writeVInt(fields.size()); - - for(FieldMetaData field : fields) { - //System.out.println(" field " + field.fieldInfo.name + " " + field.numTerms + " terms"); - termsOut.writeVInt(field.fieldInfo.number); - assert field.numTerms > 0; - termsOut.writeVLong(field.numTerms); - termsOut.writeVInt(field.rootCode.length); - termsOut.writeBytes(field.rootCode.bytes, field.rootCode.offset, field.rootCode.length); - assert field.fieldInfo.getIndexOptions() != IndexOptions.NONE; - if (field.fieldInfo.getIndexOptions() != IndexOptions.DOCS) { - termsOut.writeVLong(field.sumTotalTermFreq); - } - termsOut.writeVLong(field.sumDocFreq); - termsOut.writeVInt(field.docCount); - indexOut.writeVLong(field.indexStartFP); - writeBytesRef(termsOut, field.minTerm); - writeBytesRef(termsOut, field.maxTerm); + 
metaOut.writeVInt(fields.size()); + for (ByteBuffersDataOutput fieldMeta : fields) { + fieldMeta.copyTo(metaOut); } - writeTrailer(termsOut, dirStart); - CodecUtil.writeFooter(termsOut); - writeIndexTrailer(indexOut, indexDirStart); CodecUtil.writeFooter(indexOut); + metaOut.writeLong(indexOut.getFilePointer()); + CodecUtil.writeFooter(termsOut); + metaOut.writeLong(termsOut.getFilePointer()); + CodecUtil.writeFooter(metaOut); success = true; } finally { if (success) { - IOUtils.close(termsOut, indexOut, postingsWriter); + IOUtils.close(metaOut, termsOut, indexOut, postingsWriter); } else { - IOUtils.closeWhileHandlingException(termsOut, indexOut, postingsWriter); + IOUtils.closeWhileHandlingException(metaOut, termsOut, indexOut, postingsWriter); } } } - private static void writeBytesRef(IndexOutput out, BytesRef bytes) throws IOException { + private static void writeBytesRef(DataOutput out, BytesRef bytes) throws IOException { out.writeVInt(bytes.length); out.writeBytes(bytes.bytes, bytes.offset, bytes.length); } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/FieldReader.java b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/FieldReader.java index 01b9fa86dbd3..748fbbb97650 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/FieldReader.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/FieldReader.java @@ -52,7 +52,6 @@ public final class FieldReader extends Terms implements Accountable { final long sumTotalTermFreq; final long sumDocFreq; final int docCount; - final long indexStartFP; final long rootBlockFP; final BytesRef rootCode; final BytesRef minTerm; @@ -63,7 +62,7 @@ public final class FieldReader extends Terms implements Accountable { //private boolean DEBUG; FieldReader(BlockTreeTermsReader parent, FieldInfo fieldInfo, long numTerms, BytesRef rootCode, long sumTotalTermFreq, long sumDocFreq, int docCount, - long indexStartFP, IndexInput indexIn, BytesRef minTerm, BytesRef maxTerm) throws IOException { + long indexStartFP, IndexInput metaIn, IndexInput indexIn, BytesRef minTerm, BytesRef maxTerm) throws IOException { assert numTerms > 0; this.fieldInfo = fieldInfo; //DEBUG = BlockTreeTermsReader.DEBUG && fieldInfo.name.equals("id"); @@ -72,7 +71,6 @@ public final class FieldReader extends Terms implements Accountable { this.sumTotalTermFreq = sumTotalTermFreq; this.sumDocFreq = sumDocFreq; this.docCount = docCount; - this.indexStartFP = indexStartFP; this.rootCode = rootCode; this.minTerm = minTerm; this.maxTerm = maxTerm; @@ -81,22 +79,22 @@ public final class FieldReader extends Terms implements Accountable { // } rootBlockFP = (new ByteArrayDataInput(rootCode.bytes, rootCode.offset, rootCode.length)).readVLong() >>> BlockTreeTermsReader.OUTPUT_FLAGS_NUM_BITS; // Initialize FST always off-heap. 
- if (indexIn != null) { - final IndexInput clone = indexIn.clone(); - clone.seek(indexStartFP); - index = new FST<>(clone, ByteSequenceOutputs.getSingleton(), new OffHeapFSTStore()); - /* - if (false) { - final String dotFileName = segment + "_" + fieldInfo.name + ".dot"; - Writer w = new OutputStreamWriter(new FileOutputStream(dotFileName)); - Util.toDot(index, w, false, false); - System.out.println("FST INDEX: SAVED to " + dotFileName); - w.close(); - } - */ + final IndexInput clone = indexIn.clone(); + clone.seek(indexStartFP); + if (metaIn == indexIn) { // Only true before Lucene 8.6 + index = new FST<>(clone, clone, ByteSequenceOutputs.getSingleton(), new OffHeapFSTStore()); } else { - index = null; + index = new FST<>(metaIn, clone, ByteSequenceOutputs.getSingleton(), new OffHeapFSTStore()); } + /* + if (false) { + final String dotFileName = segment + "_" + fieldInfo.name + ".dot"; + Writer w = new OutputStreamWriter(new FileOutputStream(dotFileName)); + Util.toDot(index, w, false, false); + System.out.println("FST INDEX: SAVED to " + dotFileName); + w.close(); + } + */ } @Override diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene60/package-info.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene60/package-info.java index b7145ccea94a..d807058f6468 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene60/package-info.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene60/package-info.java @@ -16,7 +16,7 @@ */ /** - * Components from the Lucene 6.0 index format. See {@link org.apache.lucene.codecs.lucene80} + * Components from the Lucene 6.0 index format. See {@link org.apache.lucene.codecs.lucene86} * for an overview of the current index format. */ package org.apache.lucene.codecs.lucene60; diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene84/package-info.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene84/package-info.java index 91ee2e2cba63..5940a47dca83 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene84/package-info.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene84/package-info.java @@ -16,399 +16,7 @@ */ /** - * Lucene 8.4 file format. - * - *

Apache Lucene - Index File Formats

- * - * - *

Introduction

- *
- *

This document defines the index file formats used in this version of Lucene. - * If you are using a different version of Lucene, please consult the copy of - * docs/ that was distributed with - * the version you are using.

- *

This document attempts to provide a high-level definition of the Apache - * Lucene file formats.

- *
- * - *

Definitions

- *
- *

The fundamental concepts in Lucene are index, document, field and term.

- *

An index contains a sequence of documents.

- *
    - *
  • A document is a sequence of fields.
  • - *
  • A field is a named sequence of terms.
  • - *
  • A term is a sequence of bytes.
  • - *
- *

The same sequence of bytes in two different fields is considered a different - * term. Thus terms are represented as a pair: the string naming the field, and the - * bytes within the field.

- * - *

Inverted Indexing

- *

The index stores statistics about terms in order to make term-based search - * more efficient. Lucene's index falls into the family of indexes known as an - * inverted index. This is because it can list, for a term, the documents - * that contain it. This is the inverse of the natural relationship, in which - * documents list terms.

- * - *

Types of Fields

- *

In Lucene, fields may be stored, in which case their text is stored - * in the index literally, in a non-inverted manner. Fields that are inverted are - * called indexed. A field may be both stored and indexed.

- *

The text of a field may be tokenized into terms to be indexed, or the - * text of a field may be used literally as a term to be indexed. Most fields are - * tokenized, but sometimes it is useful for certain identifier fields to be - * indexed literally.

- *

See the {@link org.apache.lucene.document.Field Field} - * java docs for more information on Fields.

- * - *

Segments

- *

Lucene indexes may be composed of multiple sub-indexes, or segments. - * Each segment is a fully independent index, which could be searched separately. - * Indexes evolve by:

- *
    - *
  1. Creating new segments for newly added documents.
  2. - *
  3. Merging existing segments.
  4. - *
- *

Searches may involve multiple segments and/or multiple indexes, each index - * potentially composed of a set of segments.

- * - *

Document Numbers

- *

Internally, Lucene refers to documents by an integer document number. - * The first document added to an index is numbered zero, and each subsequent - * document added gets a number one greater than the previous.

- *

Note that a document's number may change, so caution should be taken when - * storing these numbers outside of Lucene. In particular, numbers may change in - * the following situations:

- *
    - *
  • - *

    The numbers stored in each segment are unique only within the segment, and - * must be converted before they can be used in a larger context. The standard - * technique is to allocate each segment a range of values, based on the range of - * numbers used in that segment. To convert a document number from a segment to an - * external value, the segment's base document number is added. To convert - * an external value back to a segment-specific value, the segment is identified - * by the range that the external value is in, and the segment's base value is - * subtracted. For example two five document segments might be combined, so that - * the first segment has a base value of zero, and the second of five. Document - * three from the second segment would have an external value of eight.

    - *
  • - *
  • - *

    When documents are deleted, gaps are created in the numbering. These are - * eventually removed as the index evolves through merging. Deleted documents are - * dropped when segments are merged. A freshly-merged segment thus has no gaps in - * its numbering.

    - *
  • - *
- *
Index Structure Overview

Each segment index maintains the following:

  • {@link org.apache.lucene.codecs.lucene70.Lucene70SegmentInfoFormat Segment info}. This contains metadata about a segment, such as the number of documents and what files it uses.
  • {@link org.apache.lucene.codecs.lucene60.Lucene60FieldInfosFormat Field names}. This contains the set of field names used in the index.
  • {@link org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat Stored Field values}. This contains, for each document, a list of attribute-value pairs, where the attributes are field names. These are used to store auxiliary information about the document, such as its title, url, or an identifier to access a database. The set of stored fields is what is returned for each hit when searching. This is keyed by document number.
  • {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term dictionary}. A dictionary containing all of the terms used in all of the indexed fields of all of the documents. The dictionary also contains the number of documents which contain the term, and pointers to the term's frequency and proximity data.
  • {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term Frequency data}. For each term in the dictionary, the numbers of all the documents that contain that term, and the frequency of the term in that document, unless frequencies are omitted ({@link org.apache.lucene.index.IndexOptions#DOCS IndexOptions.DOCS}).
  • {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term Proximity data}. For each term in the dictionary, the positions that the term occurs in each document. Note that this will not exist if all fields in all documents omit position data.
  • {@link org.apache.lucene.codecs.lucene80.Lucene80NormsFormat Normalization factors}. For each field in each document, a value is stored that is multiplied into the score for hits on that field.
  • {@link org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat Term Vectors}. For each field in each document, the term vector (sometimes called document vector) may be stored. A term vector consists of term text and term frequency. To add Term Vectors to your index, see the {@link org.apache.lucene.document.Field Field} constructors.
  • {@link org.apache.lucene.codecs.lucene80.Lucene80DocValuesFormat Per-document values}. Like stored values, these are also keyed by document number, but are generally intended to be loaded into main memory for fast access. Whereas stored values are generally intended for summary results from searches, per-document values are useful for things like scoring factors.
  • {@link org.apache.lucene.codecs.lucene50.Lucene50LiveDocsFormat Live documents}. An optional file indicating which documents are live.
  • {@link org.apache.lucene.codecs.lucene60.Lucene60PointsFormat Point values}. Optional pair of files, recording dimensionally indexed fields, to enable fast numeric range filtering and large numeric values like BigInteger and BigDecimal (1D) and geographic shape intersection (2D, 3D).

Details on each of these are provided in their linked pages.
File Naming

All files belonging to a segment have the same name with varying extensions. The extensions correspond to the different file formats described below. When using the Compound File format (default for small segments) these files (except for the Segment info file, the Lock file, and Deleted documents file) are collapsed into a single .cfs file (see below for details).

Typically, all segments in an index are stored in a single directory, although this is not required.

File names are never re-used. That is, when any file is saved to the Directory it is given a never before used filename. This is achieved using a simple generations approach. For example, the first segments file is segments_1, then segments_2, etc. The generation is a sequential long integer represented in alpha-numeric (base 36) form.
Summary of File Extensions

The following table summarizes the names and extensions of the files in Lucene:

lucene filenames by extension:

  Name | Extension | Brief Description
  {@link org.apache.lucene.index.SegmentInfos Segments File} | segments_N | Stores information about a commit point
  Lock File | write.lock | The Write lock prevents multiple IndexWriters from writing to the same file.
  {@link org.apache.lucene.codecs.lucene70.Lucene70SegmentInfoFormat Segment Info} | .si | Stores metadata about a segment
  {@link org.apache.lucene.codecs.lucene50.Lucene50CompoundFormat Compound File} | .cfs, .cfe | An optional "virtual" file consisting of all the other index files for systems that frequently run out of file handles.
  {@link org.apache.lucene.codecs.lucene60.Lucene60FieldInfosFormat Fields} | .fnm | Stores information about the fields
  {@link org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat Field Index} | .fdx | Contains pointers to field data
  {@link org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat Field Data} | .fdt | The stored fields for documents
  {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term Dictionary} | .tim | The term dictionary, stores term info
  {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term Index} | .tip | The index into the Term Dictionary
  {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Frequencies} | .doc | Contains the list of docs which contain each term along with frequency
  {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Positions} | .pos | Stores position information about where a term occurs in the index
  {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Payloads} | .pay | Stores additional per-position metadata information such as character offsets and user payloads
  {@link org.apache.lucene.codecs.lucene80.Lucene80NormsFormat Norms} | .nvd, .nvm | Encodes length and boost factors for docs and fields
  {@link org.apache.lucene.codecs.lucene80.Lucene80DocValuesFormat Per-Document Values} | .dvd, .dvm | Encodes additional scoring factors or other per-document information
  {@link org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat Term Vector Index} | .tvx | Stores offset into the document data file
  {@link org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat Term Vector Data} | .tvd | Contains term vector data
  {@link org.apache.lucene.codecs.lucene50.Lucene50LiveDocsFormat Live Documents} | .liv | Info about what documents are live
  {@link org.apache.lucene.codecs.lucene60.Lucene60PointsFormat Point values} | .dii, .dim | Holds indexed points, if any
Lock File

The write lock, which is stored in the index directory by default, is named "write.lock". If the lock directory is different from the index directory then the write lock will be named "XXXX-write.lock" where XXXX is a unique prefix derived from the full path to the index directory. When this file is present, a writer is currently modifying the index (adding or removing documents). This lock file ensures that only one writer is modifying the index at a time.

History

Compatibility notes are provided in this document, describing how file formats have changed from prior versions:
  • In version 2.1, the file format was changed to allow lock-less commits (i.e., no more commit lock). The change is fully backwards compatible: you can open a pre-2.1 index for searching or adding/deleting of docs. When the new segments file is saved (committed), it will be written in the new file format (meaning no specific "upgrade" process is needed). But note that once a commit has occurred, pre-2.1 Lucene will not be able to read the index.
  • In version 2.3, the file format was changed to allow segments to share a single set of doc store (vectors & stored fields) files. This allows for faster indexing in certain cases. The change is fully backwards compatible (in the same way as the lock-less commits change in 2.1).
  • In version 2.4, Strings are now written as a true UTF-8 byte sequence, not Java's modified UTF-8. See LUCENE-510 for details.
  • In version 2.9, an optional opaque Map<String,String> CommitUserData may be passed to IndexWriter's commit methods (and later retrieved), which is recorded in the segments_N file. See LUCENE-1382 for details. Also, diagnostics were added to each segment written, recording details about why it was written (due to flush, merge; which OS/JRE was used; etc.). See LUCENE-1654 for details.
  • In version 3.0, compressed fields are no longer written to the index (they can still be read, but on merge the new segment will write them, uncompressed). See LUCENE-1960 for details.
  • In version 3.1, segments record the code version that created them. See LUCENE-2720 for details. Additionally, segments explicitly track whether or not they have term vectors. See LUCENE-2811 for details.
  • In version 3.2, numeric fields are now written natively to the stored fields file; previously they were stored in text format only.
  • In version 3.4, fields can omit position data while still indexing term frequencies.
  • In version 4.0, the format of the inverted index became extensible via the {@link org.apache.lucene.codecs.Codec Codec} api. Fast per-document storage ({@code DocValues}) was introduced. Normalization factors need no longer be a single byte, they can be any {@link org.apache.lucene.index.NumericDocValues NumericDocValues}. Terms need not be unicode strings, they can be any byte sequence. Term offsets can optionally be indexed into the postings lists. Payloads can be stored in the term vectors.
  • In version 4.1, the format of the postings list changed to use either FOR compression or variable-byte encoding, depending upon the frequency of the term. Terms appearing only once were changed to inline directly into the term dictionary. Stored fields are compressed by default.
  • In version 4.2, term vectors are compressed by default. DocValues has a new multi-valued type (SortedSet) that can be used for faceting/grouping/joining on multi-valued fields.
  • In version 4.5, DocValues were extended to explicitly represent missing values.
  • In version 4.6, FieldInfos were extended to support per-field DocValues generation, to allow updating NumericDocValues fields.
  • In version 4.8, checksum footers were added to the end of each index file for improved data integrity. Specifically, the last 8 bytes of every index file contain the zlib-crc32 checksum of the file.
  • In version 4.9, DocValues has a new multi-valued numeric type (SortedNumeric) that is suitable for faceting/sorting/analytics.
  • In version 5.4, DocValues have been improved to store more information on disk: addresses for binary fields and ord indexes for multi-valued fields.
  • In version 6.0, Points were added, for multi-dimensional range/distance search.
  • In version 6.2, a new Segment info format reads/writes the index sort, to support index sorting.
  • In version 7.0, DocValues have been improved to better support sparse doc values thanks to an iterator API.
  • In version 8.0, postings have been enhanced to record, for each block of doc ids, the (term freq, normalization factor) pairs that may trigger the maximum score of the block. This information is recorded alongside skip data in order to be able to skip blocks of doc ids if they may not produce high enough scores. Additionally, doc values and norms have been extended with jump-tables to make access O(1) instead of O(n), where n is the number of elements to skip when advancing in the data.
  • In version 8.4, postings, positions, offsets and payload lengths have moved to a more performant encoding that is vectorized.

Limitations

Lucene uses a Java int to refer to document numbers, and the index file format uses an Int32 on-disk to store document numbers. This is a limitation of both the index file format and the current implementation. Eventually these should be replaced with either UInt64 values, or better yet, {@link org.apache.lucene.store.DataOutput#writeVInt VInt} values which have no limit.
+ * Components from the Lucene 8.4 index format. See {@link org.apache.lucene.codecs.lucene86} + * for an overview of the current index format. */ package org.apache.lucene.codecs.lucene84; diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86Codec.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86Codec.java new file mode 100644 index 000000000000..3f69874ef200 --- /dev/null +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86Codec.java @@ -0,0 +1,178 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.codecs.lucene86; + +import java.util.Objects; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.codecs.CompoundFormat; +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.FieldInfosFormat; +import org.apache.lucene.codecs.FilterCodec; +import org.apache.lucene.codecs.LiveDocsFormat; +import org.apache.lucene.codecs.NormsFormat; +import org.apache.lucene.codecs.PointsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.SegmentInfoFormat; +import org.apache.lucene.codecs.StoredFieldsFormat; +import org.apache.lucene.codecs.TermVectorsFormat; +import org.apache.lucene.codecs.lucene50.Lucene50CompoundFormat; +import org.apache.lucene.codecs.lucene50.Lucene50LiveDocsFormat; +import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat; +import org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat; +import org.apache.lucene.codecs.lucene60.Lucene60FieldInfosFormat; +import org.apache.lucene.codecs.lucene80.Lucene80NormsFormat; +import org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat; +import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; +import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; + +/** + * Implements the Lucene 8.6 index format, with configurable per-field postings + * and docvalues formats. + *

+ * If you want to reuse functionality of this codec in another codec, extend + * {@link FilterCodec}. + * + * @see org.apache.lucene.codecs.lucene86 package documentation for file format details. + * + * @lucene.experimental + */ +public class Lucene86Codec extends Codec { + private final TermVectorsFormat vectorsFormat = new Lucene50TermVectorsFormat(); + private final FieldInfosFormat fieldInfosFormat = new Lucene60FieldInfosFormat(); + private final SegmentInfoFormat segmentInfosFormat = new Lucene86SegmentInfoFormat(); + private final LiveDocsFormat liveDocsFormat = new Lucene50LiveDocsFormat(); + private final CompoundFormat compoundFormat = new Lucene50CompoundFormat(); + private final PointsFormat pointsFormat = new Lucene86PointsFormat(); + private final PostingsFormat defaultFormat; + + private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() { + @Override + public PostingsFormat getPostingsFormatForField(String field) { + return Lucene86Codec.this.getPostingsFormatForField(field); + } + }; + + private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { + @Override + public DocValuesFormat getDocValuesFormatForField(String field) { + return Lucene86Codec.this.getDocValuesFormatForField(field); + } + }; + + private final StoredFieldsFormat storedFieldsFormat; + + /** + * Instantiates a new codec. + */ + public Lucene86Codec() { + this(Lucene50StoredFieldsFormat.Mode.BEST_SPEED); + } + + /** + * Instantiates a new codec, specifying the stored fields compression + * mode to use. + * @param mode stored fields compression mode to use for newly + * flushed/merged segments. + */ + public Lucene86Codec(Lucene50StoredFieldsFormat.Mode mode) { + super("Lucene86"); + this.storedFieldsFormat = new Lucene50StoredFieldsFormat(Objects.requireNonNull(mode)); + this.defaultFormat = new Lucene84PostingsFormat(); + } + + @Override + public final StoredFieldsFormat storedFieldsFormat() { + return storedFieldsFormat; + } + + @Override + public final TermVectorsFormat termVectorsFormat() { + return vectorsFormat; + } + + @Override + public final PostingsFormat postingsFormat() { + return postingsFormat; + } + + @Override + public final FieldInfosFormat fieldInfosFormat() { + return fieldInfosFormat; + } + + @Override + public final SegmentInfoFormat segmentInfoFormat() { + return segmentInfosFormat; + } + + @Override + public final LiveDocsFormat liveDocsFormat() { + return liveDocsFormat; + } + + @Override + public final CompoundFormat compoundFormat() { + return compoundFormat; + } + + @Override + public final PointsFormat pointsFormat() { + return pointsFormat; + } + + /** Returns the postings format that should be used for writing + * new segments of field. + * + * The default implementation always returns "Lucene84". + *

+ * WARNING: if you subclass, you are responsible for index + * backwards compatibility: future version of Lucene are only + * guaranteed to be able to read the default implementation. + */ + public PostingsFormat getPostingsFormatForField(String field) { + return defaultFormat; + } + + /** Returns the docvalues format that should be used for writing + * new segments of field. + * + * The default implementation always returns "Lucene80". + *

+ * WARNING: if you subclass, you are responsible for index + * backwards compatibility: future version of Lucene are only + * guaranteed to be able to read the default implementation. + */ + public DocValuesFormat getDocValuesFormatForField(String field) { + return defaultDVFormat; + } + + @Override + public final DocValuesFormat docValuesFormat() { + return docValuesFormat; + } + + private final DocValuesFormat defaultDVFormat = DocValuesFormat.forName("Lucene80"); + + private final NormsFormat normsFormat = new Lucene80NormsFormat(); + + @Override + public final NormsFormat normsFormat() { + return normsFormat; + } +} diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86PointsFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86PointsFormat.java new file mode 100644 index 000000000000..8cd63a790c4c --- /dev/null +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86PointsFormat.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.codecs.lucene86; + + +import java.io.IOException; + +import org.apache.lucene.codecs.PointsFormat; +import org.apache.lucene.codecs.PointsReader; +import org.apache.lucene.codecs.PointsWriter; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SegmentWriteState; + +/** + * Lucene 8.6 point format, which encodes dimensional values in a block KD-tree structure + * for fast 1D range and N dimensional shape intersection filtering. + * See this paper for details. + * + *

Data is stored across three files:

  • A .kdm file that records metadata about the fields, such as numbers of dimensions or numbers of bytes per dimension.
  • A .kdi file that stores inner nodes of the tree.
  • A .kdd file that stores leaf nodes, where most of the data lives.
+ * + * @lucene.experimental + */ +public final class Lucene86PointsFormat extends PointsFormat { + + static final String DATA_CODEC_NAME = "Lucene86PointsFormatData"; + static final String INDEX_CODEC_NAME = "Lucene86PointsFormatIndex"; + static final String META_CODEC_NAME = "Lucene86PointsFormatMeta"; + + /** + * Filename extension for the leaf blocks + */ + public static final String DATA_EXTENSION = "kdd"; + + /** + * Filename extension for the index per field + */ + public static final String INDEX_EXTENSION = "kdi"; + + /** + * Filename extension for the meta per field + */ + public static final String META_EXTENSION = "kdm"; + + static final int VERSION_START = 0; + static final int VERSION_CURRENT = VERSION_START; + + /** Sole constructor */ + public Lucene86PointsFormat() { + } + + @Override + public PointsWriter fieldsWriter(SegmentWriteState state) throws IOException { + return new Lucene86PointsWriter(state); + } + + @Override + public PointsReader fieldsReader(SegmentReadState state) throws IOException { + return new Lucene86PointsReader(state); + } +} diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86PointsReader.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86PointsReader.java new file mode 100644 index 000000000000..fdc3cbd78b1b --- /dev/null +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86PointsReader.java @@ -0,0 +1,152 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */
package org.apache.lucene.codecs.lucene86;


import java.io.Closeable;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.PointsReader;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.bkd.BKDReader;

/** Reads point values previously written with {@link Lucene86PointsWriter} */
public class Lucene86PointsReader extends PointsReader implements Closeable {
  final IndexInput indexIn, dataIn;
  final SegmentReadState readState;
  final Map<Integer,BKDReader> readers = new HashMap<>();

  /** Sole constructor */
  public Lucene86PointsReader(SegmentReadState readState) throws IOException {
    this.readState = readState;

    String metaFileName = IndexFileNames.segmentFileName(readState.segmentInfo.name,
        readState.segmentSuffix,
        Lucene86PointsFormat.META_EXTENSION);
    String indexFileName = IndexFileNames.segmentFileName(readState.segmentInfo.name,
        readState.segmentSuffix,
        Lucene86PointsFormat.INDEX_EXTENSION);
    String dataFileName = IndexFileNames.segmentFileName(readState.segmentInfo.name,
        readState.segmentSuffix,
        Lucene86PointsFormat.DATA_EXTENSION);

    boolean success = false;
    try {
      indexIn = readState.directory.openInput(indexFileName, readState.context);
      CodecUtil.checkIndexHeader(indexIn,
          Lucene86PointsFormat.INDEX_CODEC_NAME,
          Lucene86PointsFormat.VERSION_START,
          Lucene86PointsFormat.VERSION_CURRENT,
          readState.segmentInfo.getId(),
          readState.segmentSuffix);

      dataIn = readState.directory.openInput(dataFileName, readState.context);
      CodecUtil.checkIndexHeader(dataIn,
          Lucene86PointsFormat.DATA_CODEC_NAME,
          Lucene86PointsFormat.VERSION_START,
          Lucene86PointsFormat.VERSION_CURRENT,
          readState.segmentInfo.getId(),
          readState.segmentSuffix);

      long indexLength = -1, dataLength = -1;
      try (ChecksumIndexInput metaIn = readState.directory.openChecksumInput(metaFileName, readState.context)) {
        Throwable priorE = null;
        try {
          CodecUtil.checkIndexHeader(metaIn,
              Lucene86PointsFormat.META_CODEC_NAME,
              Lucene86PointsFormat.VERSION_START,
              Lucene86PointsFormat.VERSION_CURRENT,
              readState.segmentInfo.getId(),
              readState.segmentSuffix);

          while (true) {
            int fieldNumber = metaIn.readInt();
            if (fieldNumber == -1) {
              break;
            } else if (fieldNumber < 0) {
              throw new CorruptIndexException("Illegal field number: " + fieldNumber, metaIn);
            }
            BKDReader reader = new BKDReader(metaIn, indexIn, dataIn);
            readers.put(fieldNumber, reader);
          }
          indexLength = metaIn.readLong();
          dataLength = metaIn.readLong();
        } catch (Throwable t) {
          priorE = t;
        } finally {
          CodecUtil.checkFooter(metaIn, priorE);
        }
      }
      // At this point, checksums of the meta file have been validated so we
      // know that indexLength and dataLength are very likely correct.
      CodecUtil.retrieveChecksum(indexIn, indexLength);
      CodecUtil.retrieveChecksum(dataIn, dataLength);
      success = true;
    } finally {
      if (success == false) {
        IOUtils.closeWhileHandlingException(this);
      }
    }

  }

  /** Returns the underlying {@link BKDReader}.
+ * + * @lucene.internal */ + @Override + public PointValues getValues(String fieldName) { + FieldInfo fieldInfo = readState.fieldInfos.fieldInfo(fieldName); + if (fieldInfo == null) { + throw new IllegalArgumentException("field=\"" + fieldName + "\" is unrecognized"); + } + if (fieldInfo.getPointDimensionCount() == 0) { + throw new IllegalArgumentException("field=\"" + fieldName + "\" did not index point values"); + } + + return readers.get(fieldInfo.number); + } + + @Override + public long ramBytesUsed() { + return 0L; + } + + @Override + public void checkIntegrity() throws IOException { + CodecUtil.checksumEntireFile(indexIn); + CodecUtil.checksumEntireFile(dataIn); + } + + @Override + public void close() throws IOException { + IOUtils.close(indexIn, dataIn); + // Free up heap: + readers.clear(); + } + +} + diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86PointsWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86PointsWriter.java new file mode 100644 index 000000000000..6fe35710c500 --- /dev/null +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86PointsWriter.java @@ -0,0 +1,265 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.codecs.lucene86; + + +import java.io.Closeable; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.codecs.MutablePointValues; +import org.apache.lucene.codecs.PointsReader; +import org.apache.lucene.codecs.PointsWriter; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.MergeState; +import org.apache.lucene.index.PointValues; +import org.apache.lucene.index.PointValues.IntersectVisitor; +import org.apache.lucene.index.PointValues.Relation; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.IOUtils; +import org.apache.lucene.util.bkd.BKDReader; +import org.apache.lucene.util.bkd.BKDWriter; + +/** Writes dimensional values */ +public class Lucene86PointsWriter extends PointsWriter implements Closeable { + + /** Outputs used to write the BKD tree data files. 
*/ + protected final IndexOutput metaOut, indexOut, dataOut; + + final SegmentWriteState writeState; + final int maxPointsInLeafNode; + final double maxMBSortInHeap; + private boolean finished; + + /** Full constructor */ + public Lucene86PointsWriter(SegmentWriteState writeState, int maxPointsInLeafNode, double maxMBSortInHeap) throws IOException { + assert writeState.fieldInfos.hasPointValues(); + this.writeState = writeState; + this.maxPointsInLeafNode = maxPointsInLeafNode; + this.maxMBSortInHeap = maxMBSortInHeap; + String dataFileName = IndexFileNames.segmentFileName(writeState.segmentInfo.name, + writeState.segmentSuffix, + Lucene86PointsFormat.DATA_EXTENSION); + dataOut = writeState.directory.createOutput(dataFileName, writeState.context); + boolean success = false; + try { + CodecUtil.writeIndexHeader(dataOut, + Lucene86PointsFormat.DATA_CODEC_NAME, + Lucene86PointsFormat.VERSION_CURRENT, + writeState.segmentInfo.getId(), + writeState.segmentSuffix); + + String metaFileName = IndexFileNames.segmentFileName(writeState.segmentInfo.name, + writeState.segmentSuffix, + Lucene86PointsFormat.META_EXTENSION); + metaOut = writeState.directory.createOutput(metaFileName, writeState.context); + CodecUtil.writeIndexHeader(metaOut, + Lucene86PointsFormat.META_CODEC_NAME, + Lucene86PointsFormat.VERSION_CURRENT, + writeState.segmentInfo.getId(), + writeState.segmentSuffix); + + String indexFileName = IndexFileNames.segmentFileName(writeState.segmentInfo.name, + writeState.segmentSuffix, + Lucene86PointsFormat.INDEX_EXTENSION); + indexOut = writeState.directory.createOutput(indexFileName, writeState.context); + CodecUtil.writeIndexHeader(indexOut, + Lucene86PointsFormat.INDEX_CODEC_NAME, + Lucene86PointsFormat.VERSION_CURRENT, + writeState.segmentInfo.getId(), + writeState.segmentSuffix); + + success = true; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(this); + } + } + } + + /** Uses the defaults values for {@code maxPointsInLeafNode} (1024) and {@code maxMBSortInHeap} (16.0) */ + public Lucene86PointsWriter(SegmentWriteState writeState) throws IOException { + this(writeState, BKDWriter.DEFAULT_MAX_POINTS_IN_LEAF_NODE, BKDWriter.DEFAULT_MAX_MB_SORT_IN_HEAP); + } + + @Override + public void writeField(FieldInfo fieldInfo, PointsReader reader) throws IOException { + + PointValues values = reader.getValues(fieldInfo.name); + + try (BKDWriter writer = new BKDWriter(writeState.segmentInfo.maxDoc(), + writeState.directory, + writeState.segmentInfo.name, + fieldInfo.getPointDimensionCount(), + fieldInfo.getPointIndexDimensionCount(), + fieldInfo.getPointNumBytes(), + maxPointsInLeafNode, + maxMBSortInHeap, + values.size())) { + + if (values instanceof MutablePointValues) { + Runnable finalizer = writer.writeField(metaOut, indexOut, dataOut, fieldInfo.name, (MutablePointValues) values); + if (finalizer != null) { + metaOut.writeInt(fieldInfo.number); + finalizer.run(); + } + return; + } + + values.intersect(new IntersectVisitor() { + @Override + public void visit(int docID) { + throw new IllegalStateException(); + } + + public void visit(int docID, byte[] packedValue) throws IOException { + writer.add(packedValue, docID); + } + + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + return Relation.CELL_CROSSES_QUERY; + } + }); + + // We could have 0 points on merge since all docs with dimensional fields may be deleted: + Runnable finalizer = writer.finish(metaOut, indexOut, dataOut); + if (finalizer != null) { + 
        metaOut.writeInt(fieldInfo.number);
        finalizer.run();
      }
    }
  }

  @Override
  public void merge(MergeState mergeState) throws IOException {
    /**
     * If indexSort is activated and some of the leaves are not sorted the next test will catch that and the non-optimized merge will run.
     * If the readers are all sorted then it's safe to perform a bulk merge of the points.
     **/
    for (PointsReader reader : mergeState.pointsReaders) {
      if (reader instanceof Lucene86PointsReader == false) {
        // We can only bulk merge when all to-be-merged segments use our format:
        super.merge(mergeState);
        return;
      }
    }
    for (PointsReader reader : mergeState.pointsReaders) {
      if (reader != null) {
        reader.checkIntegrity();
      }
    }

    for (FieldInfo fieldInfo : mergeState.mergeFieldInfos) {
      if (fieldInfo.getPointDimensionCount() != 0) {
        if (fieldInfo.getPointDimensionCount() == 1) {

          // Worst case total maximum size (if none of the points are deleted):
          long totMaxSize = 0;
          for (int i = 0; i < mergeState.pointsReaders.length; i++) {
            PointsReader reader = mergeState.pointsReaders[i];
            if (reader != null) {
              FieldInfos readerFieldInfos = mergeState.fieldInfos[i];
              FieldInfo readerFieldInfo = readerFieldInfos.fieldInfo(fieldInfo.name);
              if (readerFieldInfo != null && readerFieldInfo.getPointDimensionCount() > 0) {
                PointValues values = reader.getValues(fieldInfo.name);
                if (values != null) {
                  totMaxSize += values.size();
                }
              }
            }
          }

          //System.out.println("MERGE: field=" + fieldInfo.name);
          // Optimize the 1D case to use BKDWriter.merge, which does a single merge sort of the
          // already sorted incoming segments, instead of trying to sort all points again as if
          // we were simply reindexing them:
          try (BKDWriter writer = new BKDWriter(writeState.segmentInfo.maxDoc(),
                                                writeState.directory,
                                                writeState.segmentInfo.name,
                                                fieldInfo.getPointDimensionCount(),
                                                fieldInfo.getPointIndexDimensionCount(),
                                                fieldInfo.getPointNumBytes(),
                                                maxPointsInLeafNode,
                                                maxMBSortInHeap,
                                                totMaxSize)) {
            List<BKDReader> bkdReaders = new ArrayList<>();
            List<MergeState.DocMap> docMaps = new ArrayList<>();
            for (int i = 0; i < mergeState.pointsReaders.length; i++) {
              PointsReader reader = mergeState.pointsReaders[i];
              if (reader != null) {
                // we confirmed this up above
                assert reader instanceof Lucene86PointsReader;
                Lucene86PointsReader reader60 = (Lucene86PointsReader) reader;
                FieldInfos readerFieldInfos = mergeState.fieldInfos[i];
                FieldInfo readerFieldInfo = readerFieldInfos.fieldInfo(fieldInfo.name);
                if (readerFieldInfo != null && readerFieldInfo.getPointDimensionCount() > 0) {
                  BKDReader bkdReader = reader60.readers.get(readerFieldInfo.number);
                  if (bkdReader != null) {
                    bkdReaders.add(bkdReader);
                    docMaps.add(mergeState.docMaps[i]);
                  }
                }
              }
            }

            Runnable finalizer = writer.merge(metaOut, indexOut, dataOut, docMaps, bkdReaders);
            if (finalizer != null) {
              metaOut.writeInt(fieldInfo.number);
              finalizer.run();
            }
          }
        } else {
          mergeOneField(mergeState, fieldInfo);
        }
      }
    }

    finish();
  }

  @Override
  public void finish() throws IOException {
    if (finished) {
      throw new IllegalStateException("already finished");
    }
    finished = true;
    metaOut.writeInt(-1);
    CodecUtil.writeFooter(indexOut);
    CodecUtil.writeFooter(dataOut);
    metaOut.writeLong(indexOut.getFilePointer());
    metaOut.writeLong(dataOut.getFilePointer());
    CodecUtil.writeFooter(metaOut);
  }

  @Override
  public void close() throws IOException {
    IOUtils.close(metaOut, indexOut, dataOut);
  }
}
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86SegmentInfoFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86SegmentInfoFormat.java
new file mode 100644
index 000000000000..b2bcdc2282ec
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86SegmentInfoFormat.java
@@ -0,0 +1,217 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.codecs.lucene86; + +import java.io.IOException; +import java.util.Map; +import java.util.Set; + +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.codecs.SegmentInfoFormat; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.IndexSorter; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.SegmentInfo; +import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.index.SortFieldProvider; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; +import org.apache.lucene.store.ChecksumIndexInput; +import org.apache.lucene.store.DataInput; +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.Version; + +/** + * Lucene 8.6 Segment info format. + *

+ * Files: + *

    + *
  • .si: Header, SegVersion, SegSize, IsCompoundFile, Diagnostics, Files, Attributes, IndexSort, Footer

Data types:

  • Header --> {@link CodecUtil#writeIndexHeader IndexHeader}
  • SegSize --> {@link DataOutput#writeInt Int32}
  • SegVersion --> {@link DataOutput#writeString String}
  • SegMinVersion --> {@link DataOutput#writeString String}
  • Files --> {@link DataOutput#writeSetOfStrings Set<String>}
  • Diagnostics, Attributes --> {@link DataOutput#writeMapOfStrings Map<String,String>}
  • IsCompoundFile --> {@link DataOutput#writeByte Int8}
  • IndexSort --> {@link DataOutput#writeVInt Int32} count, followed by {@code count} SortField
  • SortField --> {@link DataOutput#writeString String} sort class, followed by a per-sort bytestream (see {@link SortFieldProvider#readSortField(DataInput)})
  • Footer --> {@link CodecUtil#writeFooter CodecFooter}

Field Descriptions:

  • SegVersion is the code version that created the segment.
  • SegMinVersion is the minimum code version that contributed documents to the segment.
  • SegSize is the number of documents contained in the segment index.
  • IsCompoundFile records whether the segment is written as a compound file or not. If this is -1, the segment is not a compound file. If it is 1, the segment is a compound file.
  • The Diagnostics Map is privately written by {@link IndexWriter}, as a debugging aid, for each segment it creates. It includes metadata like the current Lucene version, OS, Java version, why the segment was created (merge, flush, addIndexes), etc.
  • Files is a list of files referred to by this segment.
+ * + * @see SegmentInfos + * @lucene.experimental + */ +public class Lucene86SegmentInfoFormat extends SegmentInfoFormat { + + /** Sole constructor. */ + public Lucene86SegmentInfoFormat() { + } + + @Override + public SegmentInfo read(Directory dir, String segment, byte[] segmentID, IOContext context) throws IOException { + final String fileName = IndexFileNames.segmentFileName(segment, "", SI_EXTENSION); + try (ChecksumIndexInput input = dir.openChecksumInput(fileName, context)) { + Throwable priorE = null; + SegmentInfo si = null; + try { + int format = CodecUtil.checkIndexHeader(input, CODEC_NAME, + VERSION_START, + VERSION_CURRENT, + segmentID, ""); + final Version version = Version.fromBits(input.readInt(), input.readInt(), input.readInt()); + byte hasMinVersion = input.readByte(); + final Version minVersion; + switch (hasMinVersion) { + case 0: + minVersion = null; + break; + case 1: + minVersion = Version.fromBits(input.readInt(), input.readInt(), input.readInt()); + break; + default: + throw new CorruptIndexException("Illegal boolean value " + hasMinVersion, input); + } + + final int docCount = input.readInt(); + if (docCount < 0) { + throw new CorruptIndexException("invalid docCount: " + docCount, input); + } + final boolean isCompoundFile = input.readByte() == SegmentInfo.YES; + + final Map diagnostics = input.readMapOfStrings(); + final Set files = input.readSetOfStrings(); + final Map attributes = input.readMapOfStrings(); + + int numSortFields = input.readVInt(); + Sort indexSort; + if (numSortFields > 0) { + SortField[] sortFields = new SortField[numSortFields]; + for(int i=0;i= 7 but got: " + version.major + " segment=" + si); + } + // Write the Lucene version that created this segment, since 3.1 + output.writeInt(version.major); + output.writeInt(version.minor); + output.writeInt(version.bugfix); + + // Write the min Lucene version that contributed docs to the segment, since 7.0 + if (si.getMinVersion() != null) { + output.writeByte((byte) 1); + Version minVersion = si.getMinVersion(); + output.writeInt(minVersion.major); + output.writeInt(minVersion.minor); + output.writeInt(minVersion.bugfix); + } else { + output.writeByte((byte) 0); + } + + assert version.prerelease == 0; + output.writeInt(si.maxDoc()); + + output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO)); + output.writeMapOfStrings(si.getDiagnostics()); + Set files = si.files(); + for (String file : files) { + if (!IndexFileNames.parseSegmentName(file).equals(si.name)) { + throw new IllegalArgumentException("invalid files: expected segment=" + si.name + ", got=" + files); + } + } + output.writeSetOfStrings(files); + output.writeMapOfStrings(si.getAttributes()); + + Sort indexSort = si.getIndexSort(); + int numSortFields = indexSort == null ? 0 : indexSort.getSort().length; + output.writeVInt(numSortFields); + for (int i = 0; i < numSortFields; ++i) { + SortField sortField = indexSort.getSort()[i]; + IndexSorter sorter = sortField.getIndexSorter(); + if (sorter == null) { + throw new IllegalArgumentException("cannot serialize SortField " + sortField); + } + output.writeString(sorter.getProviderName()); + SortFieldProvider.write(sortField, output); + } + + CodecUtil.writeFooter(output); + } + } + + /** File extension used to store {@link SegmentInfo}. 
*/ + public final static String SI_EXTENSION = "si"; + static final String CODEC_NAME = "Lucene86SegmentInfo"; + static final int VERSION_START = 0; + static final int VERSION_CURRENT = VERSION_START; +} diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene86/package-info.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/package-info.java new file mode 100644 index 000000000000..19be7eb66157 --- /dev/null +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/package-info.java @@ -0,0 +1,416 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Lucene 8.6 file format. + * + *

Apache Lucene - Index File Formats

+ * + * + *

Introduction

This document defines the index file formats used in this version of Lucene. If you are using a different version of Lucene, please consult the copy of docs/ that was distributed with the version you are using.

This document attempts to provide a high-level definition of the Apache Lucene file formats.
+ * + *

Definitions

The fundamental concepts in Lucene are index, document, field and term.

An index contains a sequence of documents.

  • A document is a sequence of fields.
  • A field is a named sequence of terms.
  • A term is a sequence of bytes.

The same sequence of bytes in two different fields is considered a different term. Thus terms are represented as a pair: the string naming the field, and the bytes within the field.
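As a rough illustration of these definitions (not part of this change), the sketch below builds a document from named fields; it assumes lucene-core 8.x on the classpath, and the field names are invented for the example:

```java
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;

public class DocumentSketch {
  public static void main(String[] args) {
    Document doc = new Document();
    // A document is a sequence of fields; TextField values are tokenized into terms.
    doc.add(new TextField("title", "Apache Lucene - Index File Formats", Store.YES));
    // The same bytes under a different field name form a different term:
    doc.add(new StringField("id", "lucene", Store.YES));
    doc.add(new StringField("category", "lucene", Store.YES));
    System.out.println(doc);
  }
}
```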

Inverted Indexing

The index stores statistics about terms in order to make term-based search more efficient. Lucene's index falls into the family of indexes known as an inverted index. This is because it can list, for a term, the documents that contain it. This is the inverse of the natural relationship, in which documents list terms.
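A minimal sketch of the inverted lookup, assuming lucene-core 8.x (the field name and text are illustrative): the inverted index maps a term back to the documents that contain it.

```java
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;

public class InvertedIndexSketch {
  public static void main(String[] args) throws Exception {
    try (Directory dir = new ByteBuffersDirectory()) {
      try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
        Document doc = new Document();
        doc.add(new TextField("body", "documents list terms", Store.NO));
        writer.addDocument(doc);
      }
      try (DirectoryReader reader = DirectoryReader.open(dir)) {
        // The inverted index maps the term back to the documents containing it:
        TopDocs hits = new IndexSearcher(reader).search(new TermQuery(new Term("body", "terms")), 10);
        System.out.println("hits: " + hits.totalHits);
      }
    }
  }
}
```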

Types of Fields

In Lucene, fields may be stored, in which case their text is stored in the index literally, in a non-inverted manner. Fields that are inverted are called indexed. A field may be both stored and indexed.

The text of a field may be tokenized into terms to be indexed, or the text of a field may be used literally as a term to be indexed. Most fields are tokenized, but sometimes it is useful for certain identifier fields to be indexed literally.

See the {@link org.apache.lucene.document.Field Field} java docs for more information on Fields.
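A hedged sketch of these field variants (assuming lucene-core 8.x; names and values are illustrative):

```java
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;

public class FieldTypesSketch {
  public static Document example() {
    Document doc = new Document();
    doc.add(new TextField("title", "File Naming", Store.YES));      // tokenized and stored
    doc.add(new StringField("sku", "AB-1234", Store.YES));          // indexed literally as one term
    doc.add(new StoredField("url", "https://lucene.apache.org"));   // stored only, not inverted
    doc.add(new IntPoint("year", 2020));                            // dimensionally indexed (point values)
    return doc;
  }
}
```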

Segments

Lucene indexes may be composed of multiple sub-indexes, or segments. Each segment is a fully independent index, which could be searched separately. Indexes evolve by:

  1. Creating new segments for newly added documents.
  2. Merging existing segments.

Searches may involve multiple segments and/or multiple indexes, each index potentially composed of a set of segments.
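A small sketch, assuming lucene-core 8.x and an existing index at an illustrative path, showing that an open index exposes its segments as independent leaf readers:

```java
import java.nio.file.Paths;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class SegmentsSketch {
  public static void main(String[] args) throws Exception {
    // "/tmp/idx" is an illustrative path to an existing index.
    try (Directory dir = FSDirectory.open(Paths.get("/tmp/idx"));
         DirectoryReader reader = DirectoryReader.open(dir)) {
      // Each leaf is one segment; a search visits all of them.
      for (LeafReaderContext leaf : reader.leaves()) {
        System.out.println("segment with " + leaf.reader().maxDoc() + " docs, docBase=" + leaf.docBase);
      }
    }
  }
}
```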

Document Numbers

Internally, Lucene refers to documents by an integer document number. The first document added to an index is numbered zero, and each subsequent document added gets a number one greater than the previous.

Note that a document's number may change, so caution should be taken when storing these numbers outside of Lucene. In particular, numbers may change in the following situations:

  • The numbers stored in each segment are unique only within the segment, and must be converted before they can be used in a larger context. The standard technique is to allocate each segment a range of values, based on the range of numbers used in that segment. To convert a document number from a segment to an external value, the segment's base document number is added. To convert an external value back to a segment-specific value, the segment is identified by the range that the external value is in, and the segment's base value is subtracted. For example, two five-document segments might be combined, so that the first segment has a base value of zero, and the second of five. Document three from the second segment would have an external value of eight.

  • When documents are deleted, gaps are created in the numbering. These are eventually removed as the index evolves through merging. Deleted documents are dropped when segments are merged. A freshly-merged segment thus has no gaps in its numbering.
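The base-offset arithmetic from the example above, written out in plain Java (the two five-document segments are the ones described in the text):

```java
public class DocNumberSketch {
  public static void main(String[] args) {
    // Two five-document segments: bases 0 and 5.
    int[] segmentBases = {0, 5};
    int segment = 1, docInSegment = 3;
    // Segment-local number -> external value: add the segment's base.
    int external = segmentBases[segment] + docInSegment;   // 8
    // External value -> segment-local number: find the owning segment's
    // range, then subtract its base.
    int back = external - segmentBases[segment];           // 3
    System.out.println(external + " -> " + back);
  }
}
```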
+ * + *

Index Structure Overview

Each segment index maintains the following:

  • {@link org.apache.lucene.codecs.lucene86.Lucene86SegmentInfoFormat Segment info}. This contains metadata about a segment, such as the number of documents, what files it uses, and information about how the segment is sorted.
  • {@link org.apache.lucene.codecs.lucene60.Lucene60FieldInfosFormat Field names}. This contains the set of field names used in the index.
  • {@link org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat Stored Field values}. This contains, for each document, a list of attribute-value pairs, where the attributes are field names. These are used to store auxiliary information about the document, such as its title, url, or an identifier to access a database. The set of stored fields is what is returned for each hit when searching. This is keyed by document number.
  • {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term dictionary}. A dictionary containing all of the terms used in all of the indexed fields of all of the documents. The dictionary also contains the number of documents which contain the term, and pointers to the term's frequency and proximity data.
  • {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term Frequency data}. For each term in the dictionary, the numbers of all the documents that contain that term, and the frequency of the term in that document, unless frequencies are omitted ({@link org.apache.lucene.index.IndexOptions#DOCS IndexOptions.DOCS}).
  • {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term Proximity data}. For each term in the dictionary, the positions that the term occurs in each document. Note that this will not exist if all fields in all documents omit position data.
  • {@link org.apache.lucene.codecs.lucene80.Lucene80NormsFormat Normalization factors}. For each field in each document, a value is stored that is multiplied into the score for hits on that field.
  • {@link org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat Term Vectors}. For each field in each document, the term vector (sometimes called document vector) may be stored. A term vector consists of term text and term frequency. To add Term Vectors to your index, see the {@link org.apache.lucene.document.Field Field} constructors.
  • {@link org.apache.lucene.codecs.lucene80.Lucene80DocValuesFormat Per-document values}. Like stored values, these are also keyed by document number, but are generally intended to be loaded into main memory for fast access. Whereas stored values are generally intended for summary results from searches, per-document values are useful for things like scoring factors.
  • {@link org.apache.lucene.codecs.lucene50.Lucene50LiveDocsFormat Live documents}. An optional file indicating which documents are live.
  • {@link org.apache.lucene.codecs.lucene86.Lucene86PointsFormat Point values}. Optional files, recording dimensionally indexed fields, to enable fast numeric range filtering and large numeric values like BigInteger and BigDecimal (1D) and geographic shape intersection (2D, 3D).

Details on each of these are provided in their linked pages.
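The postings and doc values components above are pluggable per field. A hedged sketch of selecting this codec and overriding its per-field hook (the routing of the "id" field below is hypothetical; "Lucene84" is the default postings format named in this change, and the back-compat warning in the codec's javadoc applies to any real override):

```java
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.lucene86.Lucene86Codec;
import org.apache.lucene.index.IndexWriterConfig;

public class PerFieldCodecSketch {
  public static void main(String[] args) {
    Lucene86Codec codec = new Lucene86Codec() {
      @Override
      public PostingsFormat getPostingsFormatForField(String field) {
        if ("id".equals(field)) {
          // Hypothetical per-field routing; here it simply names a format known to exist.
          return PostingsFormat.forName("Lucene84");
        }
        return super.getPostingsFormatForField(field);
      }
    };
    IndexWriterConfig config = new IndexWriterConfig().setCodec(codec);
    System.out.println(config.getCodec().getName()); // Lucene86
  }
}
```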

File Naming

All files belonging to a segment have the same name with varying extensions. The extensions correspond to the different file formats described below. When using the Compound File format (default for small segments) these files (except for the Segment info file, the Lock file, and Deleted documents file) are collapsed into a single .cfs file (see below for details).

Typically, all segments in an index are stored in a single directory, although this is not required.

File names are never re-used. That is, when any file is saved to the Directory it is given a never before used filename. This is achieved using a simple generations approach. For example, the first segments file is segments_1, then segments_2, etc. The generation is a sequential long integer represented in alpha-numeric (base 36) form.
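The base-36 encoding can be reproduced with Long.toString(gen, Character.MAX_RADIX), which matches Lucene's file naming; a small sketch:

```java
public class GenerationSketch {
  public static void main(String[] args) {
    // Generations advance as a long encoded in base 36,
    // e.g. 1 -> segments_1, 35 -> segments_z, 46 -> segments_1a.
    for (long gen : new long[] {1, 2, 35, 36, 46}) {
      System.out.println("segments_" + Long.toString(gen, Character.MAX_RADIX));
    }
  }
}
```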

Summary of File Extensions

The following table summarizes the names and extensions of the files in Lucene:

lucene filenames by extension:

  Name | Extension | Brief Description
  {@link org.apache.lucene.index.SegmentInfos Segments File} | segments_N | Stores information about a commit point
  Lock File | write.lock | The Write lock prevents multiple IndexWriters from writing to the same file.
  {@link org.apache.lucene.codecs.lucene86.Lucene86SegmentInfoFormat Segment Info} | .si | Stores metadata about a segment
  {@link org.apache.lucene.codecs.lucene50.Lucene50CompoundFormat Compound File} | .cfs, .cfe | An optional "virtual" file consisting of all the other index files for systems that frequently run out of file handles.
  {@link org.apache.lucene.codecs.lucene60.Lucene60FieldInfosFormat Fields} | .fnm | Stores information about the fields
  {@link org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat Field Index} | .fdx | Contains pointers to field data
  {@link org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat Field Data} | .fdt | The stored fields for documents
  {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term Dictionary} | .tim | The term dictionary, stores term info
  {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term Index} | .tip | The index into the Term Dictionary
  {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Frequencies} | .doc | Contains the list of docs which contain each term along with frequency
  {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Positions} | .pos | Stores position information about where a term occurs in the index
  {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Payloads} | .pay | Stores additional per-position metadata information such as character offsets and user payloads
  {@link org.apache.lucene.codecs.lucene80.Lucene80NormsFormat Norms} | .nvd, .nvm | Encodes length and boost factors for docs and fields
  {@link org.apache.lucene.codecs.lucene80.Lucene80DocValuesFormat Per-Document Values} | .dvd, .dvm | Encodes additional scoring factors or other per-document information
  {@link org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat Term Vector Index} | .tvx | Stores offset into the document data file
  {@link org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat Term Vector Data} | .tvd | Contains term vector data
  {@link org.apache.lucene.codecs.lucene50.Lucene50LiveDocsFormat Live Documents} | .liv | Info about what documents are live
  {@link org.apache.lucene.codecs.lucene86.Lucene86PointsFormat Point values} | .kdd, .kdi, .kdm | Holds indexed points, if any
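To see these extensions on disk, one can list an index directory; a minimal sketch assuming an existing index at an illustrative path:

```java
import java.nio.file.Paths;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class ListFilesSketch {
  public static void main(String[] args) throws Exception {
    // "/tmp/idx" is an illustrative path to an existing index directory.
    try (Directory dir = FSDirectory.open(Paths.get("/tmp/idx"))) {
      for (String file : dir.listAll()) {
        System.out.println(file); // e.g. segments_1, _0.cfs, _0.cfe, _0.si, write.lock
      }
    }
  }
}
```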
+ * + *

Lock File

The write lock, which is stored in the index directory by default, is named "write.lock". If the lock directory is different from the index directory then the write lock will be named "XXXX-write.lock" where XXXX is a unique prefix derived from the full path to the index directory. When this file is present, a writer is currently modifying the index (adding or removing documents). This lock file ensures that only one writer is modifying the index at a time.
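A hedged sketch of the locking behavior (assuming lucene-core 8.x; the path is illustrative, and production code normally lets IndexWriter manage this lock itself):

```java
import java.nio.file.Paths;

import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockObtainFailedException;

public class WriteLockSketch {
  public static void main(String[] args) throws Exception {
    try (Directory dir = FSDirectory.open(Paths.get("/tmp/idx"))) {
      try (Lock lock = dir.obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
        // While this lock is held, opening an IndexWriter on the same
        // directory fails with LockObtainFailedException.
        System.out.println("holding " + IndexWriter.WRITE_LOCK_NAME);
      } catch (LockObtainFailedException e) {
        System.out.println("another writer holds the lock: " + e);
      }
    }
  }
}
```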

History

Compatibility notes are provided in this document, describing how file formats have changed from prior versions:
+ *
    + *
  • In version 2.1, the file format was changed to allow lock-less commits (i.e., no more commit lock). The change is fully backwards compatible: you can open a pre-2.1 index for searching or adding/deleting of docs. When the new segments file is saved (committed), it will be written in the new file format (meaning no specific "upgrade" process is needed). But note that once a commit has occurred, pre-2.1 Lucene will not be able to read the index.
  • In version 2.3, the file format was changed to allow segments to share a single set of doc store (vectors & stored fields) files. This allows for faster indexing in certain cases. The change is fully backwards compatible (in the same way as the lock-less commits change in 2.1).
  • In version 2.4, Strings are now written as a true UTF-8 byte sequence, not Java's modified UTF-8. See LUCENE-510 for details.
  • In version 2.9, an optional opaque Map<String,String> CommitUserData may be passed to IndexWriter's commit methods (and later retrieved), which is recorded in the segments_N file. See LUCENE-1382 for details. Also, diagnostics were added to each segment written, recording details about why it was written (due to flush, merge; which OS/JRE was used; etc.). See LUCENE-1654 for details.
  • In version 3.0, compressed fields are no longer written to the index (they can still be read, but on merge the new segment will write them, uncompressed). See LUCENE-1960 for details.
  • In version 3.1, segments record the code version that created them. See LUCENE-2720 for details. Additionally, segments explicitly track whether or not they have term vectors. See LUCENE-2811 for details.
  • In version 3.2, numeric fields are now written natively to the stored fields file; previously they were stored in text format only.
  • In version 3.4, fields can omit position data while still indexing term frequencies.
  • In version 4.0, the format of the inverted index became extensible via the {@link org.apache.lucene.codecs.Codec Codec} api. Fast per-document storage ({@code DocValues}) was introduced. Normalization factors need no longer be a single byte, they can be any {@link org.apache.lucene.index.NumericDocValues NumericDocValues}. Terms need not be unicode strings, they can be any byte sequence. Term offsets can optionally be indexed into the postings lists. Payloads can be stored in the term vectors.
  • In version 4.1, the format of the postings list changed to use either FOR compression or variable-byte encoding, depending upon the frequency of the term. Terms appearing only once were changed to inline directly into the term dictionary. Stored fields are compressed by default.
  • In version 4.2, term vectors are compressed by default. DocValues has a new multi-valued type (SortedSet) that can be used for faceting/grouping/joining on multi-valued fields.
  • In version 4.5, DocValues were extended to explicitly represent missing values.
  • In version 4.6, FieldInfos were extended to support per-field DocValues generation, to allow updating NumericDocValues fields.
  • In version 4.8, checksum footers were added to the end of each index file for improved data integrity. Specifically, the last 8 bytes of every index file contain the zlib-crc32 checksum of the file.
  • In version 4.9, DocValues has a new multi-valued numeric type (SortedNumeric) that is suitable for faceting/sorting/analytics.
  • In version 5.4, DocValues have been improved to store more information on disk: addresses for binary fields and ord indexes for multi-valued fields.
  • In version 6.0, Points were added, for multi-dimensional range/distance search.
  • In version 6.2, a new Segment info format reads/writes the index sort, to support index sorting.
  • In version 7.0, DocValues have been improved to better support sparse doc values thanks to an iterator API.
  • + *
  • In version 8.0, postings have been enhanced to record, for each block of + * doc ids, the (term freq, normalization factor) pairs that may trigger the + * maximum score of the block. This information is recorded alongside skip data + * in order to be able to skip blocks of doc ids if they may not produce high + * enough scores. + * Additionally doc values and norms has been extended with jump-tables to make access O(1) + * instead of O(n), where n is the number of elements to skip when advancing in the data.
  • + *
  • In version 8.4, postings, positions, offsets and payload lengths have move to a more + * performant encoding that is vectorized.
  • + *
  • In version 8.6, index sort serialization is delegated to the sorts themselves, to + * allow user-defined sorts to be used
  • + *
+ * + *
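The checksum footers mentioned under version 4.8 can be verified programmatically. A minimal sketch (the file name "_0.si" is only an example; any index file carries the footer):

import java.nio.file.Paths;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;

public class VerifyFooter {
  public static void main(String[] args) throws Exception {
    try (Directory dir = FSDirectory.open(Paths.get(args[0]));
         IndexInput in = dir.openInput("_0.si", IOContext.READONCE)) {
      // Reads the whole file and validates the zlib-crc32 stored in the
      // 8-byte footer; throws CorruptIndexException on mismatch.
      long crc = CodecUtil.checksumEntireFile(in);
      System.out.println("footer checksum OK: " + Long.toHexString(crc));
    }
  }
}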

Limitations

Lucene uses a Java int to refer to document numbers, and the index file format uses an Int32 on-disk to store document numbers. This is a limitation of both the index file format and the current implementation. Eventually these should be replaced with either UInt64 values, or better yet, {@link org.apache.lucene.store.DataOutput#writeVInt VInt} values which have no limit.
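To illustrate the VInt encoding referenced above, a minimal sketch using ByteBuffersDataOutput (values below 128 take a single byte; each additional byte carries 7 more bits, so the encoding itself imposes no fixed 32-bit ceiling):

import org.apache.lucene.store.ByteBuffersDataOutput;

public class VIntDemo {
  public static void main(String[] args) throws Exception {
    ByteBuffersDataOutput out = new ByteBuffersDataOutput();
    out.writeVInt(127);      // fits in 7 bits -> 1 byte
    out.writeVInt(16_384);   // needs 15 bits -> 3 bytes
    System.out.println(out.size()); // 4
  }
}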
+ */ +package org.apache.lucene.codecs.lucene86; diff --git a/lucene/core/src/java/org/apache/lucene/geo/Tessellator.java b/lucene/core/src/java/org/apache/lucene/geo/Tessellator.java index c61fba938c3a..1600955b1726 100644 --- a/lucene/core/src/java/org/apache/lucene/geo/Tessellator.java +++ b/lucene/core/src/java/org/apache/lucene/geo/Tessellator.java @@ -376,7 +376,12 @@ private static Node getSharedVertex(final Node polygon, final Node vertex) { Node next = polygon; do { if (isVertexEquals(next, vertex)) { - return next; + // make sure we are not crossing the polygon. This might happen when several holes share the same polygon vertex. + boolean crosses = GeoUtils.lineCrossesLine(next.previous.getX(), next.previous.getY(), vertex.next.getX(), vertex.next.getY(), + next.next.getX(), next.next.getY(), vertex.previous.getX(), vertex.previous.getY()); + if (crosses == false) { + return next; + } } next = next.next; } while(next != polygon); diff --git a/lucene/core/src/java/org/apache/lucene/index/BinaryDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/BinaryDocValuesWriter.java index 1aeab4c98026..e213a485ea0f 100644 --- a/lucene/core/src/java/org/apache/lucene/index/BinaryDocValuesWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/BinaryDocValuesWriter.java @@ -21,7 +21,6 @@ import org.apache.lucene.codecs.DocValuesConsumer; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.SortField; import org.apache.lucene.store.DataInput; import org.apache.lucene.store.DataOutput; import org.apache.lucene.util.ArrayUtil; @@ -37,7 +36,7 @@ /** Buffers up pending byte[] per doc, then flushes when * segment flushes. */ -class BinaryDocValuesWriter extends DocValuesWriter { +class BinaryDocValuesWriter extends DocValuesWriter { /** Maximum length for a binary field. 
*/ private static final int MAX_LENGTH = ArrayUtil.MAX_ARRAY_LENGTH; @@ -56,6 +55,8 @@ class BinaryDocValuesWriter extends DocValuesWriter { private int lastDocID = -1; private int maxLength = 0; + private PackedLongValues finalLengths; + public BinaryDocValuesWriter(FieldInfo fieldInfo, Counter iwBytesUsed) { this.fieldInfo = fieldInfo; this.bytes = new PagedBytes(BLOCK_BITS); @@ -98,10 +99,6 @@ private void updateBytesUsed() { bytesUsed = newBytesUsed; } - @Override - public void finish(int maxDoc) { - } - private SortingLeafReader.CachedBinaryDVs sortDocValues(int maxDoc, Sorter.DocMap sortMap, BinaryDocValues oldValues) throws IOException { FixedBitSet docsWithField = new FixedBitSet(maxDoc); BytesRef[] values = new BytesRef[maxDoc]; @@ -118,18 +115,23 @@ private SortingLeafReader.CachedBinaryDVs sortDocValues(int maxDoc, Sorter.DocMa } @Override - Sorter.DocComparator getDocComparator(int numDoc, SortField sortField) throws IOException { - throw new IllegalArgumentException("It is forbidden to sort on a binary field"); + BinaryDocValues getDocValues() { + if (finalLengths == null) { + finalLengths = this.lengths.build(); + } + return new BufferedBinaryDocValues(finalLengths, maxLength, bytes.getDataInput(), docsWithField.iterator()); } @Override public void flush(SegmentWriteState state, Sorter.DocMap sortMap, DocValuesConsumer dvConsumer) throws IOException { bytes.freeze(false); - final PackedLongValues lengths = this.lengths.build(); + if (finalLengths == null) { + finalLengths = this.lengths.build(); + } final SortingLeafReader.CachedBinaryDVs sorted; if (sortMap != null) { sorted = sortDocValues(state.segmentInfo.maxDoc(), sortMap, - new BufferedBinaryDocValues(lengths, maxLength, bytes.getDataInput(), docsWithField.iterator())); + new BufferedBinaryDocValues(finalLengths, maxLength, bytes.getDataInput(), docsWithField.iterator())); } else { sorted = null; } @@ -141,7 +143,7 @@ public BinaryDocValues getBinary(FieldInfo fieldInfoIn) { throw new IllegalArgumentException("wrong fieldInfo"); } if (sorted == null) { - return new BufferedBinaryDocValues(lengths, maxLength, bytes.getDataInput(), docsWithField.iterator()); + return new BufferedBinaryDocValues(finalLengths, maxLength, bytes.getDataInput(), docsWithField.iterator()); } else { return new SortingLeafReader.SortingBinaryDocValues(sorted); } @@ -200,9 +202,4 @@ public BytesRef binaryValue() { return value.get(); } } - - @Override - DocIdSetIterator getDocIdSet() { - return docsWithField.iterator(); - } } diff --git a/lucene/core/src/java/org/apache/lucene/index/ByteSliceWriter.java b/lucene/core/src/java/org/apache/lucene/index/ByteSliceWriter.java index b96f7fe1f041..75650049aa2c 100644 --- a/lucene/core/src/java/org/apache/lucene/index/ByteSliceWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/ByteSliceWriter.java @@ -26,7 +26,6 @@ * byte[]. This is used by DocumentsWriter to hold the * posting list for many terms in RAM. 
*/ - final class ByteSliceWriter extends DataOutput { private byte[] slice; diff --git a/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java b/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java index 03cabc13ba03..128aee028323 100644 --- a/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java +++ b/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java @@ -22,11 +22,11 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; +import java.util.Objects; +import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.codecs.DocValuesConsumer; import org.apache.lucene.codecs.DocValuesFormat; @@ -39,8 +39,6 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; -import org.apache.lucene.search.SortedNumericSortField; -import org.apache.lucene.search.SortedSetSortField; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.store.IOContext; import org.apache.lucene.util.ArrayUtil; @@ -48,13 +46,13 @@ import org.apache.lucene.util.BytesRefHash.MaxBytesLengthExceededException; import org.apache.lucene.util.Counter; import org.apache.lucene.util.IOUtils; +import org.apache.lucene.util.InfoStream; import org.apache.lucene.util.RamUsageEstimator; /** Default general purpose indexing chain, which handles * indexing all types of fields. */ final class DefaultIndexingChain extends DocConsumer { final Counter bytesUsed; - final DocumentsWriterPerThread.DocState docState; final DocumentsWriterPerThread docWriter; final FieldInfos.Builder fieldInfos; @@ -74,14 +72,13 @@ final class DefaultIndexingChain extends DocConsumer { // Holds fields seen in each document private PerField[] fields = new PerField[1]; - - private final Set finishedDocValues = new HashSet<>(); + private final InfoStream infoStream; public DefaultIndexingChain(DocumentsWriterPerThread docWriter) { this.docWriter = docWriter; this.fieldInfos = docWriter.getFieldInfosBuilder(); - this.docState = docWriter.docState; this.bytesUsed = docWriter.bytesUsed; + this.infoStream = docWriter.getIndexWriterConfig().getInfoStream(); final TermsHash termVectorsWriter; if (docWriter.getSegmentInfo().getIndexSort() == null) { @@ -94,29 +91,96 @@ public DefaultIndexingChain(DocumentsWriterPerThread docWriter) { termsHash = new FreqProxTermsWriter(docWriter, termVectorsWriter); } + private LeafReader getDocValuesLeafReader() { + return new DocValuesLeafReader() { + @Override + public NumericDocValues getNumericDocValues(String field) { + PerField pf = getPerField(field); + if (pf == null) { + return null; + } + if (pf.fieldInfo.getDocValuesType() == DocValuesType.NUMERIC) { + return (NumericDocValues) pf.docValuesWriter.getDocValues(); + } + return null; + } + + @Override + public BinaryDocValues getBinaryDocValues(String field) { + PerField pf = getPerField(field); + if (pf == null) { + return null; + } + if (pf.fieldInfo.getDocValuesType() == DocValuesType.BINARY) { + return (BinaryDocValues) pf.docValuesWriter.getDocValues(); + } + return null; + } + + @Override + public SortedDocValues getSortedDocValues(String field) throws IOException { + PerField pf = getPerField(field); + if (pf == null) { + return null; + } + if (pf.fieldInfo.getDocValuesType() == DocValuesType.SORTED) { + return (SortedDocValues) 
pf.docValuesWriter.getDocValues(); + } + return null; + } + + @Override + public SortedNumericDocValues getSortedNumericDocValues(String field) throws IOException { + PerField pf = getPerField(field); + if (pf == null) { + return null; + } + if (pf.fieldInfo.getDocValuesType() == DocValuesType.SORTED_NUMERIC) { + return (SortedNumericDocValues) pf.docValuesWriter.getDocValues(); + } + return null; + } + + @Override + public SortedSetDocValues getSortedSetDocValues(String field) throws IOException { + PerField pf = getPerField(field); + if (pf == null) { + return null; + } + if (pf.fieldInfo.getDocValuesType() == DocValuesType.SORTED_SET) { + return (SortedSetDocValues) pf.docValuesWriter.getDocValues(); + } + return null; + } + + @Override + public FieldInfos getFieldInfos() { + return fieldInfos.finish(); + } + + }; + } + private Sorter.DocMap maybeSortSegment(SegmentWriteState state) throws IOException { Sort indexSort = state.segmentInfo.getIndexSort(); if (indexSort == null) { return null; } - List comparators = new ArrayList<>(); + LeafReader docValuesReader = getDocValuesLeafReader(); + + List comparators = new ArrayList<>(); for (int i = 0; i < indexSort.getSort().length; i++) { SortField sortField = indexSort.getSort()[i]; - PerField perField = getPerField(sortField.getField()); - if (perField != null && perField.docValuesWriter != null && - finishedDocValues.contains(perField.fieldInfo.name) == false) { - perField.docValuesWriter.finish(state.segmentInfo.maxDoc()); - Sorter.DocComparator cmp = perField.docValuesWriter.getDocComparator(state.segmentInfo.maxDoc(), sortField); - comparators.add(cmp); - finishedDocValues.add(perField.fieldInfo.name); - } else { - // safe to ignore, sort field with no values or already seen before + IndexSorter sorter = sortField.getIndexSorter(); + if (sorter == null) { + throw new UnsupportedOperationException("Cannot sort index using sort field " + sortField); } + comparators.add(sorter.getDocComparator(docValuesReader, state.segmentInfo.maxDoc())); } Sorter sorter = new Sorter(indexSort); // returns null if the documents are already sorted - return sorter.sort(state.segmentInfo.maxDoc(), comparators.toArray(new Sorter.DocComparator[comparators.size()])); + return sorter.sort(state.segmentInfo.maxDoc(), comparators.toArray(IndexSorter.DocComparator[]::new)); } @Override @@ -128,29 +192,29 @@ public Sorter.DocMap flush(SegmentWriteState state) throws IOException { int maxDoc = state.segmentInfo.maxDoc(); long t0 = System.nanoTime(); writeNorms(state, sortMap); - if (docState.infoStream.isEnabled("IW")) { - docState.infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to write norms"); + if (infoStream.isEnabled("IW")) { + infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to write norms"); } SegmentReadState readState = new SegmentReadState(state.directory, state.segmentInfo, state.fieldInfos, IOContext.READ, state.segmentSuffix); t0 = System.nanoTime(); writeDocValues(state, sortMap); - if (docState.infoStream.isEnabled("IW")) { - docState.infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to write docValues"); + if (infoStream.isEnabled("IW")) { + infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to write docValues"); } t0 = System.nanoTime(); writePoints(state, sortMap); - if (docState.infoStream.isEnabled("IW")) { - docState.infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to write points"); + if (infoStream.isEnabled("IW")) { + infoStream.message("IW", 
((System.nanoTime()-t0)/1000000) + " msec to write points"); } // it's possible all docs hit non-aborting exceptions... t0 = System.nanoTime(); storedFieldsConsumer.finish(maxDoc); storedFieldsConsumer.flush(state, sortMap); - if (docState.infoStream.isEnabled("IW")) { - docState.infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to finish stored fields"); + if (infoStream.isEnabled("IW")) { + infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to finish stored fields"); } t0 = System.nanoTime(); @@ -175,8 +239,8 @@ public Sorter.DocMap flush(SegmentWriteState state) throws IOException { } termsHash.flush(fieldsToFlush, state, sortMap, normsMergeInstance); } - if (docState.infoStream.isEnabled("IW")) { - docState.infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to write postings and finish vectors"); + if (infoStream.isEnabled("IW")) { + infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to write postings and finish vectors"); } // Important to save after asking consumer to flush so @@ -185,8 +249,8 @@ public Sorter.DocMap flush(SegmentWriteState state) throws IOException { // FieldInfo.storePayload. t0 = System.nanoTime(); docWriter.codec.fieldInfosFormat().write(state.directory, state.segmentInfo, "", state.fieldInfos, IOContext.DEFAULT); - if (docState.infoStream.isEnabled("IW")) { - docState.infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to write fieldInfos"); + if (infoStream.isEnabled("IW")) { + infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to write fieldInfos"); } return sortMap; @@ -255,10 +319,6 @@ private void writeDocValues(SegmentWriteState state, Sorter.DocMap sortMap) thro DocValuesFormat fmt = state.segmentInfo.getCodec().docValuesFormat(); dvConsumer = fmt.fieldsConsumer(state); } - - if (finishedDocValues.contains(perField.fieldInfo.name) == false) { - perField.docValuesWriter.finish(maxDoc); - } perField.docValuesWriter.flush(state, sortMap, dvConsumer); perField.docValuesWriter = null; } else if (perField.fieldInfo.getDocValuesType() != DocValuesType.NONE) { @@ -382,7 +442,7 @@ private void finishStoredFields() throws IOException { } @Override - public void processDocument() throws IOException { + public void processDocument(int docID, Iterable document) throws IOException { // How many indexed field names we've seen (collapses // multiple field instances by the same name): @@ -399,23 +459,23 @@ public void processDocument() throws IOException { termsHash.startDocument(); - startStoredFields(docState.docID); + startStoredFields(docID); try { - for (IndexableField field : docState.doc) { - fieldCount = processField(field, fieldGen, fieldCount); + for (IndexableField field : document) { + fieldCount = processField(docID, field, fieldGen, fieldCount); } } finally { if (docWriter.hasHitAbortingException() == false) { // Finish each indexed field name seen in the document: for (int i=0;i fi.putAttribute(k, v)); } - fp = new PerField(docWriter.getIndexCreatedVersionMajor(), fi, invert); + LiveIndexWriterConfig indexWriterConfig = docWriter.getIndexWriterConfig(); + fp = new PerField(docWriter.getIndexCreatedVersionMajor(), fi, invert, + indexWriterConfig.getSimilarity(), indexWriterConfig.getInfoStream(), indexWriterConfig.getAnalyzer()); fp.next = fieldHash[hashPos]; fieldHash[hashPos] = fp; totalFieldCount++; @@ -713,7 +786,7 @@ private final class PerField implements Comparable { // Non-null if this field ever had doc values in this // segment: - DocValuesWriter 
docValuesWriter; + DocValuesWriter docValuesWriter; // Non-null if this field ever had points in this segment: PointValuesWriter pointValuesWriter; @@ -730,11 +803,15 @@ private final class PerField implements Comparable { // reused TokenStream tokenStream; + private final InfoStream infoStream; + private final Analyzer analyzer; - public PerField(int indexCreatedVersionMajor, FieldInfo fieldInfo, boolean invert) { + PerField(int indexCreatedVersionMajor, FieldInfo fieldInfo, boolean invert, Similarity similarity, InfoStream infoStream, Analyzer analyzer) { this.indexCreatedVersionMajor = indexCreatedVersionMajor; this.fieldInfo = fieldInfo; - similarity = docState.similarity; + this.similarity = similarity; + this.infoStream = infoStream; + this.analyzer = analyzer; if (invert) { setInvertState(); } @@ -746,7 +823,7 @@ void setInvertState() { if (fieldInfo.omitsNorms() == false) { assert norms == null; // Even if no documents actually succeed in setting a norm, we still write norms for this segment: - norms = new NormValuesWriter(fieldInfo, docState.docWriter.bytesUsed); + norms = new NormValuesWriter(fieldInfo, bytesUsed); } } @@ -755,7 +832,7 @@ public int compareTo(PerField other) { return this.fieldInfo.name.compareTo(other.fieldInfo.name); } - public void finish() throws IOException { + public void finish(int docID) throws IOException { if (fieldInfo.omitsNorms() == false) { long normValue; if (invertState.length == 0) { @@ -769,7 +846,7 @@ public void finish() throws IOException { throw new IllegalStateException("Similarity " + similarity + " return 0 for non-empty field"); } } - norms.addValue(docState.docID, normValue); + norms.addValue(docID, normValue); } termsHashPerField.finish(); @@ -778,7 +855,7 @@ public void finish() throws IOException { /** Inverts one field for one document; first is true * if this is the first time we are seeing this field * name in this document. */ - public void invert(IndexableField field, boolean first) throws IOException { + public void invert(int docID, IndexableField field, boolean first) throws IOException { if (first) { // First time we're seeing this field (indexed) in // this document: @@ -794,7 +871,7 @@ public void invert(IndexableField field, boolean first) throws IOException { fieldInfo.setOmitsNorms(); } - final boolean analyzed = fieldType.tokenized() && docState.analyzer != null; + final boolean analyzed = fieldType.tokenized() && analyzer != null; /* * To assist people in tracking down problems in analysis components, we wish to write the field name to the infostream @@ -802,7 +879,7 @@ public void invert(IndexableField field, boolean first) throws IOException { * but rather a finally that takes note of the problem. 
*/ boolean succeededInProcessingField = false; - try (TokenStream stream = tokenStream = field.tokenStream(docState.analyzer, tokenStream)) { + try (TokenStream stream = tokenStream = field.tokenStream(analyzer, tokenStream)) { // reset the TokenStream to the first token stream.reset(); invertState.setAttributeSource(stream); @@ -858,14 +935,14 @@ public void invert(IndexableField field, boolean first) throws IOException { // corrupt and should not be flushed to a // new segment: try { - termsHashPerField.add(); + termsHashPerField.add(invertState.termAttribute.getBytesRef(), docID); } catch (MaxBytesLengthExceededException e) { byte[] prefix = new byte[30]; BytesRef bigTerm = invertState.termAttribute.getBytesRef(); System.arraycopy(bigTerm.bytes, bigTerm.offset, prefix, 0, 30); String msg = "Document contains at least one immense term in field=\"" + fieldInfo.name + "\" (whose UTF8 encoding is longer than the max length " + DocumentsWriterPerThread.MAX_TERM_LENGTH_UTF8 + "), all of which were skipped. Please correct the analyzer to not produce such terms. The prefix of the first immense term is: '" + Arrays.toString(prefix) + "...', original message: " + e.getMessage(); - if (docState.infoStream.isEnabled("IW")) { - docState.infoStream.message("IW", "ERROR: " + msg); + if (infoStream.isEnabled("IW")) { + infoStream.message("IW", "ERROR: " + msg); } // Document will be deleted above: throw new IllegalArgumentException(msg, e); @@ -886,14 +963,14 @@ public void invert(IndexableField field, boolean first) throws IOException { /* if there is an exception coming through, we won't set this to true here:*/ succeededInProcessingField = true; } finally { - if (!succeededInProcessingField && docState.infoStream.isEnabled("DW")) { - docState.infoStream.message("DW", "An exception was thrown while processing field " + fieldInfo.name); + if (!succeededInProcessingField && infoStream.isEnabled("DW")) { + infoStream.message("DW", "An exception was thrown while processing field " + fieldInfo.name); } } if (analyzed) { - invertState.position += docState.analyzer.getPositionIncrementGap(fieldInfo.name); - invertState.offset += docState.analyzer.getOffsetGap(fieldInfo.name); + invertState.position += analyzer.getPositionIncrementGap(fieldInfo.name); + invertState.offset += analyzer.getOffsetGap(fieldInfo.name); } } } @@ -907,7 +984,7 @@ DocIdSetIterator getHasDocValues(String field) { return null; } - return perField.docValuesWriter.getDocIdSet(); + return perField.docValuesWriter.getDocValues(); } } return null; diff --git a/lucene/core/src/java/org/apache/lucene/index/DocConsumer.java b/lucene/core/src/java/org/apache/lucene/index/DocConsumer.java index d124434a5f9c..90a98f43e547 100644 --- a/lucene/core/src/java/org/apache/lucene/index/DocConsumer.java +++ b/lucene/core/src/java/org/apache/lucene/index/DocConsumer.java @@ -22,7 +22,7 @@ import org.apache.lucene.search.DocIdSetIterator; abstract class DocConsumer { - abstract void processDocument() throws IOException; + abstract void processDocument(int docId, Iterable document) throws IOException; abstract Sorter.DocMap flush(final SegmentWriteState state) throws IOException; abstract void abort() throws IOException; diff --git a/lucene/core/src/java/org/apache/lucene/index/DocValues.java b/lucene/core/src/java/org/apache/lucene/index/DocValues.java index 63488d038084..f90d715e69cc 100644 --- a/lucene/core/src/java/org/apache/lucene/index/DocValues.java +++ b/lucene/core/src/java/org/apache/lucene/index/DocValues.java @@ -168,7 +168,7 @@ public int 
getValueCount() { /** * An empty SortedNumericDocValues which returns zero values for every document */ - public static final SortedNumericDocValues emptySortedNumeric(int maxDoc) { + public static final SortedNumericDocValues emptySortedNumeric() { return new SortedNumericDocValues() { private int doc = -1; @@ -387,7 +387,7 @@ public static SortedNumericDocValues getSortedNumeric(LeafReader reader, String NumericDocValues single = reader.getNumericDocValues(field); if (single == null) { checkField(reader, field, DocValuesType.SORTED_NUMERIC, DocValuesType.NUMERIC); - return emptySortedNumeric(reader.maxDoc()); + return emptySortedNumeric(); } return singleton(single); } diff --git a/lucene/core/src/java/org/apache/lucene/index/DocValuesLeafReader.java b/lucene/core/src/java/org/apache/lucene/index/DocValuesLeafReader.java new file mode 100644 index 000000000000..93b7f4988d68 --- /dev/null +++ b/lucene/core/src/java/org/apache/lucene/index/DocValuesLeafReader.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.lucene.index; + +import java.io.IOException; + +import org.apache.lucene.util.Bits; + +abstract class DocValuesLeafReader extends LeafReader { + @Override + public final CacheHelper getCoreCacheHelper() { + throw new UnsupportedOperationException(); + } + + @Override + public final Terms terms(String field) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public final NumericDocValues getNormValues(String field) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public final Bits getLiveDocs() { + throw new UnsupportedOperationException(); + } + + @Override + public final PointValues getPointValues(String field) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public final void checkIntegrity() throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public final LeafMetaData getMetaData() { + throw new UnsupportedOperationException(); + } + + @Override + public final Fields getTermVectors(int docID) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public final int numDocs() { + throw new UnsupportedOperationException(); + } + + @Override + public final int maxDoc() { + throw new UnsupportedOperationException(); + } + + @Override + public final void document(int docID, StoredFieldVisitor visitor) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + protected final void doClose() throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public final CacheHelper getReaderCacheHelper() { + throw new UnsupportedOperationException(); + } +} diff --git a/lucene/core/src/java/org/apache/lucene/index/DocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/DocValuesWriter.java index b739b14a2a77..4098cb05cf7f 100644 --- a/lucene/core/src/java/org/apache/lucene/index/DocValuesWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/DocValuesWriter.java @@ -21,12 +21,8 @@ import org.apache.lucene.codecs.DocValuesConsumer; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.SortField; -abstract class DocValuesWriter { - abstract void finish(int numDoc); +abstract class DocValuesWriter { abstract void flush(SegmentWriteState state, Sorter.DocMap sortMap, DocValuesConsumer consumer) throws IOException; - abstract Sorter.DocComparator getDocComparator(int numDoc, SortField sortField) throws IOException; - abstract DocIdSetIterator getDocIdSet(); - + abstract T getDocValues(); } diff --git a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java index d4083c44e3ef..9f01f884b8c9 100644 --- a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java @@ -137,7 +137,7 @@ final class DocumentsWriter implements Closeable, Accountable { final FieldInfos.Builder infos = new FieldInfos.Builder(globalFieldNumberMap); return new DocumentsWriterPerThread(indexCreatedVersionMajor, segmentNameSupplier.get(), directoryOrig, - directory, config, infoStream, deleteQueue, infos, + directory, config, deleteQueue, infos, pendingNumDocs, enableTestPoints); }); this.pendingNumDocs = pendingNumDocs; diff --git a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java 
index d810234d568e..48f676e56226 100644 --- a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java +++ b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java @@ -26,11 +26,9 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.ReentrantLock; -import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.codecs.Codec; import org.apache.lucene.index.DocumentsWriterDeleteQueue.DeleteSlice; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FlushInfo; import org.apache.lucene.store.IOContext; @@ -52,6 +50,10 @@ final class DocumentsWriterPerThread { + LiveIndexWriterConfig getIndexWriterConfig() { + return indexWriterConfig; + } + /** * The IndexingChain must define the {@link #getChain(DocumentsWriterPerThread)} method * which returns the DocConsumer that the DocumentsWriter calls to process the @@ -85,27 +87,6 @@ DocConsumer getChain(DocumentsWriterPerThread documentsWriterPerThread) { } }; - static class DocState { - final DocumentsWriterPerThread docWriter; - final Analyzer analyzer; - InfoStream infoStream; - Similarity similarity; - int docID; - Iterable doc; - - DocState(DocumentsWriterPerThread docWriter, Analyzer analyzer, InfoStream infoStream) { - this.docWriter = docWriter; - this.infoStream = infoStream; - this.analyzer = analyzer; - } - - public void clear() { - // don't hold onto doc nor analyzer, in case it is - // largish: - doc = null; - } - } - static final class FlushedSegment { final SegmentCommitInfo segmentInfo; final FieldInfos fieldInfos; @@ -150,7 +131,6 @@ void abort() throws IOException{ private final static boolean INFO_VERBOSE = false; final Codec codec; final TrackingDirectoryWrapper directory; - final DocState docState; private final DocConsumer consumer; final Counter bytesUsed; @@ -179,15 +159,13 @@ void abort() throws IOException{ private int numDeletedDocIds = 0; - DocumentsWriterPerThread(int indexVersionCreated, String segmentName, Directory directoryOrig, Directory directory, LiveIndexWriterConfig indexWriterConfig, InfoStream infoStream, DocumentsWriterDeleteQueue deleteQueue, + DocumentsWriterPerThread(int indexVersionCreated, String segmentName, Directory directoryOrig, Directory directory, LiveIndexWriterConfig indexWriterConfig, DocumentsWriterDeleteQueue deleteQueue, FieldInfos.Builder fieldInfos, AtomicLong pendingNumDocs, boolean enableTestPoints) throws IOException { this.directory = new TrackingDirectoryWrapper(directory); this.fieldInfos = fieldInfos; this.indexWriterConfig = indexWriterConfig; - this.infoStream = infoStream; + this.infoStream = indexWriterConfig.getInfoStream(); this.codec = indexWriterConfig.getCodec(); - this.docState = new DocState(this, indexWriterConfig.getAnalyzer(), infoStream); - this.docState.similarity = indexWriterConfig.getSimilarity(); this.pendingNumDocs = pendingNumDocs; bytesUsed = Counter.newCounter(); byteBlockAllocator = new DirectTrackingAllocator(bytesUsed); @@ -239,7 +217,7 @@ long updateDocuments(Iterable> docs testPoint("DocumentsWriterPerThread addDocuments start"); assert hasHitAbortingException() == false: "DWPT has hit aborting exception but is still indexing"; if (INFO_VERBOSE && infoStream.isEnabled("DWPT")) { - infoStream.message("DWPT", Thread.currentThread().getName() + " update delTerm=" + deleteNode + " docID=" + docState.docID + " seg=" + segmentInfo.name); + 
infoStream.message("DWPT", Thread.currentThread().getName() + " update delTerm=" + deleteNode + " docID=" + numDocsInRAM + " seg=" + segmentInfo.name); } final int docsInRamBefore = numDocsInRAM; boolean allDocsIndexed = false; @@ -252,13 +230,7 @@ long updateDocuments(Iterable> docs // it's very hard to fix (we can't easily distinguish aborting // vs non-aborting exceptions): reserveOneDoc(); - docState.doc = doc; - docState.docID = numDocsInRAM; - try { - consumer.processDocument(); - } finally { - numDocsInRAM++; // we count the doc anyway even in the case of an exception - } + consumer.processDocument(numDocsInRAM++, doc); } allDocsIndexed = true; return finishDocuments(deleteNode, docsInRamBefore); @@ -268,7 +240,6 @@ long updateDocuments(Iterable> docs // go and mark all docs from this block as deleted deleteLastDocs(numDocsInRAM - docsInRamBefore); } - docState.clear(); } } finally { maybeAbort("updateDocuments", flushNotifications); @@ -400,8 +371,8 @@ FlushedSegment flush(DocumentsWriter.FlushNotifications flushNotifications) thro final Sorter.DocMap sortMap; try { DocIdSetIterator softDeletedDocs; - if (indexWriterConfig.getSoftDeletesField() != null) { - softDeletedDocs = consumer.getHasDocValues(indexWriterConfig.getSoftDeletesField()); + if (getIndexWriterConfig().getSoftDeletesField() != null) { + softDeletedDocs = consumer.getHasDocValues(getIndexWriterConfig().getSoftDeletesField()); } else { softDeletedDocs = null; } @@ -509,7 +480,7 @@ void sealFlushedSegment(FlushedSegment flushedSegment, Sorter.DocMap sortMap, Do boolean success = false; try { - if (indexWriterConfig.getUseCompoundFile()) { + if (getIndexWriterConfig().getUseCompoundFile()) { Set originalFiles = newSegment.info.files(); // TODO: like addIndexes, we are relying on createCompoundFile to successfully cleanup... 
IndexWriter.createCompoundFile(infoStream, new TrackingDirectoryWrapper(directory), newSegment.info, context, flushNotifications::deleteUnusedFiles); diff --git a/lucene/core/src/java/org/apache/lucene/index/FilterMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/FilterMergePolicy.java index eb634b48a6b7..b4e33f8f6b4f 100644 --- a/lucene/core/src/java/org/apache/lucene/index/FilterMergePolicy.java +++ b/lucene/core/src/java/org/apache/lucene/index/FilterMergePolicy.java @@ -57,6 +57,11 @@ public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos, Mer return in.findForcedDeletesMerges(segmentInfos, mergeContext); } + @Override + public MergeSpecification findFullFlushMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos, MergeContext mergeContext) throws IOException { + return in.findFullFlushMerges(mergeTrigger, segmentInfos, mergeContext); + } + @Override public boolean useCompoundFile(SegmentInfos infos, SegmentCommitInfo mergedInfo, MergeContext mergeContext) throws IOException { diff --git a/lucene/core/src/java/org/apache/lucene/index/FreqProxFields.java b/lucene/core/src/java/org/apache/lucene/index/FreqProxFields.java index 4ec9fd5662bb..db1748fcb9a8 100644 --- a/lucene/core/src/java/org/apache/lucene/index/FreqProxFields.java +++ b/lucene/core/src/java/org/apache/lucene/index/FreqProxFields.java @@ -39,7 +39,7 @@ class FreqProxFields extends Fields { public FreqProxFields(List fieldList) { // NOTE: fields are already sorted by field name for(FreqProxTermsWriterPerField field : fieldList) { - fields.put(field.fieldInfo.name, field); + fields.put(field.getFieldName(), field); } } @@ -55,7 +55,6 @@ public Terms terms(String field) throws IOException { @Override public int size() { - //return fields.size(); throw new UnsupportedOperationException(); } @@ -75,31 +74,27 @@ public TermsEnum iterator() { @Override public long size() { - //return terms.termsHashPerField.bytesHash.size(); throw new UnsupportedOperationException(); } @Override public long getSumTotalTermFreq() { - //return terms.sumTotalTermFreq; throw new UnsupportedOperationException(); } @Override public long getSumDocFreq() { - //return terms.sumDocFreq; throw new UnsupportedOperationException(); } @Override public int getDocCount() { - //return terms.docCount; throw new UnsupportedOperationException(); } @Override public boolean hasFreqs() { - return terms.fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) >= 0; + return terms.indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) >= 0; } @Override @@ -107,7 +102,7 @@ public boolean hasOffsets() { // NOTE: the in-memory buffer may have indexed offsets // because that's what FieldInfo said when we started, // but during indexing this may have been downgraded: - return terms.fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; + return terms.indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; } @Override @@ -115,7 +110,7 @@ public boolean hasPositions() { // NOTE: the in-memory buffer may have indexed positions // because that's what FieldInfo said when we started, // but during indexing this may have been downgraded: - return terms.fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; + return terms.indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; } @Override @@ -132,10 +127,10 @@ private static class FreqProxTermsEnum extends BaseTermsEnum { final int numTerms; int ord; - public 
FreqProxTermsEnum(FreqProxTermsWriterPerField terms) { + FreqProxTermsEnum(FreqProxTermsWriterPerField terms) { this.terms = terms; - this.numTerms = terms.bytesHash.size(); - sortedTermIDs = terms.sortedTermIDs; + this.numTerms = terms.getNumTerms(); + sortedTermIDs = terms.getSortedTermIDs(); assert sortedTermIDs != null; postingsArray = (FreqProxPostingsArray) terms.postingsArray; } diff --git a/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriter.java b/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriter.java index 5180aafd63b1..bbc7b18dcad2 100644 --- a/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriter.java @@ -75,9 +75,9 @@ public void flush(Map fieldsToFlush, final SegmentWrit for (TermsHashPerField f : fieldsToFlush.values()) { final FreqProxTermsWriterPerField perField = (FreqProxTermsWriterPerField) f; - if (perField.bytesHash.size() > 0) { - perField.sortPostings(); - assert perField.fieldInfo.getIndexOptions() != IndexOptions.NONE; + if (perField.getNumTerms() > 0) { + perField.sortTerms(); + assert perField.indexOptions != IndexOptions.NONE; allFields.add(perField); } } diff --git a/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriterPerField.java b/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriterPerField.java index 7d77d0b62dde..1b87cbb5c0d6 100644 --- a/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriterPerField.java +++ b/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriterPerField.java @@ -20,6 +20,7 @@ import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PayloadAttribute; +import org.apache.lucene.analysis.tokenattributes.TermFrequencyAttribute; import org.apache.lucene.util.BytesRef; // TODO: break into separate freq and prox writers as @@ -28,26 +29,25 @@ final class FreqProxTermsWriterPerField extends TermsHashPerField { private FreqProxPostingsArray freqProxPostingsArray; + private final FieldInvertState fieldState; + private final FieldInfo fieldInfo; final boolean hasFreq; final boolean hasProx; final boolean hasOffsets; PayloadAttribute payloadAttribute; OffsetAttribute offsetAttribute; - long sumTotalTermFreq; - long sumDocFreq; - - // How many docs have this field: - int docCount; + TermFrequencyAttribute termFreqAtt; /** Set to true if any token had a payload in the current * segment. */ boolean sawPayloads; - public FreqProxTermsWriterPerField(FieldInvertState invertState, TermsHash termsHash, FieldInfo fieldInfo, TermsHashPerField nextPerField) { - super(fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 ? 2 : 1, invertState, termsHash, nextPerField, fieldInfo); - IndexOptions indexOptions = fieldInfo.getIndexOptions(); - assert indexOptions != IndexOptions.NONE; + FreqProxTermsWriterPerField(FieldInvertState invertState, TermsHash termsHash, FieldInfo fieldInfo, TermsHashPerField nextPerField) { + super(fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 ? 
2 : 1, + termsHash.intPool, termsHash.bytePool, termsHash.termBytePool, termsHash.bytesUsed, nextPerField, fieldInfo.name, fieldInfo.getIndexOptions()); + this.fieldState = invertState; + this.fieldInfo = fieldInfo; hasFreq = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) >= 0; hasProx = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; hasOffsets = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; @@ -56,12 +56,6 @@ public FreqProxTermsWriterPerField(FieldInvertState invertState, TermsHash terms @Override void finish() throws IOException { super.finish(); - sumDocFreq += fieldState.uniqueTermCount; - sumTotalTermFreq += fieldState.length; - if (fieldState.length > 0) { - docCount++; - } - if (sawPayloads) { fieldInfo.setStorePayloads(); } @@ -70,6 +64,7 @@ void finish() throws IOException { @Override boolean start(IndexableField f, boolean first) { super.start(f, first); + termFreqAtt = fieldState.termFreqAttribute; payloadAttribute = fieldState.payloadAttribute; offsetAttribute = fieldState.offsetAttribute; return true; @@ -104,18 +99,18 @@ void writeOffsets(int termID, int offsetAccum) { } @Override - void newTerm(final int termID) { + void newTerm(final int termID, final int docID) { // First time we're seeing this term since the last // flush final FreqProxPostingsArray postings = freqProxPostingsArray; - postings.lastDocIDs[termID] = docState.docID; + postings.lastDocIDs[termID] = docID; if (!hasFreq) { assert postings.termFreqs == null; - postings.lastDocCodes[termID] = docState.docID; + postings.lastDocCodes[termID] = docID; fieldState.maxTermFrequency = Math.max(1, fieldState.maxTermFrequency); } else { - postings.lastDocCodes[termID] = docState.docID << 1; + postings.lastDocCodes[termID] = docID << 1; postings.termFreqs[termID] = getTermFreq(); if (hasProx) { writeProx(termID, fieldState.position); @@ -131,25 +126,25 @@ void newTerm(final int termID) { } @Override - void addTerm(final int termID) { + void addTerm(final int termID, final int docID) { final FreqProxPostingsArray postings = freqProxPostingsArray; assert !hasFreq || postings.termFreqs[termID] > 0; if (!hasFreq) { assert postings.termFreqs == null; if (termFreqAtt.getTermFrequency() != 1) { - throw new IllegalStateException("field \"" + fieldInfo.name + "\": must index term freq while using custom TermFrequencyAttribute"); + throw new IllegalStateException("field \"" + getFieldName() + "\": must index term freq while using custom TermFrequencyAttribute"); } - if (docState.docID != postings.lastDocIDs[termID]) { + if (docID != postings.lastDocIDs[termID]) { // New document; now encode docCode for previous doc: - assert docState.docID > postings.lastDocIDs[termID]; + assert docID > postings.lastDocIDs[termID]; writeVInt(0, postings.lastDocCodes[termID]); - postings.lastDocCodes[termID] = docState.docID - postings.lastDocIDs[termID]; - postings.lastDocIDs[termID] = docState.docID; + postings.lastDocCodes[termID] = docID - postings.lastDocIDs[termID]; + postings.lastDocIDs[termID] = docID; fieldState.uniqueTermCount++; } - } else if (docState.docID != postings.lastDocIDs[termID]) { - assert docState.docID > postings.lastDocIDs[termID]:"id: "+docState.docID + " postings ID: "+ postings.lastDocIDs[termID] + " termID: "+termID; + } else if (docID != postings.lastDocIDs[termID]) { + assert docID > postings.lastDocIDs[termID]:"id: "+docID + " postings ID: "+ postings.lastDocIDs[termID] + " termID: "+termID; // Term not yet seen in the current doc but previously // 
seen in other doc(s) since the last flush @@ -165,8 +160,8 @@ void addTerm(final int termID) { // Init freq for the current document postings.termFreqs[termID] = getTermFreq(); fieldState.maxTermFrequency = Math.max(postings.termFreqs[termID], fieldState.maxTermFrequency); - postings.lastDocCodes[termID] = (docState.docID - postings.lastDocIDs[termID]) << 1; - postings.lastDocIDs[termID] = docState.docID; + postings.lastDocCodes[termID] = (docID - postings.lastDocIDs[termID]) << 1; + postings.lastDocIDs[termID] = docID; if (hasProx) { writeProx(termID, fieldState.position); if (hasOffsets) { @@ -193,7 +188,7 @@ private int getTermFreq() { int freq = termFreqAtt.getTermFrequency(); if (freq != 1) { if (hasProx) { - throw new IllegalStateException("field \"" + fieldInfo.name + "\": cannot index positions while using custom TermFrequencyAttribute"); + throw new IllegalStateException("field \"" + getFieldName() + "\": cannot index positions while using custom TermFrequencyAttribute"); } } @@ -207,8 +202,6 @@ public void newPostingsArray() { @Override ParallelPostingsArray createPostingsArray(int size) { - IndexOptions indexOptions = fieldInfo.getIndexOptions(); - assert indexOptions != IndexOptions.NONE; boolean hasFreq = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) >= 0; boolean hasProx = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; boolean hasOffsets = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexSorter.java b/lucene/core/src/java/org/apache/lucene/index/IndexSorter.java new file mode 100644 index 000000000000..81fdf6207d2f --- /dev/null +++ b/lucene/core/src/java/org/apache/lucene/index/IndexSorter.java @@ -0,0 +1,448 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.index; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; + +import org.apache.lucene.search.FieldComparator; +import org.apache.lucene.search.SortField; +import org.apache.lucene.util.LongValues; +import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.packed.PackedInts; + +import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; + +/** + * Handles how documents should be sorted in an index, both within a segment and between + * segments. 
+ * + * Implementers must provide the following methods: + * {@link #getDocComparator(LeafReader,int)} - an object that determines how documents within a segment are to be sorted + * {@link #getComparableProviders(List)} - an array of objects that return a sortable long value per document and segment + * {@link #getProviderName()} - the SPI-registered name of a {@link SortFieldProvider} to serialize the sort + * + * The companion {@link SortFieldProvider} should be registered with SPI via {@code META-INF/services} + */ +public interface IndexSorter { + + /** Used for sorting documents across segments */ + interface ComparableProvider { + /** + * Returns a long so that the natural ordering of long values matches the + * ordering of doc IDs for the given comparator + */ + long getAsComparableLong(int docID) throws IOException; + } + + /** A comparator of doc IDs, used for sorting documents within a segment */ + interface DocComparator { + /** Compare docID1 against docID2. The contract for the return value is the + * same as {@link Comparator#compare(Object, Object)}. */ + int compare(int docID1, int docID2); + } + + /** + * Get an array of {@link ComparableProvider}, one per segment, for merge sorting documents in different segments + * @param readers the readers to be merged + */ + ComparableProvider[] getComparableProviders(List readers) throws IOException; + + /** + * Get a comparator that determines the sort order of docs within a single Reader. + * + * NB We cannot simply use the {@link FieldComparator} API because it requires docIDs to be sent + * in-order. The default implementations allocate array[maxDoc] to hold native values for comparison, + * but 1) they are transient (only alive while sorting this one segment) and 2) in the typical + * index sorting case, they are only used to sort newly flushed segments, which will be smaller + * than merged segments + * + * @param reader the Reader to sort + * @param maxDoc the number of documents in the Reader + */ + DocComparator getDocComparator(LeafReader reader, int maxDoc) throws IOException; + + /** + * The SPI-registered name of a {@link SortFieldProvider} that will deserialize the parent SortField + */ + String getProviderName(); + + /** + * Provide a NumericDocValues instance for a LeafReader + */ + interface NumericDocValuesProvider { + /** + * Returns the NumericDocValues instance for this LeafReader + */ + NumericDocValues get(LeafReader reader) throws IOException; + } + + /** + * Provide a SortedDocValues instance for a LeafReader + */ + interface SortedDocValuesProvider { + /** + * Returns the SortedDocValues instance for this LeafReader + */ + SortedDocValues get(LeafReader reader) throws IOException; + } + + /** + * Sorts documents based on integer values from a NumericDocValues instance + */ + final class IntSorter implements IndexSorter { + + private final Integer missingValue; + private final int reverseMul; + private final NumericDocValuesProvider valuesProvider; + private final String providerName; + + /** + * Creates a new IntSorter + */ + public IntSorter(String providerName, Integer missingValue, boolean reverse, NumericDocValuesProvider valuesProvider) { + this.missingValue = missingValue; + this.reverseMul = reverse ? 
-1 : 1; + this.valuesProvider = valuesProvider; + this.providerName = providerName; + } + + @Override + public ComparableProvider[] getComparableProviders(List readers) throws IOException { + ComparableProvider[] providers = new ComparableProvider[readers.size()]; + final long missingValue; + if (this.missingValue != null) { + missingValue = this.missingValue; + } else { + missingValue = 0L; + } + + for(int readerIndex=0;readerIndex { + if (values.advanceExact(docID)) { + return values.longValue(); + } else { + return missingValue; + } + }; + } + return providers; + } + + @Override + public DocComparator getDocComparator(LeafReader reader, int maxDoc) throws IOException { + final NumericDocValues dvs = valuesProvider.get(reader); + int[] values = new int[maxDoc]; + if (this.missingValue != null) { + Arrays.fill(values, this.missingValue); + } + while (true) { + int docID = dvs.nextDoc(); + if (docID == NO_MORE_DOCS) { + break; + } + values[docID] = (int) dvs.longValue(); + } + + return (docID1, docID2) -> reverseMul * Integer.compare(values[docID1], values[docID2]); + } + + @Override + public String getProviderName() { + return providerName; + } + } + + /** + * Sorts documents based on long values from a NumericDocValues instance + */ + final class LongSorter implements IndexSorter { + + private final String providerName; + private final Long missingValue; + private final int reverseMul; + private final NumericDocValuesProvider valuesProvider; + + /** Creates a new LongSorter */ + public LongSorter(String providerName, Long missingValue, boolean reverse, NumericDocValuesProvider valuesProvider) { + this.providerName = providerName; + this.missingValue = missingValue; + this.reverseMul = reverse ? -1 : 1; + this.valuesProvider = valuesProvider; + } + + @Override + public ComparableProvider[] getComparableProviders(List readers) throws IOException { + ComparableProvider[] providers = new ComparableProvider[readers.size()]; + final long missingValue; + if (this.missingValue != null) { + missingValue = this.missingValue; + } else { + missingValue = 0L; + } + + for(int readerIndex=0;readerIndex { + if (values.advanceExact(docID)) { + return values.longValue(); + } else { + return missingValue; + } + }; + } + return providers; + } + + @Override + public DocComparator getDocComparator(LeafReader reader, int maxDoc) throws IOException { + final NumericDocValues dvs = valuesProvider.get(reader); + long[] values = new long[maxDoc]; + if (this.missingValue != null) { + Arrays.fill(values, this.missingValue); + } + while (true) { + int docID = dvs.nextDoc(); + if (docID == NO_MORE_DOCS) { + break; + } + values[docID] = dvs.longValue(); + } + + return (docID1, docID2) -> reverseMul * Long.compare(values[docID1], values[docID2]); + } + + @Override + public String getProviderName() { + return providerName; + } + } + + /** + * Sorts documents based on float values from a NumericDocValues instance + */ + final class FloatSorter implements IndexSorter { + + private final String providerName; + private final Float missingValue; + private final int reverseMul; + private final NumericDocValuesProvider valuesProvider; + + /** Creates a new FloatSorter */ + public FloatSorter(String providerName, Float missingValue, boolean reverse, NumericDocValuesProvider valuesProvider) { + this.providerName = providerName; + this.missingValue = missingValue; + this.reverseMul = reverse ? 
-1 : 1; + this.valuesProvider = valuesProvider; + } + + @Override + public ComparableProvider[] getComparableProviders(List<CodecReader> readers) throws IOException { + ComparableProvider[] providers = new ComparableProvider[readers.size()]; + final float missingValue; + if (this.missingValue != null) { + missingValue = this.missingValue; + } else { + missingValue = 0.0f; + } + + for(int readerIndex=0;readerIndex<readers.size();readerIndex++) { + final NumericDocValues values = valuesProvider.get(readers.get(readerIndex)); + providers[readerIndex] = docID -> { + float value = missingValue; + if (values.advanceExact(docID)) { + value = Float.intBitsToFloat((int) values.longValue()); + } + return NumericUtils.floatToSortableInt(value); + }; + } + return providers; + } + + @Override + public DocComparator getDocComparator(LeafReader reader, int maxDoc) throws IOException { + final NumericDocValues dvs = valuesProvider.get(reader); + float[] values = new float[maxDoc]; + if (this.missingValue != null) { + Arrays.fill(values, this.missingValue); + } + while (true) { + int docID = dvs.nextDoc(); + if (docID == NO_MORE_DOCS) { + break; + } + values[docID] = Float.intBitsToFloat((int) dvs.longValue()); + } + + return (docID1, docID2) -> reverseMul * Float.compare(values[docID1], values[docID2]); + } + + @Override + public String getProviderName() { + return providerName; + } + } + + /** + * Sorts documents based on double values from a NumericDocValues instance + */ + final class DoubleSorter implements IndexSorter { + + private final String providerName; + private final Double missingValue; + private final int reverseMul; + private final NumericDocValuesProvider valuesProvider; + + /** Creates a new DoubleSorter */ + public DoubleSorter(String providerName, Double missingValue, boolean reverse, NumericDocValuesProvider valuesProvider) { + this.providerName = providerName; + this.missingValue = missingValue; + this.reverseMul = reverse ?
-1 : 1; + this.valuesProvider = valuesProvider; + } + + @Override + public ComparableProvider[] getComparableProviders(List<CodecReader> readers) throws IOException { + ComparableProvider[] providers = new ComparableProvider[readers.size()]; + final double missingValue; + if (this.missingValue != null) { + missingValue = this.missingValue; + } else { + missingValue = 0.0d; + } + + for(int readerIndex=0;readerIndex<readers.size();readerIndex++) { + final NumericDocValues values = valuesProvider.get(readers.get(readerIndex)); + providers[readerIndex] = docID -> { + double value = missingValue; + if (values.advanceExact(docID)) { + value = Double.longBitsToDouble(values.longValue()); + } + return NumericUtils.doubleToSortableLong(value); + }; + } + return providers; + } + + @Override + public DocComparator getDocComparator(LeafReader reader, int maxDoc) throws IOException { + final NumericDocValues dvs = valuesProvider.get(reader); + double[] values = new double[maxDoc]; + if (missingValue != null) { + Arrays.fill(values, missingValue); + } + while (true) { + int docID = dvs.nextDoc(); + if (docID == NO_MORE_DOCS) { + break; + } + values[docID] = Double.longBitsToDouble(dvs.longValue()); + } + + return (docID1, docID2) -> reverseMul * Double.compare(values[docID1], values[docID2]); + } + + @Override + public String getProviderName() { + return providerName; + } + } + + /** + * Sorts documents based on terms from a SortedDocValues instance + */ + final class StringSorter implements IndexSorter { + + private final String providerName; + private final Object missingValue; + private final int reverseMul; + private final SortedDocValuesProvider valuesProvider; + + /** Creates a new StringSorter */ + public StringSorter(String providerName, Object missingValue, boolean reverse, SortedDocValuesProvider valuesProvider) { + this.providerName = providerName; + this.missingValue = missingValue; + this.reverseMul = reverse ?
-1 : 1; + this.valuesProvider = valuesProvider; + } + + @Override + public ComparableProvider[] getComparableProviders(List readers) throws IOException { + final ComparableProvider[] providers = new ComparableProvider[readers.size()]; + final SortedDocValues[] values = new SortedDocValues[readers.size()]; + for(int i=0;i { + if (readerValues.advanceExact(docID)) { + // translate segment's ord to global ord space: + return globalOrds.get(readerValues.ordValue()); + } else { + return missingOrd; + } + }; + } + return providers; + } + + @Override + public DocComparator getDocComparator(LeafReader reader, int maxDoc) throws IOException { + final SortedDocValues sorted = valuesProvider.get(reader); + final int missingOrd; + if (missingValue == SortField.STRING_LAST) { + missingOrd = Integer.MAX_VALUE; + } else { + missingOrd = Integer.MIN_VALUE; + } + + final int[] ords = new int[maxDoc]; + Arrays.fill(ords, missingOrd); + int docID; + while ((docID = sorted.nextDoc()) != NO_MORE_DOCS) { + ords[docID] = sorted.ordValue(); + } + + return (docID1, docID2) -> reverseMul * Integer.compare(ords[docID1], ords[docID2]); + } + + @Override + public String getProviderName() { + return providerName; + } + } + +} diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java index 464be01d97ae..bd273a1344fe 100644 --- a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java @@ -33,6 +33,7 @@ import java.util.Set; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; @@ -2129,12 +2130,12 @@ public final void maybeMerge() throws IOException { private final void maybeMerge(MergePolicy mergePolicy, MergeTrigger trigger, int maxNumSegments) throws IOException { ensureOpen(false); - if (updatePendingMerges(mergePolicy, trigger, maxNumSegments)) { + if (updatePendingMerges(mergePolicy, trigger, maxNumSegments) != null) { mergeScheduler.merge(mergeSource, trigger); } } - private synchronized boolean updatePendingMerges(MergePolicy mergePolicy, MergeTrigger trigger, int maxNumSegments) + private synchronized MergePolicy.MergeSpecification updatePendingMerges(MergePolicy mergePolicy, MergeTrigger trigger, int maxNumSegments) throws IOException { // In case infoStream was disabled on init, but then enabled at some @@ -2144,22 +2145,21 @@ private synchronized boolean updatePendingMerges(MergePolicy mergePolicy, MergeT assert maxNumSegments == UNBOUNDED_MAX_MERGE_SEGMENTS || maxNumSegments > 0; assert trigger != null; if (stopMerges) { - return false; + return null; } // Do not start new merges if disaster struck if (tragedy.get() != null) { - return false; + return null; } - boolean newMergesFound = false; + final MergePolicy.MergeSpecification spec; if (maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS) { assert trigger == MergeTrigger.EXPLICIT || trigger == MergeTrigger.MERGE_FINISHED : "Expected EXPLICT or MERGE_FINISHED as trigger even with maxNumSegments set but was: " + trigger.name(); spec = mergePolicy.findForcedMerges(segmentInfos, maxNumSegments, Collections.unmodifiableMap(segmentsToMerge), this); - newMergesFound = spec != null; - if (newMergesFound) { + if (spec != null) { final int numMerges = spec.merges.size(); for(int i=0;i 0) { + 
SegmentInfos committingSegmentInfos = toCommit; + onCommitMerges = updatePendingMerges(new OneMergeWrappingMergePolicy(config.getMergePolicy(), toWrap -> + new MergePolicy.OneMerge(toWrap.segments) { + @Override + public void mergeFinished(boolean committed) throws IOException { + assert Thread.holdsLock(IndexWriter.this); + if (committed && includeInCommit.get()) { + deleter.incRef(info.files()); + Set<String> mergedSegmentNames = new HashSet<>(); + for (SegmentCommitInfo sci : segments) { + mergedSegmentNames.add(sci.info.name); + } + List<SegmentCommitInfo> toCommitMergedAwaySegments = new ArrayList<>(); + for (SegmentCommitInfo sci : committingSegmentInfos) { + if (mergedSegmentNames.contains(sci.info.name)) { + toCommitMergedAwaySegments.add(sci); + deleter.decRef(sci.files()); + } + } + // Construct a OneMerge that applies to toCommit + MergePolicy.OneMerge applicableMerge = new MergePolicy.OneMerge(toCommitMergedAwaySegments); + applicableMerge.info = info.clone(); + long segmentCounter = Long.parseLong(info.info.name.substring(1), Character.MAX_RADIX); + committingSegmentInfos.counter = Math.max(committingSegmentInfos.counter, segmentCounter + 1); + committingSegmentInfos.applyMergeChanges(applicableMerge, false); + } + toWrap.mergeFinished(committed); + super.mergeFinished(committed); + } + + @Override + public CodecReader wrapForMerge(CodecReader reader) throws IOException { + return toWrap.wrapForMerge(reader); + } + } + ), MergeTrigger.COMMIT, UNBOUNDED_MAX_MERGE_SEGMENTS); + } + pendingCommitChangeCount = changeCount.get(); // This protects the segmentInfos we are now going @@ -3236,8 +3281,7 @@ private long prepareCommitInternal() throws IOException { // we are trying to sync all referenced files, a // merge completes which would otherwise have // removed the files we are now syncing.
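
Taken together, the IndexWriter hook above, the new MergeTrigger.COMMIT, and the IndexWriterConfig knob introduced below add up to an opt-in merge-on-commit feature. Here is a minimal sketch of how a caller might opt in, assuming FilterMergePolicy's usual delegating constructor plus the findFullFlushMerges and setMaxCommitMergeWaitSeconds additions from this patch; MergeOnCommitPolicy is a hypothetical name, and delegating to findMerges is just one plausible selection strategy, not the patch's prescribed one:

```java
import java.io.IOException;
import org.apache.lucene.index.FilterMergePolicy;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.MergeTrigger;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.index.TieredMergePolicy;

// Hypothetical wrapper: instead of the default null ("no merges on commit"),
// reuse the wrapped policy's normal merge selection for the committing segments.
class MergeOnCommitPolicy extends FilterMergePolicy {
  MergeOnCommitPolicy(MergePolicy in) {
    super(in);
  }

  @Override
  public MergeSpecification findFullFlushMerges(MergeTrigger mergeTrigger,
                                                SegmentInfos segmentInfos,
                                                MergeContext mergeContext) throws IOException {
    return in.findMerges(mergeTrigger, segmentInfos, mergeContext);
  }
}

// Usage sketch: commits wait up to 30 seconds for the selected merges; merges
// that miss the deadline keep running but are not reflected in the commit.
IndexWriterConfig config = new IndexWriterConfig()
    .setMergePolicy(new MergeOnCommitPolicy(new TieredMergePolicy()))
    .setMaxCommitMergeWaitSeconds(30);
```
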
- filesToCommit = toCommit.files(false); - deleter.incRef(filesToCommit); + deleter.incRef(toCommit.files(false)); } success = true; } finally { @@ -3258,7 +3302,16 @@ private long prepareCommitInternal() throws IOException { } finally { maybeCloseOnTragicEvent(); } - + + if (onCommitMerges != null) { + mergeScheduler.merge(mergeSource, MergeTrigger.COMMIT); + onCommitMerges.await(maxCommitMergeWaitSeconds, TimeUnit.SECONDS); + synchronized (this) { + // we need to call this under lock since mergeFinished above is also called under the IW lock + includeInCommit.set(false); + } + } + filesToCommit = toCommit.files(false); try { if (anyChanges) { maybeMerge.set(true); @@ -4290,7 +4343,7 @@ private synchronized void mergeFinish(MergePolicy.OneMerge merge) { @SuppressWarnings("try") private synchronized void closeMergeReaders(MergePolicy.OneMerge merge, boolean suppressExceptions) throws IOException { final boolean drop = suppressExceptions == false; - try (Closeable finalizer = merge::mergeFinished) { + try (Closeable finalizer = () -> merge.mergeFinished(suppressExceptions == false)) { IOUtils.applyToAll(merge.readers, sr -> { final ReadersAndUpdates rld = getPooledInstance(sr.getOriginalSegmentInfo(), false); // We still hold a ref so it should not have been removed: @@ -4484,6 +4537,7 @@ public int length() { // Merge would produce a 0-doc segment, so we do nothing except commit the merge to remove all the 0-doc segments that we "merged": assert merge.info.info.maxDoc() == 0; commitMerge(merge, mergeState); + success = true; return 0; } diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java index 4cdc9c02529f..12379328bc13 100644 --- a/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java +++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java @@ -32,9 +32,9 @@ import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.util.InfoStream; import org.apache.lucene.util.PrintStreamInfoStream; +import org.apache.lucene.util.SetOnce; import org.apache.lucene.util.SetOnce.AlreadySetException; import org.apache.lucene.util.Version; -import org.apache.lucene.util.SetOnce; /** * Holds all the configuration that is used to create an {@link IndexWriter}. @@ -110,6 +110,9 @@ public static enum OpenMode { /** Default value for whether calls to {@link IndexWriter#close()} include a commit. */ public final static boolean DEFAULT_COMMIT_ON_CLOSE = true; + + /** Default value for time to wait for merges on commit (when using a {@link MergePolicy} that implements findFullFlushMerges). */ + public static final long DEFAULT_MAX_COMMIT_MERGE_WAIT_SECONDS = 0; // indicates whether this config instance is already attached to a writer. // not final so that it can be cloned properly. @@ -460,6 +463,20 @@ public IndexWriterConfig setCommitOnClose(boolean commitOnClose) { return this; } + /** + * Expert: sets the amount of time to wait for merges returned by MergePolicy.findFullFlushMerges(...). + * If this time is reached, we proceed with the commit based on segments merged up to that point. + * The merges are not cancelled, and will still run to completion independent of the commit + * like normal segment merges. The default is {@value IndexWriterConfig#DEFAULT_MAX_COMMIT_MERGE_WAIT_SECONDS}. 
+ * + * Note: This setting has no effect unless {@link MergePolicy#findFullFlushMerges(MergeTrigger, SegmentInfos, MergePolicy.MergeContext)} + * is overridden to actually return merges; the default implementation returns none. + */ + public IndexWriterConfig setMaxCommitMergeWaitSeconds(long maxCommitMergeWaitSeconds) { + this.maxCommitMergeWaitSeconds = maxCommitMergeWaitSeconds; + return this; + } + /** We only allow sorting on these types */ private static final EnumSet<SortField.Type> ALLOWED_INDEX_SORT_TYPES = EnumSet.of(SortField.Type.STRING, SortField.Type.LONG, @@ -471,10 +488,9 @@ public IndexWriterConfig setCommitOnClose(boolean commitOnClose) { * Set the {@link Sort} order to use for all (flushed and merged) segments. */ public IndexWriterConfig setIndexSort(Sort sort) { - for(SortField sortField : sort.getSort()) { - final SortField.Type sortType = Sorter.getSortFieldType(sortField); - if (ALLOWED_INDEX_SORT_TYPES.contains(sortType) == false) { - throw new IllegalArgumentException("invalid SortField type: must be one of " + ALLOWED_INDEX_SORT_TYPES + " but got: " + sortField); + for (SortField sortField : sort.getSort()) { + if (sortField.getIndexSorter() == null) { + throw new IllegalArgumentException("Cannot sort index with sort field " + sortField); } } this.indexSort = sort; diff --git a/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java b/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java index 1f48acc8d5f6..9b1d56c0a96d 100644 --- a/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java +++ b/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java @@ -109,6 +109,8 @@ public class LiveIndexWriterConfig { /** soft deletes field */ protected String softDeletesField = null; + /** Amount of time to wait for merges returned by MergePolicy.findFullFlushMerges(...) */ + protected volatile long maxCommitMergeWaitSeconds; // used by IndexWriterConfig LiveIndexWriterConfig(Analyzer analyzer) { @@ -132,6 +134,7 @@ public class LiveIndexWriterConfig { flushPolicy = new FlushByRamOrCountsPolicy(); readerPooling = IndexWriterConfig.DEFAULT_READER_POOLING; perThreadHardLimitMB = IndexWriterConfig.DEFAULT_RAM_PER_THREAD_HARD_LIMIT_MB; + maxCommitMergeWaitSeconds = IndexWriterConfig.DEFAULT_MAX_COMMIT_MERGE_WAIT_SECONDS; } /** Returns the default analyzer to use for indexing documents. */ @@ -461,6 +464,15 @@ public String getSoftDeletesField() { return softDeletesField; } + /** + * Expert: return the amount of time to wait for merges returned by MergePolicy.findFullFlushMerges(...). + * If this time is reached, we proceed with the commit based on segments merged up to that point. + * The merges are not cancelled, and may still run to completion independent of the commit.
+ */ + public long getMaxCommitMergeWaitSeconds() { + return maxCommitMergeWaitSeconds; + } + @Override public String toString() { StringBuilder sb = new StringBuilder(); @@ -484,6 +496,7 @@ public String toString() { sb.append("indexSort=").append(getIndexSort()).append("\n"); sb.append("checkPendingFlushOnUpdate=").append(isCheckPendingFlushOnUpdate()).append("\n"); sb.append("softDeletesField=").append(getSoftDeletesField()).append("\n"); + sb.append("maxCommitMergeWaitSeconds=").append(getMaxCommitMergeWaitSeconds()).append("\n"); return sb.toString(); } } diff --git a/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java index 3ac391451911..5a090da82967 100644 --- a/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java +++ b/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java @@ -23,7 +23,12 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Optional; import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.ReentrantLock; @@ -37,6 +42,7 @@ import org.apache.lucene.util.Bits; import org.apache.lucene.util.IOSupplier; import org.apache.lucene.util.InfoStream; +import org.apache.lucene.util.ThreadInterruptedException; /** *
<p>
Expert: a MergePolicy determines the sequence of @@ -76,7 +82,7 @@ public abstract class MergePolicy { * @lucene.experimental */ public static class OneMergeProgress { /** Reason for pausing the merge thread. */ - public static enum PauseReason { + public enum PauseReason { /** Stopped (because of throughput rate set to 0, typically). */ STOPPED, /** Temporarily paused because of exceeded throughput rate. */ @@ -196,6 +202,7 @@ final void setMergeThread(Thread owner) { * * @lucene.experimental */ public static class OneMerge { + private final CompletableFuture<Boolean> mergeCompleted = new CompletableFuture<>(); SegmentCommitInfo info; // used by IndexWriter boolean registerDone; // used by IndexWriter long mergeGen; // used by IndexWriter @@ -222,7 +229,7 @@ public static class OneMerge { volatile long mergeStartNS = -1; /** Total number of documents in segments to be merged, not accounting for deletions. */ - public final int totalMaxDoc; + final int totalMaxDoc; Throwable error; /** Sole constructor. @@ -233,13 +240,8 @@ public OneMerge(List<SegmentCommitInfo> segments) { throw new RuntimeException("segments must include at least one segment"); } // clone the list, as the in list may be based off original SegmentInfos and may be modified - this.segments = new ArrayList<>(segments); - int count = 0; - for(SegmentCommitInfo info : segments) { - count += info.info.maxDoc(); - } - totalMaxDoc = count; - + this.segments = List.copyOf(segments); + totalMaxDoc = segments.stream().mapToInt(i -> i.info.maxDoc()).sum(); mergeProgress = new OneMergeProgress(); } @@ -250,9 +252,15 @@ public OneMerge(List<SegmentCommitInfo> segments) { public void mergeInit() throws IOException { mergeProgress.setMergeThread(Thread.currentThread()); } - - /** Called by {@link IndexWriter} after the merge is done and all readers have been closed. */ - public void mergeFinished() throws IOException { + + /** Called by {@link IndexWriter} after the merge is done and all readers have been closed. + * @param success true iff the merge finished successfully, i.e. was committed */ + public void mergeFinished(boolean success) throws IOException { + mergeCompleted.complete(success); + // https://issues.apache.org/jira/browse/LUCENE-9408 + // if (mergeCompleted.complete(success) == false) { + // throw new IllegalStateException("merge has already finished"); + // } } /** Wrap the reader in order to add/remove information to the merged segment. */ @@ -362,6 +370,37 @@ public void checkAborted() throws MergeAbortedException { public OneMergeProgress getMergeProgress() { return mergeProgress; } + + /** + * Waits for this merge to be completed + * @return true if the merge finished within the specified timeout + */ + boolean await(long timeout, TimeUnit timeUnit) { + try { + mergeCompleted.get(timeout, timeUnit); + return true; + } catch (InterruptedException e) { + throw new ThreadInterruptedException(e); + } catch (ExecutionException | TimeoutException e) { + return false; + } + } + + /** + * Returns true if the merge has finished or false if it's still running or + * has not been started. This method will not block. + */ + boolean isDone() { + return mergeCompleted.isDone(); + } + + /** + * Returns true iff the merge completed successfully or false if the merge exited with a failure.
This method will not block and returns an empty Optional if the merge has not finished yet + */ + Optional<Boolean> hasCompletedSuccessfully() { + return Optional.ofNullable(mergeCompleted.getNow(null)); + } } /** @@ -399,6 +438,22 @@ public String segString(Directory dir) { } return b.toString(); } + + /** + * Waits if necessary for at most the given time for all merges. + */ + boolean await(long timeout, TimeUnit unit) { + try { + CompletableFuture<Void> future = CompletableFuture.allOf(merges.stream() + .map(m -> m.mergeCompleted).collect(Collectors.toList()).toArray(CompletableFuture[]::new)); + future.get(timeout, unit); + return true; + } catch (InterruptedException e) { + throw new ThreadInterruptedException(e); + } catch (ExecutionException | TimeoutException e) { + return false; + } + } } /** Exception thrown if there are any problems while executing a merge. */ @@ -500,7 +555,7 @@ public abstract MergeSpecification findMerges(MergeTrigger mergeTrigger, Segment * an original segment present in the * to-be-merged index; else, it was a segment * produced by a cascaded merge. - * @param mergeContext the IndexWriter to find the merges on + * @param mergeContext the MergeContext to find the merges on */ public abstract MergeSpecification findForcedMerges( SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge, MergeContext mergeContext) @@ -511,11 +566,33 @@ public abstract MergeSpecification findForcedMerges( * deletes from the index. * @param segmentInfos * the total set of segments in the index - * @param mergeContext the IndexWriter to find the merges on + * @param mergeContext the MergeContext to find the merges on */ public abstract MergeSpecification findForcedDeletesMerges( SegmentInfos segmentInfos, MergeContext mergeContext) throws IOException; + /** + * Identifies merges that we want to execute (synchronously) on commit. By default, do not synchronously merge on commit. + * + * Any merges returned here will make {@link IndexWriter#commit()} or {@link IndexWriter#prepareCommit()} block until + * the merges complete or until {@link IndexWriterConfig#getMaxCommitMergeWaitSeconds()} seconds have elapsed. This may be + * used to merge small segments that have just been flushed as part of the commit, reducing the number of segments in + * the commit. If a merge does not complete in the allotted time, it will continue to execute, but will not be reflected + * in the commit. + * + * If a {@link OneMerge} in the returned {@link MergeSpecification} includes a segment already included in a registered + * merge, then {@link IndexWriter#commit()} or {@link IndexWriter#prepareCommit()} will throw an {@link IllegalStateException}. + * Use {@link MergeContext#getMergingSegments()} to determine which segments are currently registered to merge. + * + * @param mergeTrigger the event that triggered the merge (COMMIT or FULL_FLUSH). + * @param segmentInfos the total set of segments in the index (while preparing the commit) + * @param mergeContext the MergeContext to find the merges on, which should be used to determine which segments are + * already in a registered merge (see {@link MergeContext#getMergingSegments()}). + */ + public MergeSpecification findFullFlushMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos, MergeContext mergeContext) throws IOException { + return null; + } + /** * Returns true if a new segment (regardless of its origin) should use the * compound file format.
The default implementation returns true diff --git a/lucene/core/src/java/org/apache/lucene/index/MergeTrigger.java b/lucene/core/src/java/org/apache/lucene/index/MergeTrigger.java index d165a27008f4..01a6b15a0358 100644 --- a/lucene/core/src/java/org/apache/lucene/index/MergeTrigger.java +++ b/lucene/core/src/java/org/apache/lucene/index/MergeTrigger.java @@ -47,5 +47,10 @@ public enum MergeTrigger { /** * Merge was triggered by a closing IndexWriter. */ - CLOSING + CLOSING, + + /** + * Merge was triggered on commit. + */ + COMMIT, } diff --git a/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java b/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java index b9ee2f58bb73..d515b6dfc9e1 100644 --- a/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java +++ b/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java @@ -434,7 +434,7 @@ public static SortedNumericDocValues getSortedNumericValues(final IndexReader r, LeafReaderContext context = leaves.get(i); SortedNumericDocValues v = context.reader().getSortedNumericDocValues(field); if (v == null) { - v = DocValues.emptySortedNumeric(context.reader().maxDoc()); + v = DocValues.emptySortedNumeric(); } else { anyReal = true; } diff --git a/lucene/core/src/java/org/apache/lucene/index/MultiSorter.java b/lucene/core/src/java/org/apache/lucene/index/MultiSorter.java index 23487014ea3b..35d1441155a7 100644 --- a/lucene/core/src/java/org/apache/lucene/index/MultiSorter.java +++ b/lucene/core/src/java/org/apache/lucene/index/MultiSorter.java @@ -24,8 +24,6 @@ import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.util.Bits; -import org.apache.lucene.util.LongValues; -import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.PriorityQueue; import org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PackedLongValues; @@ -41,10 +39,14 @@ static MergeState.DocMap[] sort(Sort sort, List readers) throws IOE // TODO: optimize if only 1 reader is incoming, though that's a rare case SortField fields[] = sort.getSort(); - final ComparableProvider[][] comparables = new ComparableProvider[fields.length][]; + final IndexSorter.ComparableProvider[][] comparables = new IndexSorter.ComparableProvider[fields.length][]; final int[] reverseMuls = new int[fields.length]; for(int i=0;i readers, SortField sortField) throws IOException { - - ComparableProvider[] providers = new ComparableProvider[readers.size()]; - final SortField.Type sortType = Sorter.getSortFieldType(sortField); - - switch(sortType) { - - case STRING: - { - // this uses the efficient segment-local ordinal map: - final SortedDocValues[] values = new SortedDocValues[readers.size()]; - for(int i=0;i 0) { - infos.minSegmentLuceneVersion = Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt()); - } else { - // else leave as null: no segments - } + SegmentInfos infos = new SegmentInfos(indexCreatedVersion); + infos.id = id; + infos.generation = generation; + infos.lastGeneration = generation; + infos.luceneVersion = luceneVersion; - long totalDocs = 0; - for (int seg = 0; seg < numSegments; seg++) { - String segName = input.readString(); - byte[] segmentID = new byte[StringHelper.ID_LENGTH]; - input.readBytes(segmentID, 0, segmentID.length); - Codec codec = readCodec(input); - SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.READ); - info.setCodec(codec); - totalDocs += info.maxDoc(); - long delGen = input.readLong(); 
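
The MultiSorter change a few hunks back now routes cross-segment ordering through IndexSorter.ComparableProvider: each provider maps a docID to a long whose natural order is the sort order, so merging only ever compares longs (floats and doubles are made comparable via the NumericUtils sortable-bits encodings seen earlier). A toy illustration of that contract, not code from this patch, assuming the IndexSorter API as introduced above; reverse order and multi-field tie-breaking are omitted:

```java
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import org.apache.lucene.codecs.CodecReader;
import org.apache.lucene.index.IndexSorter;

// Toy sketch: flatten every (reader, doc) pair into {sortKey, readerIndex, docID}
// and sort by the provider-supplied key; the stable sort keeps docID order on ties.
static List<long[]> globalDocOrder(IndexSorter sorter, List<CodecReader> readers) throws IOException {
  IndexSorter.ComparableProvider[] providers = sorter.getComparableProviders(readers);
  List<long[]> docs = new ArrayList<>();
  for (int i = 0; i < readers.size(); i++) {
    for (int docID = 0; docID < readers.get(i).maxDoc(); docID++) {
      docs.add(new long[] { providers[i].getAsComparableLong(docID), i, docID });
    }
  }
  docs.sort(Comparator.comparingLong(d -> d[0]));
  return docs;
}
```
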
- int delCount = input.readInt(); - if (delCount < 0 || delCount > info.maxDoc()) { - throw new CorruptIndexException("invalid deletion count: " + delCount + " vs maxDoc=" + info.maxDoc(), input); - } - long fieldInfosGen = input.readLong(); - long dvGen = input.readLong(); - int softDelCount = format > VERSION_72 ? input.readInt() : 0; - if (softDelCount < 0 || softDelCount > info.maxDoc()) { - throw new CorruptIndexException("invalid deletion count: " + softDelCount + " vs maxDoc=" + info.maxDoc(), input); + infos.version = input.readLong(); + //System.out.println("READ sis version=" + infos.version); + if (format > VERSION_70) { + infos.counter = input.readVLong(); + } else { + infos.counter = input.readInt(); } - if (softDelCount + delCount > info.maxDoc()) { - throw new CorruptIndexException("invalid deletion count: " + softDelCount + delCount + " vs maxDoc=" + info.maxDoc(), input); + int numSegments = input.readInt(); + if (numSegments < 0) { + throw new CorruptIndexException("invalid segment count: " + numSegments, input); } - final byte[] sciId; - if (format > VERSION_74) { - byte marker = input.readByte(); - switch (marker) { - case 1: - sciId = new byte[StringHelper.ID_LENGTH]; - input.readBytes(sciId, 0, sciId.length); - break; - case 0: - sciId = null; - break; - default: - throw new CorruptIndexException("invalid SegmentCommitInfo ID marker: " + marker, input); - } + + if (numSegments > 0) { + infos.minSegmentLuceneVersion = Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt()); } else { - sciId = null; + // else leave as null: no segments } - SegmentCommitInfo siPerCommit = new SegmentCommitInfo(info, delCount, softDelCount, delGen, fieldInfosGen, dvGen, sciId); - siPerCommit.setFieldInfosFiles(input.readSetOfStrings()); - final Map> dvUpdateFiles; - final int numDVFields = input.readInt(); - if (numDVFields == 0) { - dvUpdateFiles = Collections.emptyMap(); - } else { - Map> map = new HashMap<>(numDVFields); - for (int i = 0; i < numDVFields; i++) { - map.put(input.readInt(), input.readSetOfStrings()); + + long totalDocs = 0; + for (int seg = 0; seg < numSegments; seg++) { + String segName = input.readString(); + byte[] segmentID = new byte[StringHelper.ID_LENGTH]; + input.readBytes(segmentID, 0, segmentID.length); + Codec codec = readCodec(input); + SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.READ); + info.setCodec(codec); + totalDocs += info.maxDoc(); + long delGen = input.readLong(); + int delCount = input.readInt(); + if (delCount < 0 || delCount > info.maxDoc()) { + throw new CorruptIndexException("invalid deletion count: " + delCount + " vs maxDoc=" + info.maxDoc(), input); } - dvUpdateFiles = Collections.unmodifiableMap(map); - } - siPerCommit.setDocValuesUpdatesFiles(dvUpdateFiles); - infos.add(siPerCommit); + long fieldInfosGen = input.readLong(); + long dvGen = input.readLong(); + int softDelCount = format > VERSION_72 ? 
input.readInt() : 0; + if (softDelCount < 0 || softDelCount > info.maxDoc()) { + throw new CorruptIndexException("invalid deletion count: " + softDelCount + " vs maxDoc=" + info.maxDoc(), input); + } + if (softDelCount + delCount > info.maxDoc()) { + throw new CorruptIndexException("invalid deletion count: " + softDelCount + delCount + " vs maxDoc=" + info.maxDoc(), input); + } + final byte[] sciId; + if (format > VERSION_74) { + byte marker = input.readByte(); + switch (marker) { + case 1: + sciId = new byte[StringHelper.ID_LENGTH]; + input.readBytes(sciId, 0, sciId.length); + break; + case 0: + sciId = null; + break; + default: + throw new CorruptIndexException("invalid SegmentCommitInfo ID marker: " + marker, input); + } + } else { + sciId = null; + } + SegmentCommitInfo siPerCommit = new SegmentCommitInfo(info, delCount, softDelCount, delGen, fieldInfosGen, dvGen, sciId); + siPerCommit.setFieldInfosFiles(input.readSetOfStrings()); + final Map> dvUpdateFiles; + final int numDVFields = input.readInt(); + if (numDVFields == 0) { + dvUpdateFiles = Collections.emptyMap(); + } else { + Map> map = new HashMap<>(numDVFields); + for (int i = 0; i < numDVFields; i++) { + map.put(input.readInt(), input.readSetOfStrings()); + } + dvUpdateFiles = Collections.unmodifiableMap(map); + } + siPerCommit.setDocValuesUpdatesFiles(dvUpdateFiles); + infos.add(siPerCommit); - Version segmentVersion = info.getVersion(); + Version segmentVersion = info.getVersion(); - if (segmentVersion.onOrAfter(infos.minSegmentLuceneVersion) == false) { - throw new CorruptIndexException("segments file recorded minSegmentLuceneVersion=" + infos.minSegmentLuceneVersion + " but segment=" + info + " has older version=" + segmentVersion, input); - } + if (segmentVersion.onOrAfter(infos.minSegmentLuceneVersion) == false) { + throw new CorruptIndexException("segments file recorded minSegmentLuceneVersion=" + infos.minSegmentLuceneVersion + " but segment=" + info + " has older version=" + segmentVersion, input); + } - if (infos.indexCreatedVersionMajor >= 7 && segmentVersion.major < infos.indexCreatedVersionMajor) { - throw new CorruptIndexException("segments file recorded indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor + " but segment=" + info + " has older version=" + segmentVersion, input); - } + if (infos.indexCreatedVersionMajor >= 7 && segmentVersion.major < infos.indexCreatedVersionMajor) { + throw new CorruptIndexException("segments file recorded indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor + " but segment=" + info + " has older version=" + segmentVersion, input); + } - if (infos.indexCreatedVersionMajor >= 7 && info.getMinVersion() == null) { - throw new CorruptIndexException("segments infos must record minVersion with indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor, input); + if (infos.indexCreatedVersionMajor >= 7 && info.getMinVersion() == null) { + throw new CorruptIndexException("segments infos must record minVersion with indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor, input); + } } - } - infos.userData = input.readMapOfStrings(); + infos.userData = input.readMapOfStrings(); - CodecUtil.checkFooter(input); + // LUCENE-6299: check we are in bounds + if (totalDocs > IndexWriter.getActualMaxDocs()) { + throw new CorruptIndexException("Too many documents: an index cannot exceed " + IndexWriter.getActualMaxDocs() + " but readers have total maxDoc=" + totalDocs, input); + } - // LUCENE-6299: check we are in bounds - if (totalDocs > IndexWriter.getActualMaxDocs()) { - throw 
new CorruptIndexException("Too many documents: an index cannot exceed " + IndexWriter.getActualMaxDocs() + " but readers have total maxDoc=" + totalDocs, input); + return infos; + } catch (Throwable t) { + priorE = t; + } finally { + if (format >= VERSION_70) { // oldest supported version + CodecUtil.checkFooter(input, priorE); + } else { + throw IOUtils.rethrowAlways(priorE); + } } - - return infos; + throw new Error("Unreachable code"); } private static Codec readCodec(DataInput input) throws IOException { diff --git a/lucene/core/src/java/org/apache/lucene/index/SortFieldProvider.java b/lucene/core/src/java/org/apache/lucene/index/SortFieldProvider.java new file mode 100644 index 000000000000..290decd73c3b --- /dev/null +++ b/lucene/core/src/java/org/apache/lucene/index/SortFieldProvider.java @@ -0,0 +1,118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.index; + +import java.io.IOException; +import java.util.Set; + +import org.apache.lucene.search.SortField; +import org.apache.lucene.store.DataInput; +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.util.NamedSPILoader; + +/** + * Reads/Writes a named SortField from a segment info file, used to record index sorts + */ +public abstract class SortFieldProvider implements NamedSPILoader.NamedSPI { + + private static class Holder { + private static final NamedSPILoader LOADER = new NamedSPILoader<>(SortFieldProvider.class); + + static NamedSPILoader getLoader() { + if (LOADER == null) { + throw new IllegalStateException("You tried to lookup a SortFieldProvider by name before all SortFieldProviders could be initialized. "+ + "This likely happens if you call SortFieldProvider#forName from a SortFieldProviders's ctor."); + } + return LOADER; + } + } + + /** + * Looks up a SortFieldProvider by name + */ + public static SortFieldProvider forName(String name) { + return Holder.getLoader().lookup(name); + } + + /** + * Lists all available SortFieldProviders + */ + public static Set availableSortFieldProviders() { + return Holder.getLoader().availableServices(); + } + + /** + * Reloads the SortFieldProvider list from the given {@link ClassLoader}. + * Changes to the list are visible after the method ends, all + * iterators ({@link #availableSortFieldProviders()} ()},...) stay consistent. + * + *
<p><em>NOTE:</em> Only new SortFieldProviders are added, existing ones are + * never removed or replaced. + * + *
<p><em>This method is expensive</em> and should only be called for discovery + * of new SortFieldProviders on the given classpath/classloader! + */ + public static void reloadSortFieldProviders(ClassLoader classLoader) { + Holder.getLoader().reload(classLoader); + } + + /** + * Writes a SortField to a DataOutput + */ + public static void write(SortField sf, DataOutput output) throws IOException { + IndexSorter sorter = sf.getIndexSorter(); + if (sorter == null) { + throw new IllegalArgumentException("Cannot serialize sort field " + sf); + } + SortFieldProvider provider = SortFieldProvider.forName(sorter.getProviderName()); + provider.writeSortField(sf, output); + } + + /** The name this SortFieldProvider is registered under */ + protected final String name; + + /** + * Creates a new SortFieldProvider. + *
<p>The provided name will be written into the index segment: in order for + * the segment to be read, this class should be registered with Java's + * SPI mechanism (registered in META-INF/ of your jar file, etc). + * @param name must be all ascii alphanumeric, and less than 128 characters in length. + */ + protected SortFieldProvider(String name) { + this.name = name; + } + + @Override + public String getName() { + return name; + } + + /** + * Reads a SortField from serialized bytes + */ + public abstract SortField readSortField(DataInput in) throws IOException; + + /** + * Writes a SortField to a DataOutput + * + * This is used to record index sort information in segment headers + */ + public abstract void writeSortField(SortField sf, DataOutput out) throws IOException; + +} diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java index 86d0f0bab338..2252f003cb2d 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java @@ -21,7 +21,6 @@ import org.apache.lucene.codecs.DocValuesConsumer; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.SortField; import org.apache.lucene.util.ByteBlockPool; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefHash; @@ -35,7 +34,7 @@ /** Buffers up pending byte[] per doc, deref and sorting via * int ord, then flushes when segment flushes. */ -class SortedDocValuesWriter extends DocValuesWriter { +class SortedDocValuesWriter extends DocValuesWriter<SortedDocValues> { final BytesRefHash hash; private PackedLongValues.Builder pending; private DocsWithFieldSet docsWithField; @@ -79,11 +78,6 @@ public void addValue(int docID, BytesRef value) { lastDocID = docID; } - @Override - public void finish(int maxDoc) { - updateBytesUsed(); - } - private void addOneValue(BytesRef value) { int termID = hash.add(value); if (termID < 0) { @@ -107,20 +101,20 @@ private void updateBytesUsed() { } @Override - Sorter.DocComparator getDocComparator(int maxDoc, SortField sortField) throws IOException { - assert sortField.getType().equals(SortField.Type.STRING); - assert finalSortedValues == null && finalOrdMap == null &&finalOrds == null; + SortedDocValues getDocValues() { int valueCount = hash.size(); - finalSortedValues = hash.sort(); - finalOrds = pending.build(); - finalOrdMap = new int[valueCount]; + if (finalSortedValues == null) { + updateBytesUsed(); + assert finalOrdMap == null && finalOrds == null; + finalSortedValues = hash.sort(); + finalOrds = pending.build(); + finalOrdMap = new int[valueCount]; + } for (int ord = 0; ord < valueCount; ord++) { finalOrdMap[finalSortedValues[ord]] = ord; } - final SortedDocValues docValues = - new BufferedSortedDocValues(hash, valueCount, finalOrds, finalSortedValues, finalOrdMap, + return new BufferedSortedDocValues(hash, valueCount, finalOrds, finalSortedValues, finalOrdMap, docsWithField.iterator()); - return Sorter.getDocComparator(maxDoc, sortField, () -> docValues, () -> null); } private int[] sortDocValues(int maxDoc, Sorter.DocMap sortMap, SortedDocValues oldValues) throws IOException { @@ -137,26 +131,20 @@ public void flush(SegmentWriteState state, Sorter.DocMap sortMap, DocValuesConsumer dvConsumer) throws IOException { final int valueCount = hash.size(); - final PackedLongValues ords; -
final int[] sortedValues; - final int[] ordMap; if (finalOrds == null) { - sortedValues = hash.sort(); - ords = pending.build(); - ordMap = new int[valueCount]; + updateBytesUsed(); + finalSortedValues = hash.sort(); + finalOrds = pending.build(); + finalOrdMap = new int[valueCount]; for (int ord = 0; ord < valueCount; ord++) { - ordMap[sortedValues[ord]] = ord; + finalOrdMap[finalSortedValues[ord]] = ord; } - } else { - sortedValues = finalSortedValues; - ords = finalOrds; - ordMap = finalOrdMap; } final int[] sorted; if (sortMap != null) { sorted = sortDocValues(state.segmentInfo.maxDoc(), sortMap, - new BufferedSortedDocValues(hash, valueCount, ords, sortedValues, ordMap, docsWithField.iterator())); + new BufferedSortedDocValues(hash, valueCount, finalOrds, finalSortedValues, finalOrdMap, docsWithField.iterator())); } else { sorted = null; } @@ -168,7 +156,7 @@ public SortedDocValues getSorted(FieldInfo fieldInfoIn) { throw new IllegalArgumentException("wrong fieldInfo"); } final SortedDocValues buf = - new BufferedSortedDocValues(hash, valueCount, ords, sortedValues, ordMap, docsWithField.iterator()); + new BufferedSortedDocValues(hash, valueCount, finalOrds, finalSortedValues, finalOrdMap, docsWithField.iterator()); if (sorted == null) { return buf; } @@ -245,8 +233,4 @@ public int getValueCount() { } } - @Override - DocIdSetIterator getDocIdSet() { - return docsWithField.iterator(); - } } diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValuesWriter.java index bdc65cc80574..83c394fc207b 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValuesWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValuesWriter.java @@ -22,9 +22,6 @@ import org.apache.lucene.codecs.DocValuesConsumer; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.SortField; -import org.apache.lucene.search.SortedNumericSelector; -import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.Counter; import org.apache.lucene.util.RamUsageEstimator; @@ -34,7 +31,7 @@ import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; /** Buffers up pending long[] per doc, sorts, then flushes when segment flushes. 
*/ -class SortedNumericDocValuesWriter extends DocValuesWriter { +class SortedNumericDocValuesWriter extends DocValuesWriter { private PackedLongValues.Builder pending; // stream of all values private PackedLongValues.Builder pendingCounts; // count of values per doc private DocsWithFieldSet docsWithField; @@ -85,11 +82,6 @@ private void finishCurrentDoc() { docsWithField.add(currentDoc); } - @Override - public void finish(int maxDoc) { - finishCurrentDoc(); - } - private void addOneValue(long value) { if (currentUpto == currentValues.length) { currentValues = ArrayUtil.grow(currentValues, currentValues.length+1); @@ -106,16 +98,14 @@ private void updateBytesUsed() { } @Override - Sorter.DocComparator getDocComparator(int maxDoc, SortField sortField) throws IOException { - assert sortField instanceof SortedNumericSortField; - assert finalValues == null && finalValuesCount == null; - finalValues = pending.build(); - finalValuesCount = pendingCounts.build(); - final SortedNumericDocValues docValues = - new BufferedSortedNumericDocValues(finalValues, finalValuesCount, docsWithField.iterator()); - SortedNumericSortField sf = (SortedNumericSortField) sortField; - return Sorter.getDocComparator(maxDoc, sf, () -> null, - () -> SortedNumericSelector.wrap(docValues, sf.getSelector(), sf.getNumericType())); + SortedNumericDocValues getDocValues() { + if (finalValues == null) { + assert finalValuesCount == null; + finishCurrentDoc(); + finalValues = pending.build(); + finalValuesCount = pendingCounts.build(); + } + return new BufferedSortedNumericDocValues(finalValues, finalValuesCount, docsWithField.iterator()); } private long[][] sortDocValues(int maxDoc, Sorter.DocMap sortMap, SortedNumericDocValues oldValues) throws IOException { @@ -137,6 +127,7 @@ public void flush(SegmentWriteState state, Sorter.DocMap sortMap, DocValuesConsu final PackedLongValues values; final PackedLongValues valueCounts; if (finalValues == null) { + finishCurrentDoc(); values = pending.build(); valueCounts = pendingCounts.build(); } else { @@ -232,8 +223,4 @@ public long cost() { } } - @Override - DocIdSetIterator getDocIdSet() { - return docsWithField.iterator(); - } } diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java index 71a14a5cb7cb..022b17da0956 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java @@ -21,9 +21,6 @@ import org.apache.lucene.codecs.DocValuesConsumer; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.SortField; -import org.apache.lucene.search.SortedSetSelector; -import org.apache.lucene.search.SortedSetSortField; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ByteBlockPool; import org.apache.lucene.util.BytesRef; @@ -39,7 +36,7 @@ /** Buffers up pending byte[]s per doc, deref and sorting via * int ord, then flushes when segment flushes. 
*/ -class SortedSetDocValuesWriter extends DocValuesWriter { +class SortedSetDocValuesWriter extends DocValuesWriter { final BytesRefHash hash; private PackedLongValues.Builder pending; // stream of all termIDs private PackedLongValues.Builder pendingCounts; // termIDs per doc @@ -115,11 +112,6 @@ private void finishCurrentDoc() { docsWithField.add(currentDoc); } - @Override - public void finish(int maxDoc) { - finishCurrentDoc(); - } - private void addOneValue(BytesRef value) { int termID = hash.add(value); if (termID < 0) { @@ -170,22 +162,20 @@ private long[][] sortDocValues(int maxDoc, Sorter.DocMap sortMap, SortedSetDocVa } @Override - Sorter.DocComparator getDocComparator(int maxDoc, SortField sortField) throws IOException { - assert sortField instanceof SortedSetSortField; - assert finalOrds == null && finalOrdCounts == null && finalSortedValues == null && finalOrdMap == null; - int valueCount = hash.size(); - finalOrds = pending.build(); - finalOrdCounts = pendingCounts.build(); - finalSortedValues = hash.sort(); - finalOrdMap = new int[valueCount]; - for (int ord = 0; ord < valueCount; ord++) { + SortedSetDocValues getDocValues() { + if (finalOrds == null) { + assert finalOrdCounts == null && finalSortedValues == null && finalOrdMap == null; + finishCurrentDoc(); + int valueCount = hash.size(); + finalOrds = pending.build(); + finalOrdCounts = pendingCounts.build(); + finalSortedValues = hash.sort(); + finalOrdMap = new int[valueCount]; + } + for (int ord = 0; ord < finalOrdMap.length; ord++) { finalOrdMap[finalSortedValues[ord]] = ord; } - - SortedSetSortField sf = (SortedSetSortField) sortField; - final SortedSetDocValues dvs = - new BufferedSortedSetDocValues(finalSortedValues, finalOrdMap, hash, finalOrds, finalOrdCounts, maxCount, docsWithField.iterator()); - return Sorter.getDocComparator(maxDoc, sf, () -> SortedSetSelector.wrap(dvs, sf.getSelector()), () -> null); + return new BufferedSortedSetDocValues(finalSortedValues, finalOrdMap, hash, finalOrds, finalOrdCounts, maxCount, docsWithField.iterator()); } @Override @@ -196,7 +186,9 @@ public void flush(SegmentWriteState state, Sorter.DocMap sortMap, DocValuesConsu final int[] sortedValues; final int[] ordMap; - if (finalOrdCounts == null) { + if (finalOrds == null) { + assert finalOrdCounts == null && finalSortedValues == null && finalOrdMap == null; + finishCurrentDoc(); ords = pending.build(); ordCounts = pendingCounts.build(); sortedValues = hash.sort(); @@ -315,8 +307,5 @@ public BytesRef lookupOrd(long ord) { return scratch; } } - @Override - DocIdSetIterator getDocIdSet() { - return docsWithField.iterator(); - } + } diff --git a/lucene/core/src/java/org/apache/lucene/index/Sorter.java b/lucene/core/src/java/org/apache/lucene/index/Sorter.java index 5f43c5ad1566..c8605696fd35 100644 --- a/lucene/core/src/java/org/apache/lucene/index/Sorter.java +++ b/lucene/core/src/java/org/apache/lucene/index/Sorter.java @@ -17,22 +17,13 @@ package org.apache.lucene.index; import java.io.IOException; -import java.util.Arrays; -import java.util.Comparator; -import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; -import org.apache.lucene.search.SortedNumericSelector; -import org.apache.lucene.search.SortedNumericSortField; -import org.apache.lucene.search.SortedSetSelector; -import org.apache.lucene.search.SortedSetSortField; import org.apache.lucene.util.TimSorter; import org.apache.lucene.util.packed.PackedInts; import 
org.apache.lucene.util.packed.PackedLongValues; -import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; - /** * Sorts documents of a given index by returning a permutation on the document * IDs. @@ -84,21 +75,13 @@ static boolean isConsistent(DocMap docMap) { return true; } - /** A comparator of doc IDs. */ - static abstract class DocComparator { - - /** Compare docID1 against docID2. The contract for the return value is the - * same as {@link Comparator#compare(Object, Object)}. */ - public abstract int compare(int docID1, int docID2); - } - private static final class DocValueSorter extends TimSorter { private final int[] docs; - private final Sorter.DocComparator comparator; + private final IndexSorter.DocComparator comparator; private final int[] tmp; - DocValueSorter(int[] docs, Sorter.DocComparator comparator) { + DocValueSorter(int[] docs, IndexSorter.DocComparator comparator) { super(docs.length / 64); this.docs = docs; this.comparator = comparator; @@ -139,7 +122,7 @@ protected int compareSaved(int i, int j) { } /** Computes the old-to-new permutation over the given comparator. */ - private static Sorter.DocMap sort(final int maxDoc, DocComparator comparator) { + private static Sorter.DocMap sort(final int maxDoc, IndexSorter.DocComparator comparator) { // check if the index is sorted boolean sorted = true; for (int i = 1; i < maxDoc; ++i) { @@ -202,196 +185,10 @@ public int size() { }; } - /** Returns the native sort type for {@link SortedSetSortField} and {@link SortedNumericSortField}, - * {@link SortField#getType()} otherwise */ - static SortField.Type getSortFieldType(SortField sortField) { - if (sortField instanceof SortedSetSortField) { - return SortField.Type.STRING; - } else if (sortField instanceof SortedNumericSortField) { - return ((SortedNumericSortField) sortField).getNumericType(); - } else { - return sortField.getType(); - } - } - - /** Wraps a {@link SortedNumericDocValues} as a single-valued view if the field is an instance of {@link SortedNumericSortField}, - * returns {@link NumericDocValues} for the field otherwise. */ - static NumericDocValues getOrWrapNumeric(LeafReader reader, SortField sortField) throws IOException { - if (sortField instanceof SortedNumericSortField) { - SortedNumericSortField sf = (SortedNumericSortField) sortField; - return SortedNumericSelector.wrap(DocValues.getSortedNumeric(reader, sf.getField()), sf.getSelector(), sf.getNumericType()); - } else { - return DocValues.getNumeric(reader, sortField.getField()); - } - } - - /** Wraps a {@link SortedSetDocValues} as a single-valued view if the field is an instance of {@link SortedSetSortField}, - * returns {@link SortedDocValues} for the field otherwise. 
*/ - static SortedDocValues getOrWrapSorted(LeafReader reader, SortField sortField) throws IOException { - if (sortField instanceof SortedSetSortField) { - SortedSetSortField sf = (SortedSetSortField) sortField; - return SortedSetSelector.wrap(DocValues.getSortedSet(reader, sf.getField()), sf.getSelector()); - } else { - return DocValues.getSorted(reader, sortField.getField()); - } - } - - static DocComparator getDocComparator(LeafReader reader, SortField sortField) throws IOException { - return getDocComparator(reader.maxDoc(), sortField, - () -> getOrWrapSorted(reader, sortField), - () -> getOrWrapNumeric(reader, sortField)); - } - - interface NumericDocValuesSupplier { - NumericDocValues get() throws IOException; - } - - interface SortedDocValuesSupplier { - SortedDocValues get() throws IOException; - } - - /** We cannot use the {@link FieldComparator} API because that API requires that you send it docIDs in order. Note that this API - * allocates arrays[maxDoc] to hold the native values needed for comparison, but 1) they are transient (only alive while sorting this one - * segment), and 2) in the typical index sorting case, they are only used to sort newly flushed segments, which will be smaller than - * merged segments. */ - static DocComparator getDocComparator(int maxDoc, - SortField sortField, - SortedDocValuesSupplier sortedProvider, - NumericDocValuesSupplier numericProvider) throws IOException { - - final int reverseMul = sortField.getReverse() ? -1 : 1; - final SortField.Type sortType = getSortFieldType(sortField); - - switch(sortType) { - - case STRING: - { - final SortedDocValues sorted = sortedProvider.get(); - final int missingOrd; - if (sortField.getMissingValue() == SortField.STRING_LAST) { - missingOrd = Integer.MAX_VALUE; - } else { - missingOrd = Integer.MIN_VALUE; - } - - final int[] ords = new int[maxDoc]; - Arrays.fill(ords, missingOrd); - int docID; - while ((docID = sorted.nextDoc()) != NO_MORE_DOCS) { - ords[docID] = sorted.ordValue(); - } - - return new DocComparator() { - @Override - public int compare(int docID1, int docID2) { - return reverseMul * Integer.compare(ords[docID1], ords[docID2]); - } - }; - } - - case LONG: - { - final NumericDocValues dvs = numericProvider.get(); - long[] values = new long[maxDoc]; - if (sortField.getMissingValue() != null) { - Arrays.fill(values, (Long) sortField.getMissingValue()); - } - while (true) { - int docID = dvs.nextDoc(); - if (docID == NO_MORE_DOCS) { - break; - } - values[docID] = dvs.longValue(); - } - - return new DocComparator() { - @Override - public int compare(int docID1, int docID2) { - return reverseMul * Long.compare(values[docID1], values[docID2]); - } - }; - } - - case INT: - { - final NumericDocValues dvs = numericProvider.get(); - int[] values = new int[maxDoc]; - if (sortField.getMissingValue() != null) { - Arrays.fill(values, (Integer) sortField.getMissingValue()); - } - - while (true) { - int docID = dvs.nextDoc(); - if (docID == NO_MORE_DOCS) { - break; - } - values[docID] = (int) dvs.longValue(); - } - - return new DocComparator() { - @Override - public int compare(int docID1, int docID2) { - return reverseMul * Integer.compare(values[docID1], values[docID2]); - } - }; - } - - case DOUBLE: - { - final NumericDocValues dvs = numericProvider.get(); - double[] values = new double[maxDoc]; - if (sortField.getMissingValue() != null) { - Arrays.fill(values, (Double) sortField.getMissingValue()); - } - while (true) { - int docID = dvs.nextDoc(); - if (docID == NO_MORE_DOCS) { - break; - } - values[docID] = 
Double.longBitsToDouble(dvs.longValue()); - } - - return new DocComparator() { - @Override - public int compare(int docID1, int docID2) { - return reverseMul * Double.compare(values[docID1], values[docID2]); - } - }; - } - - case FLOAT: - { - final NumericDocValues dvs = numericProvider.get(); - float[] values = new float[maxDoc]; - if (sortField.getMissingValue() != null) { - Arrays.fill(values, (Float) sortField.getMissingValue()); - } - while (true) { - int docID = dvs.nextDoc(); - if (docID == NO_MORE_DOCS) { - break; - } - values[docID] = Float.intBitsToFloat((int) dvs.longValue()); - } - - return new DocComparator() { - @Override - public int compare(int docID1, int docID2) { - return reverseMul * Float.compare(values[docID1], values[docID2]); - } - }; - } - - default: - throw new IllegalArgumentException("unhandled SortField.getType()=" + sortField.getType()); - } - } - - /** * Returns a mapping from the old document ID to its new location in the * sorted index. Implementations can use the auxiliary - * {@link #sort(int, DocComparator)} to compute the old-to-new permutation + * {@link #sort(int, IndexSorter.DocComparator)} to compute the old-to-new permutation * given a list of documents and their corresponding values. *

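(A minimal sketch of the pattern being moved here: the removed switch above materializes each document's sort value into an array indexed by docID, so documents can be compared in arbitrary order, which the FieldComparator API cannot do. Hypothetical helper, not committed code; assumes java.util.Arrays and org.apache.lucene.search.DocIdSetIterator are imported. IndexSorter implementations now own this logic.)

static IndexSorter.DocComparator longDocComparator(NumericDocValues dvs, int maxDoc, long missingValue, int reverseMul) throws IOException {
  final long[] values = new long[maxDoc];
  Arrays.fill(values, missingValue);  // documents without a value sort by the missing value
  int docID;
  while ((docID = dvs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
    values[docID] = dvs.longValue();
  }
  // DocComparator is a functional interface: compare two docIDs directly, in any order.
  return (docID1, docID2) -> reverseMul * Long.compare(values[docID1], values[docID2]);
}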
* A return value of null is allowed and means that @@ -401,28 +198,29 @@ public int compare(int docID1, int docID2) { * well, they will however be marked as deleted in the sorted view. */ DocMap sort(LeafReader reader) throws IOException { - SortField fields[] = sort.getSort(); - final DocComparator comparators[] = new DocComparator[fields.length]; + SortField[] fields = sort.getSort(); + final IndexSorter.DocComparator[] comparators = new IndexSorter.DocComparator[fields.length]; for (int i = 0; i < fields.length; i++) { - comparators[i] = getDocComparator(reader, fields[i]); + IndexSorter sorter = fields[i].getIndexSorter(); + if (sorter == null) { + throw new IllegalArgumentException("Cannot use SortField " + fields[i] + " to sort indexes"); + } + comparators[i] = sorter.getDocComparator(reader, reader.maxDoc()); } return sort(reader.maxDoc(), comparators); } - DocMap sort(int maxDoc, DocComparator[] comparators) throws IOException { - final DocComparator comparator = new DocComparator() { - @Override - public int compare(int docID1, int docID2) { - for (int i = 0; i < comparators.length; i++) { - int comp = comparators[i].compare(docID1, docID2); - if (comp != 0) { - return comp; - } + DocMap sort(int maxDoc, IndexSorter.DocComparator[] comparators) throws IOException { + final IndexSorter.DocComparator comparator = (docID1, docID2) -> { + for (int i = 0; i < comparators.length; i++) { + int comp = comparators[i].compare(docID1, docID2); + if (comp != 0) { + return comp; } - return Integer.compare(docID1, docID2); // docid order tiebreak } + return Integer.compare(docID1, docID2); // docid order tiebreak }; return sort(maxDoc, comparator); diff --git a/lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumer.java b/lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumer.java index f9b851760945..80213f92a9e9 100644 --- a/lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumer.java +++ b/lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumer.java @@ -44,11 +44,11 @@ class TermVectorsConsumer extends TermsHash { final ByteSliceReader vectorSliceReaderOff = new ByteSliceReader(); boolean hasVectors; - int numVectorFields; + private int numVectorFields; int lastDocID; private TermVectorsConsumerPerField[] perFields = new TermVectorsConsumerPerField[1]; - public TermVectorsConsumer(DocumentsWriterPerThread docWriter) { + TermVectorsConsumer(DocumentsWriterPerThread docWriter) { super(docWriter, false, null); this.docWriter = docWriter; } @@ -91,7 +91,7 @@ void initTermVectorsWriter() throws IOException { } @Override - void finishDocument() throws IOException { + void finishDocument(int docID) throws IOException { if (!hasVectors) { return; @@ -102,7 +102,7 @@ void finishDocument() throws IOException { initTermVectorsWriter(); - fill(docState.docID); + fill(docID); // Append term vectors to the real outputs: writer.startDocument(numVectorFields); @@ -111,7 +111,7 @@ void finishDocument() throws IOException { } writer.finishDocument(); - assert lastDocID == docState.docID: "lastDocID=" + lastDocID + " docState.docID=" + docState.docID; + assert lastDocID == docID: "lastDocID=" + lastDocID + " docID=" + docID; lastDocID++; diff --git a/lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumerPerField.java b/lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumerPerField.java index 4e0aa3cdaadb..a1abd985f31c 100644 --- a/lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumerPerField.java +++ 
b/lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumerPerField.java @@ -20,27 +20,37 @@ import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PayloadAttribute; +import org.apache.lucene.analysis.tokenattributes.TermFrequencyAttribute; import org.apache.lucene.codecs.TermVectorsWriter; +import org.apache.lucene.util.ByteBlockPool; import org.apache.lucene.util.BytesRef; final class TermVectorsConsumerPerField extends TermsHashPerField { private TermVectorsPostingsArray termVectorsPostingsArray; - final TermVectorsConsumer termsWriter; + private final TermVectorsConsumer termsWriter; + private final FieldInvertState fieldState; + private final FieldInfo fieldInfo; - boolean doVectors; - boolean doVectorPositions; - boolean doVectorOffsets; - boolean doVectorPayloads; + private boolean doVectors; + private boolean doVectorPositions; + private boolean doVectorOffsets; + private boolean doVectorPayloads; - OffsetAttribute offsetAttribute; - PayloadAttribute payloadAttribute; - boolean hasPayloads; // if enabled, and we actually saw any for this field + private OffsetAttribute offsetAttribute; + private PayloadAttribute payloadAttribute; + private TermFrequencyAttribute termFreqAtt; + private final ByteBlockPool termBytePool; - public TermVectorsConsumerPerField(FieldInvertState invertState, TermVectorsConsumer termsWriter, FieldInfo fieldInfo) { - super(2, invertState, termsWriter, null, fieldInfo); - this.termsWriter = termsWriter; + private boolean hasPayloads; // if enabled, and we actually saw any for this field + + TermVectorsConsumerPerField(FieldInvertState invertState, TermVectorsConsumer termsHash, FieldInfo fieldInfo) { + super(2, termsHash.intPool, termsHash.bytePool, termsHash.termBytePool, termsHash.bytesUsed, null, fieldInfo.name, fieldInfo.getIndexOptions()); + this.termsWriter = termsHash; + this.fieldInfo = fieldInfo; + this.fieldState = invertState; + termBytePool = termsHash.termBytePool; } /** Called once per field per document if term vectors @@ -48,7 +58,7 @@ public TermVectorsConsumerPerField(FieldInvertState invertState, TermVectorsCons * RAMOutputStream, which is then quickly flushed to * the real term vectors files in the Directory. 
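(Referring back to the TermVectorsConsumer change above: finishDocument(int docID) now receives the doc id explicitly rather than reading it from the removed per-thread DocState. A sketch of the fill-then-append pattern it relies on, mirroring fill(docID); illustrative only.)

void fill(int docID) throws IOException {
  // Documents without vectors still get empty entries, so docIDs in the
  // term vectors file stay dense and aligned with the segment's docIDs.
  while (lastDocID < docID) {
    writer.startDocument(0);  // zero vector fields for the gap document
    writer.finishDocument();
    lastDocID++;
  }
}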
*/ @Override void finish() { - if (!doVectors || bytesHash.size() == 0) { + if (!doVectors || getNumTerms() == 0) { return; } termsWriter.addFieldToFlush(this); @@ -61,7 +71,7 @@ void finishDocument() throws IOException { doVectors = false; - final int numPostings = bytesHash.size(); + final int numPostings = getNumTerms(); final BytesRef flushTerm = termsWriter.flushTerm; @@ -74,7 +84,8 @@ void finishDocument() throws IOException { TermVectorsPostingsArray postings = termVectorsPostingsArray; final TermVectorsWriter tv = termsWriter.writer; - final int[] termIDs = sortPostings(); + sortTerms(); + final int[] termIDs = getSortedTermIDs(); tv.startField(fieldInfo, numPostings, doVectorPositions, doVectorOffsets, hasPayloads); @@ -110,18 +121,19 @@ void finishDocument() throws IOException { @Override boolean start(IndexableField field, boolean first) { super.start(field, first); + termFreqAtt = fieldState.termFreqAttribute; assert field.fieldType().indexOptions() != IndexOptions.NONE; if (first) { - if (bytesHash.size() != 0) { + if (getNumTerms() != 0) { // Only necessary if previous doc hit a // non-aborting exception while writing vectors in // this field: reset(); } - bytesHash.reinit(); + reinitHash(); hasPayloads = false; @@ -189,8 +201,8 @@ boolean start(IndexableField field, boolean first) { return doVectors; } - - void writeProx(TermVectorsPostingsArray postings, int termID) { + + void writeProx(TermVectorsPostingsArray postings, int termID) { if (doVectorOffsets) { int startOffset = fieldState.offset + offsetAttribute.startOffset(); int endOffset = fieldState.offset + offsetAttribute.endOffset(); @@ -222,7 +234,7 @@ void writeProx(TermVectorsPostingsArray postings, int termID) { } @Override - void newTerm(final int termID) { + void newTerm(final int termID, final int docID) { TermVectorsPostingsArray postings = termVectorsPostingsArray; postings.freqs[termID] = getTermFreq(); @@ -233,7 +245,7 @@ void newTerm(final int termID) { } @Override - void addTerm(final int termID) { + void addTerm(final int termID, final int docID) { TermVectorsPostingsArray postings = termVectorsPostingsArray; postings.freqs[termID] += getTermFreq(); @@ -245,10 +257,10 @@ private int getTermFreq() { int freq = termFreqAtt.getTermFrequency(); if (freq != 1) { if (doVectorPositions) { - throw new IllegalArgumentException("field \"" + fieldInfo.name + "\": cannot index term vector positions while using custom TermFrequencyAttribute"); + throw new IllegalArgumentException("field \"" + getFieldName() + "\": cannot index term vector positions while using custom TermFrequencyAttribute"); } if (doVectorOffsets) { - throw new IllegalArgumentException("field \"" + fieldInfo.name + "\": cannot index term vector offsets while using custom TermFrequencyAttribute"); + throw new IllegalArgumentException("field \"" + getFieldName() + "\": cannot index term vector offsets while using custom TermFrequencyAttribute"); } } @@ -266,7 +278,7 @@ ParallelPostingsArray createPostingsArray(int size) { } static final class TermVectorsPostingsArray extends ParallelPostingsArray { - public TermVectorsPostingsArray(int size) { + TermVectorsPostingsArray(int size) { super(size); freqs = new int[size]; lastOffsets = new int[size]; diff --git a/lucene/core/src/java/org/apache/lucene/index/TermsHash.java b/lucene/core/src/java/org/apache/lucene/index/TermsHash.java index f420aca65b23..0f702d925b8e 100644 --- a/lucene/core/src/java/org/apache/lucene/index/TermsHash.java +++ b/lucene/core/src/java/org/apache/lucene/index/TermsHash.java @@ 
-40,14 +40,10 @@ abstract class TermsHash { final ByteBlockPool bytePool; ByteBlockPool termBytePool; final Counter bytesUsed; - - final DocumentsWriterPerThread.DocState docState; - final boolean trackAllocations; TermsHash(final DocumentsWriterPerThread docWriter, boolean trackAllocations, TermsHash nextTermsHash) { - this.docState = docWriter.docState; - this.trackAllocations = trackAllocations; + this.trackAllocations = trackAllocations; this.nextTermsHash = nextTermsHash; this.bytesUsed = trackAllocations ? docWriter.bytesUsed : Counter.newCounter(); intPool = new IntBlockPool(docWriter.intBlockAllocator); @@ -82,7 +78,7 @@ void flush(Map fieldsToFlush, final SegmentWriteState if (nextTermsHash != null) { Map nextChildFields = new HashMap<>(); for (final Map.Entry entry : fieldsToFlush.entrySet()) { - nextChildFields.put(entry.getKey(), entry.getValue().nextPerField); + nextChildFields.put(entry.getKey(), entry.getValue().getNextPerField()); } nextTermsHash.flush(nextChildFields, state, sortMap, norms); } @@ -90,9 +86,9 @@ void flush(Map fieldsToFlush, final SegmentWriteState abstract TermsHashPerField addField(FieldInvertState fieldInvertState, FieldInfo fieldInfo); - void finishDocument() throws IOException { + void finishDocument(int docID) throws IOException { if (nextTermsHash != null) { - nextTermsHash.finishDocument(); + nextTermsHash.finishDocument(docID); } } diff --git a/lucene/core/src/java/org/apache/lucene/index/TermsHashPerField.java b/lucene/core/src/java/org/apache/lucene/index/TermsHashPerField.java index 2586378267da..d3e048703d2d 100644 --- a/lucene/core/src/java/org/apache/lucene/index/TermsHashPerField.java +++ b/lucene/core/src/java/org/apache/lucene/index/TermsHashPerField.java @@ -19,182 +19,186 @@ import java.io.IOException; -import org.apache.lucene.analysis.tokenattributes.TermFrequencyAttribute; -import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; import org.apache.lucene.util.ByteBlockPool; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefHash.BytesStartArray; import org.apache.lucene.util.BytesRefHash; import org.apache.lucene.util.Counter; import org.apache.lucene.util.IntBlockPool; +/** + * This class stores streams of information per term without knowing + * the size of the stream ahead of time. Each stream typically encodes one level + * of information like term frequency per document or term proximity. Internally + * this class allocates a linked list of slices that can be read by a {@link ByteSliceReader} + * for each term. Terms are first deduplicated in a {@link BytesRefHash} once this is done + * internal data-structures point to the current offset of each stream that can be written to. 
+ */ abstract class TermsHashPerField implements Comparable { private static final int HASH_INIT_SIZE = 4; - final TermsHash termsHash; - - final TermsHashPerField nextPerField; - protected final DocumentsWriterPerThread.DocState docState; - protected final FieldInvertState fieldState; - TermToBytesRefAttribute termAtt; - protected TermFrequencyAttribute termFreqAtt; - - // Copied from our perThread - final IntBlockPool intPool; + private final TermsHashPerField nextPerField; + private final IntBlockPool intPool; final ByteBlockPool bytePool; - final ByteBlockPool termBytePool; - - final int streamCount; - final int numPostingInt; - - protected final FieldInfo fieldInfo; - - final BytesRefHash bytesHash; + // for each term we store an integer per stream that points into the bytePool above + // the address is updated once data is written to the stream to point to the next free offset + // in the terms stream. The start address for the stream is stored in postingsArray.byteStarts[termId] + // This is initialized in the #addTerm method, either to a brand new per term stream if the term is new or + // to the addresses where the term stream was written to when we saw it the last time. + private int[] termStreamAddressBuffer; + private int streamAddressOffset; + private final int streamCount; + private final String fieldName; + final IndexOptions indexOptions; + /* This stores the actual term bytes for postings and offsets into the parent hash in the case that this + * TermsHashPerField is hashing term vectors.*/ + private final BytesRefHash bytesHash; ParallelPostingsArray postingsArray; - private final Counter bytesUsed; + private int lastDocID; // only with assert /** streamCount: how many streams this field stores per term. * E.g. doc(+freq) is 1 stream, prox+offset is a second. 
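(A sketch of the slice-write mechanics the new class javadoc describes, simplified from writeByte further down; the addresses array stands in for the per-term stream address buffer.)

void writeByteToStream(ByteBlockPool bytePool, int[] addresses, int stream, byte b) {
  int upto = addresses[stream];
  byte[] block = bytePool.buffers[upto >> ByteBlockPool.BYTE_BLOCK_SHIFT];
  int offset = upto & ByteBlockPool.BYTE_BLOCK_MASK;
  if (block[offset] != 0) {
    // Hit the end-of-slice sentinel: link in the next, larger slice.
    offset = bytePool.allocSlice(block, offset);
    block = bytePool.buffer;
    addresses[stream] = offset + bytePool.byteOffset;
  }
  block[offset] = b;
  addresses[stream]++;  // advance this stream's write address
}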
*/ - - public TermsHashPerField(int streamCount, FieldInvertState fieldState, TermsHash termsHash, TermsHashPerField nextPerField, FieldInfo fieldInfo) { - intPool = termsHash.intPool; - bytePool = termsHash.bytePool; - termBytePool = termsHash.termBytePool; - docState = termsHash.docState; - this.termsHash = termsHash; - bytesUsed = termsHash.bytesUsed; - this.fieldState = fieldState; + TermsHashPerField(int streamCount, IntBlockPool intPool, ByteBlockPool bytePool, ByteBlockPool termBytePool, + Counter bytesUsed, TermsHashPerField nextPerField, String fieldName, IndexOptions indexOptions) { + this.intPool = intPool; + this.bytePool = bytePool; this.streamCount = streamCount; - numPostingInt = 2*streamCount; - this.fieldInfo = fieldInfo; + this.fieldName = fieldName; this.nextPerField = nextPerField; + assert indexOptions != IndexOptions.NONE; + this.indexOptions = indexOptions; PostingsBytesStartArray byteStarts = new PostingsBytesStartArray(this, bytesUsed); bytesHash = new BytesRefHash(termBytePool, HASH_INIT_SIZE, byteStarts); } void reset() { bytesHash.clear(false); + sortedTermIDs = null; if (nextPerField != null) { nextPerField.reset(); } } - public void initReader(ByteSliceReader reader, int termID, int stream) { + final void initReader(ByteSliceReader reader, int termID, int stream) { assert stream < streamCount; - int intStart = postingsArray.intStarts[termID]; - final int[] ints = intPool.buffers[intStart >> IntBlockPool.INT_BLOCK_SHIFT]; - final int upto = intStart & IntBlockPool.INT_BLOCK_MASK; + int streamStartOffset = postingsArray.addressOffset[termID]; + final int[] streamAddressBuffer = intPool.buffers[streamStartOffset >> IntBlockPool.INT_BLOCK_SHIFT]; + final int offsetInAddressBuffer = streamStartOffset & IntBlockPool.INT_BLOCK_MASK; reader.init(bytePool, postingsArray.byteStarts[termID]+stream*ByteBlockPool.FIRST_LEVEL_SIZE, - ints[upto+stream]); + streamAddressBuffer[offsetInAddressBuffer+stream]); } - int[] sortedTermIDs; + private int[] sortedTermIDs; /** Collapse the hash table and sort in-place; also sets - * this.sortedTermIDs to the results */ - public int[] sortPostings() { + * this.sortedTermIDs to the results + * This method must not be called twice unless {@link #reset()} + * or {@link #reinitHash()} was called. */ + final void sortTerms() { + assert sortedTermIDs == null; sortedTermIDs = bytesHash.sort(); + } + + /** + * Returns the sorted term IDs. {@link #sortTerms()} must be called before + */ + final int[] getSortedTermIDs() { + assert sortedTermIDs != null; return sortedTermIDs; } + final void reinitHash() { + sortedTermIDs = null; + bytesHash.reinit(); + } + private boolean doNextCall; // Secondary entry point (for 2nd & subsequent TermsHash), // because token text has already been "interned" into // textStart, so we hash by textStart. term vectors use // this API. - public void add(int textStart) throws IOException { + private void add(int textStart, final int docID) throws IOException { int termID = bytesHash.addByPoolOffset(textStart); if (termID >= 0) { // New posting // First time we are seeing this token since we last // flushed the hash. 
- // Init stream slices - if (numPostingInt + intPool.intUpto > IntBlockPool.INT_BLOCK_SIZE) { - intPool.nextBuffer(); - } - - if (ByteBlockPool.BYTE_BLOCK_SIZE - bytePool.byteUpto < numPostingInt*ByteBlockPool.FIRST_LEVEL_SIZE) { - bytePool.nextBuffer(); - } + initStreamSlices(termID, docID); + } else { + positionStreamSlice(termID, docID); + } + } - intUptos = intPool.buffer; - intUptoStart = intPool.intUpto; - intPool.intUpto += streamCount; + private void initStreamSlices(int termID, int docID) throws IOException { + // Init stream slices + // TODO: figure out why this is 2*streamCount here. streamCount should be enough? + if ((2*streamCount) + intPool.intUpto > IntBlockPool.INT_BLOCK_SIZE) { + // can we fit all the streams in the current buffer? + intPool.nextBuffer(); + } - postingsArray.intStarts[termID] = intUptoStart + intPool.intOffset; + if (ByteBlockPool.BYTE_BLOCK_SIZE - bytePool.byteUpto < (2*streamCount) * ByteBlockPool.FIRST_LEVEL_SIZE) { + // can we fit at least one byte per stream in the current buffer, if not allocate a new one + bytePool.nextBuffer(); + } - for(int i=0;i> IntBlockPool.INT_BLOCK_SHIFT]; - intUptoStart = intStart & IntBlockPool.INT_BLOCK_MASK; - addTerm(termID); + for (int i = 0; i < streamCount; i++) { + // initialize each stream with a slice we start with ByteBlockPool.FIRST_LEVEL_SIZE) + // and grow as we need more space. see ByteBlockPool.LEVEL_SIZE_ARRAY + final int upto = bytePool.newSlice(ByteBlockPool.FIRST_LEVEL_SIZE); + termStreamAddressBuffer[streamAddressOffset + i] = upto + bytePool.byteOffset; } + postingsArray.byteStarts[termID] = termStreamAddressBuffer[streamAddressOffset]; + newTerm(termID, docID); + } + + private boolean assertDocId(int docId) { + assert docId >= lastDocID : "docID must be >= " + lastDocID + " but was: " + docId; + lastDocID = docId; + return true; } /** Called once per inverted token. This is the primary * entry point (for first TermsHash); postings use this * API. */ - void add() throws IOException { + void add(BytesRef termBytes, final int docID) throws IOException { + assert assertDocId(docID); // We are first in the chain so we must "intern" the // term text into textStart address // Get the text & hash of this term. 
- int termID = bytesHash.add(termAtt.getBytesRef()); - + int termID = bytesHash.add(termBytes); //System.out.println("add term=" + termBytesRef.utf8ToString() + " doc=" + docState.docID + " termID=" + termID); - - if (termID >= 0) {// New posting - bytesHash.byteStart(termID); + if (termID >= 0) { // New posting // Init stream slices - if (numPostingInt + intPool.intUpto > IntBlockPool.INT_BLOCK_SIZE) { - intPool.nextBuffer(); - } - - if (ByteBlockPool.BYTE_BLOCK_SIZE - bytePool.byteUpto < numPostingInt*ByteBlockPool.FIRST_LEVEL_SIZE) { - bytePool.nextBuffer(); - } - - intUptos = intPool.buffer; - intUptoStart = intPool.intUpto; - intPool.intUpto += streamCount; - - postingsArray.intStarts[termID] = intUptoStart + intPool.intOffset; - - for(int i=0;i> IntBlockPool.INT_BLOCK_SHIFT]; - intUptoStart = intStart & IntBlockPool.INT_BLOCK_MASK; - addTerm(termID); + termID = positionStreamSlice(termID, docID); } - if (doNextCall) { - nextPerField.add(postingsArray.textStarts[termID]); + nextPerField.add(postingsArray.textStarts[termID], docID); } } - int[] intUptos; - int intUptoStart; + private int positionStreamSlice(int termID, final int docID) throws IOException { + termID = (-termID) - 1; + int intStart = postingsArray.addressOffset[termID]; + termStreamAddressBuffer = intPool.buffers[intStart >> IntBlockPool.INT_BLOCK_SHIFT]; + streamAddressOffset = intStart & IntBlockPool.INT_BLOCK_MASK; + addTerm(termID, docID); + return termID; + } - void writeByte(int stream, byte b) { - int upto = intUptos[intUptoStart+stream]; + final void writeByte(int stream, byte b) { + int streamAddress = streamAddressOffset + stream; + int upto = termStreamAddressBuffer[streamAddress]; byte[] bytes = bytePool.buffers[upto >> ByteBlockPool.BYTE_BLOCK_SHIFT]; assert bytes != null; int offset = upto & ByteBlockPool.BYTE_BLOCK_MASK; @@ -202,20 +206,20 @@ void writeByte(int stream, byte b) { // End of slice; allocate a new one offset = bytePool.allocSlice(bytes, offset); bytes = bytePool.buffer; - intUptos[intUptoStart+stream] = offset + bytePool.byteOffset; + termStreamAddressBuffer[streamAddress] = offset + bytePool.byteOffset; } bytes[offset] = b; - (intUptos[intUptoStart+stream])++; + (termStreamAddressBuffer[streamAddress])++; } - public void writeBytes(int stream, byte[] b, int offset, int len) { + final void writeBytes(int stream, byte[] b, int offset, int len) { // TODO: optimize final int end = offset + len; for(int i=offset;i LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE) { + throw new IllegalArgumentException("max edits must be 0.." 
+ LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE + ", inclusive; got: " + maxEdits); + } + if (prefixLength < 0) { + throw new IllegalArgumentException("prefixLength cannot be less than 0"); + } + this.term = term; + this.maxEdits = maxEdits; + int[] codePoints = stringToUTF32(term); + this.termLength = codePoints.length; + prefixLength = Math.min(prefixLength, codePoints.length); + int[] suffix = new int[codePoints.length - prefixLength]; + System.arraycopy(codePoints, prefixLength, suffix, 0, suffix.length); + this.levBuilder = new LevenshteinAutomata(suffix, Character.MAX_CODE_POINT, transpositions); + this.prefix = UnicodeUtil.newString(codePoints, 0, prefixLength); + } + + CompiledAutomaton[] buildAutomatonSet() { + CompiledAutomaton[] compiled = new CompiledAutomaton[maxEdits + 1]; + for (int i = 0; i <= maxEdits; i++) { + try { + compiled[i] = new CompiledAutomaton(levBuilder.toAutomaton(i, prefix), true, false); + } + catch (TooComplexToDeterminizeException e) { + throw new FuzzyTermsEnum.FuzzyTermsException(term, e); + } + } + return compiled; + } + + CompiledAutomaton buildMaxEditAutomaton() { + try { + return new CompiledAutomaton(levBuilder.toAutomaton(maxEdits, prefix), true, false); + } catch (TooComplexToDeterminizeException e) { + throw new FuzzyTermsEnum.FuzzyTermsException(term, e); + } + } + + int getTermLength() { + return this.termLength; + } + + private static int[] stringToUTF32(String text) { + int[] termText = new int[text.codePointCount(0, text.length())]; + for (int cp, i = 0, j = 0; i < text.length(); i += Character.charCount(cp)) { + termText[j++] = cp = text.codePointAt(i); + } + return termText; + } +} diff --git a/lucene/core/src/java/org/apache/lucene/search/FuzzyQuery.java b/lucene/core/src/java/org/apache/lucene/search/FuzzyQuery.java index c4b4d1b6adab..041f0ca180ae 100644 --- a/lucene/core/src/java/org/apache/lucene/search/FuzzyQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/FuzzyQuery.java @@ -18,14 +18,13 @@ import java.io.IOException; +import java.util.Objects; import org.apache.lucene.index.SingleTermsEnum; import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; -import org.apache.lucene.util.Accountable; import org.apache.lucene.util.AttributeSource; -import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.automaton.CompiledAutomaton; import org.apache.lucene.util.automaton.LevenshteinAutomata; @@ -53,9 +52,7 @@ * not match an indexed term "ab", and FuzzyQuery on term "a" with maxEdits=2 will not * match an indexed term "abc". 
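(How the new FuzzyAutomatonBuilder above is used, in isolation; hedged sketch with illustrative values, matching term "lucene" with prefixLength=1 and maxEdits=2.)

// One automaton per edit distance: each accepts the fixed prefix "l" followed
// by anything within i edits of the suffix "ucene".
LevenshteinAutomata lev = new LevenshteinAutomata("ucene", true);  // true: transpositions count as one edit
CompiledAutomaton[] compiled = new CompiledAutomaton[3];
for (int i = 0; i <= 2; i++) {
  compiled[i] = new CompiledAutomaton(lev.toAutomaton(i, "l"), true, false);
}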
*/ -public class FuzzyQuery extends MultiTermQuery implements Accountable { - - private static final long BASE_RAM_BYTES = RamUsageEstimator.shallowSizeOfInstance(AutomatonQuery.class); +public class FuzzyQuery extends MultiTermQuery { public final static int defaultMaxEdits = LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE; public final static int defaultPrefixLength = 0; @@ -67,10 +64,6 @@ public class FuzzyQuery extends MultiTermQuery implements Accountable { private final boolean transpositions; private final int prefixLength; private final Term term; - private final int termLength; - private final CompiledAutomaton[] automata; - - private final long ramBytesUsed; /** * Create a new FuzzyQuery that will match terms with an edit distance @@ -106,22 +99,7 @@ public FuzzyQuery(Term term, int maxEdits, int prefixLength, int maxExpansions, this.prefixLength = prefixLength; this.transpositions = transpositions; this.maxExpansions = maxExpansions; - int[] codePoints = FuzzyTermsEnum.stringToUTF32(term.text()); - this.termLength = codePoints.length; - this.automata = FuzzyTermsEnum.buildAutomata(term.text(), codePoints, prefixLength, transpositions, maxEdits); setRewriteMethod(new MultiTermQuery.TopTermsBlendedFreqScoringRewrite(maxExpansions)); - this.ramBytesUsed = calculateRamBytesUsed(term, this.automata); - } - - private static long calculateRamBytesUsed(Term term, CompiledAutomaton[] automata) { - long bytes = BASE_RAM_BYTES + term.ramBytesUsed(); - for (CompiledAutomaton a : automata) { - bytes += a.ramBytesUsed(); - } - bytes += 4 * Integer.BYTES; - bytes += Long.BYTES; - bytes += 1; - return bytes; } /** @@ -173,8 +151,9 @@ public boolean getTranspositions() { /** * Returns the compiled automata used to match terms */ - public CompiledAutomaton[] getAutomata() { - return automata; + public CompiledAutomaton getAutomata() { + FuzzyAutomatonBuilder builder = new FuzzyAutomatonBuilder(term.text(), maxEdits, prefixLength, transpositions); + return builder.buildMaxEditAutomaton(); } @Override @@ -183,17 +162,17 @@ public void visit(QueryVisitor visitor) { if (maxEdits == 0 || prefixLength >= term.text().length()) { visitor.consumeTerms(this, term); } else { - automata[automata.length - 1].visit(visitor, this, field); + visitor.consumeTermsMatching(this, term.field(), () -> getAutomata().runAutomaton); } } } @Override protected TermsEnum getTermsEnum(Terms terms, AttributeSource atts) throws IOException { - if (maxEdits == 0 || prefixLength >= term.text().length()) { // can only match if it's exact + if (maxEdits == 0) { // can only match if it's exact return new SingleTermsEnum(terms.iterator(), term.bytes()); } - return new FuzzyTermsEnum(terms, atts, getTerm(), termLength, maxEdits, automata); + return new FuzzyTermsEnum(terms, atts, getTerm(), maxEdits, prefixLength, transpositions); } /** @@ -237,22 +216,9 @@ public boolean equals(Object obj) { if (getClass() != obj.getClass()) return false; FuzzyQuery other = (FuzzyQuery) obj; - // Note that we don't need to compare termLength or automata because they - // are entirely determined by the other fields - if (maxEdits != other.maxEdits) - return false; - if (prefixLength != other.prefixLength) - return false; - if (maxExpansions != other.maxExpansions) - return false; - if (transpositions != other.transpositions) - return false; - if (term == null) { - if (other.term != null) - return false; - } else if (!term.equals(other.term)) - return false; - return true; + return Objects.equals(maxEdits, other.maxEdits) && 
Objects.equals(prefixLength, other.prefixLength) + && Objects.equals(maxExpansions, other.maxExpansions) && Objects.equals(transpositions, other.transpositions) + && Objects.equals(term, other.term); } /** @@ -274,8 +240,4 @@ public static int floatToEdits(float minimumSimilarity, int termLen) { } } - @Override - public long ramBytesUsed() { - return ramBytesUsed; - } } diff --git a/lucene/core/src/java/org/apache/lucene/search/FuzzyTermsEnum.java b/lucene/core/src/java/org/apache/lucene/search/FuzzyTermsEnum.java index 91a44d5245c0..4c49d8accd77 100644 --- a/lucene/core/src/java/org/apache/lucene/search/FuzzyTermsEnum.java +++ b/lucene/core/src/java/org/apache/lucene/search/FuzzyTermsEnum.java @@ -18,6 +18,7 @@ import java.io.IOException; +import java.util.function.Supplier; import org.apache.lucene.index.ImpactsEnum; import org.apache.lucene.index.PostingsEnum; @@ -25,14 +26,14 @@ import org.apache.lucene.index.TermState; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.util.Attribute; +import org.apache.lucene.util.AttributeImpl; +import org.apache.lucene.util.AttributeReflector; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.UnicodeUtil; -import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; -import org.apache.lucene.util.automaton.LevenshteinAutomata; -import org.apache.lucene.util.automaton.TooComplexToDeterminizeException; /** Subclass of TermsEnum for enumerating all terms that are similar * to the specified filter term. @@ -57,21 +58,21 @@ public final class FuzzyTermsEnum extends TermsEnum { private final MaxNonCompetitiveBoostAttribute maxBoostAtt; private final CompiledAutomaton[] automata; + private final Terms terms; + private final int termLength; + private final Term term; private float bottom; private BytesRef bottomTerm; private BytesRef queuedBottom; - private final int termLength; // Maximum number of edits we will accept. This is either 2 or 1 (or, degenerately, 0) passed by the user originally, // but as we collect terms, we can lower this (e.g. from 2 to 1) if we detect that the term queue is full, and all // collected terms are ed=1: private int maxEdits; - private final Terms terms; - private final Term term; /** * Constructor for enumeration of all terms from specified reader which share a prefix of @@ -88,43 +89,44 @@ public final class FuzzyTermsEnum extends TermsEnum { * @throws IOException if there is a low-level IO error */ public FuzzyTermsEnum(Terms terms, Term term, int maxEdits, int prefixLength, boolean transpositions) throws IOException { - this(terms, term, stringToUTF32(term.text()), maxEdits, prefixLength, transpositions); - } - - private FuzzyTermsEnum(Terms terms, Term term, int[] codePoints, int maxEdits, int prefixLength, boolean transpositions) throws IOException { - this(terms, new AttributeSource(), term, codePoints.length, maxEdits, - buildAutomata(term.text(), codePoints, prefixLength, transpositions, maxEdits)); + this(terms, new AttributeSource(), term, () -> new FuzzyAutomatonBuilder(term.text(), maxEdits, prefixLength, transpositions)); } /** * Constructor for enumeration of all terms from specified reader which share a prefix of * length prefixLength with term and which have at most {@code maxEdits} edits. *

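(For context, the public entry point whose internals move around here; usage sketch, with the field name, index contents, and searcher assumed.)

// Match terms within 2 edits of "lucene"; the first character must match
// exactly, and at most 50 terms are expanded during rewrite.
Query q = new FuzzyQuery(new Term("body", "lucene"), 2, 1, 50, true);
TopDocs hits = searcher.search(q, 10);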
- * After calling the constructor the enumeration is already pointing to the first - * valid term if such a term exists. - * + * After calling the constructor the enumeration is already pointing to the first + * valid term if such a term exists. + * * @param terms Delivers terms. - * @param atts {@link AttributeSource} created by the rewrite method of {@link MultiTermQuery} - * that contains information about competitive boosts during rewrite + * @param atts An AttributeSource used to share automata between segments * @param term Pattern term. * @param maxEdits Maximum edit distance. - * @param automata An array of levenshtein automata to match against terms, - * see {@link #buildAutomata(String, int[], int, boolean, int)} + * @param prefixLength the length of the required common prefix + * @param transpositions whether transpositions should count as a single edit * @throws IOException if there is a low-level IO error */ - public FuzzyTermsEnum(Terms terms, AttributeSource atts, Term term, int termLength, - final int maxEdits, CompiledAutomaton[] automata) throws IOException { + FuzzyTermsEnum(Terms terms, AttributeSource atts, Term term, int maxEdits, int prefixLength, boolean transpositions) throws IOException { + this(terms, atts, term, () -> new FuzzyAutomatonBuilder(term.text(), maxEdits, prefixLength, transpositions)); + } + + private FuzzyTermsEnum(Terms terms, AttributeSource atts, Term term, Supplier automatonBuilder) throws IOException { - this.maxEdits = maxEdits; this.terms = terms; - this.term = term; this.atts = atts; - this.termLength = termLength; + this.term = term; this.maxBoostAtt = atts.addAttribute(MaxNonCompetitiveBoostAttribute.class); this.boostAtt = atts.addAttribute(BoostAttribute.class); - this.automata = automata; + atts.addAttributeImpl(new AutomatonAttributeImpl()); + AutomatonAttribute aa = atts.addAttribute(AutomatonAttribute.class); + aa.init(automatonBuilder); + + this.automata = aa.getAutomata(); + this.termLength = aa.getTermLength(); + this.maxEdits = this.automata.length - 1; bottom = maxBoostAtt.getMaxNonCompetitiveBoost(); bottomTerm = maxBoostAtt.getCompetitiveTerm(); @@ -145,47 +147,6 @@ public void setMaxNonCompetitiveBoost(float boost) { public float getBoost() { return boostAtt.getBoost(); } - - static CompiledAutomaton[] buildAutomata(String text, int[] termText, int prefixLength, boolean transpositions, int maxEdits) { - CompiledAutomaton[] compiled = new CompiledAutomaton[maxEdits + 1]; - Automaton[] automata = buildAutomata(termText, prefixLength, transpositions, maxEdits); - for (int i = 0; i <= maxEdits; i++) { - try { - compiled[i] = new CompiledAutomaton(automata[i], true, false); - } - catch (TooComplexToDeterminizeException e) { - throw new FuzzyTermsException(text, e); - } - } - return compiled; - } - - static int[] stringToUTF32(String text) { - int[] termText = new int[text.codePointCount(0, text.length())]; - for (int cp, i = 0, j = 0; i < text.length(); i += Character.charCount(cp)) { - termText[j++] = cp = text.codePointAt(i); - } - return termText; - } - - private static Automaton[] buildAutomata(int[] termText, int prefixLength, boolean transpositions, int maxEdits) { - if (maxEdits < 0 || maxEdits > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE) { - throw new IllegalArgumentException("max edits must be 0.." 
+ LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE + ", inclusive; got: " + maxEdits); - } - if (prefixLength < 0) { - throw new IllegalArgumentException("prefixLength cannot be less than 0"); - } - Automaton[] automata = new Automaton[maxEdits + 1]; - int termLength = termText.length; - prefixLength = Math.min(prefixLength, termLength); - String suffix = UnicodeUtil.newString(termText, prefixLength, termText.length - prefixLength); - LevenshteinAutomata builder = new LevenshteinAutomata(suffix, transpositions); - String prefix = UnicodeUtil.newString(termText, 0, prefixLength); - for (int i = 0; i <= maxEdits; i++) { - automata[i] = builder.toAutomaton(i, prefix); - } - return automata; - } /** * return an automata-based enum for matching up to editDistance from @@ -274,7 +235,7 @@ public BytesRef next() throws IOException { final float bottom = maxBoostAtt.getMaxNonCompetitiveBoost(); final BytesRef bottomTerm = maxBoostAtt.getCompetitiveTerm(); - if (term != null && (bottom != this.bottom || bottomTerm != this.bottomTerm)) { + if (bottom != this.bottom || bottomTerm != this.bottomTerm) { this.bottom = bottom; this.bottomTerm = bottomTerm; // clone the term before potentially doing something with it @@ -364,4 +325,60 @@ public static class FuzzyTermsException extends RuntimeException { } } + /** + * Used for sharing automata between segments + * + * Levenshtein automata are large and expensive to build; we don't want to build + * them directly on the query because this can blow up caches that use queries + * as keys; we also don't want to rebuild them for every segment. This attribute + * allows the FuzzyTermsEnum to build the automata once for its first segment + * and then share them for subsequent segment calls. + */ + private interface AutomatonAttribute extends Attribute { + CompiledAutomaton[] getAutomata(); + int getTermLength(); + void init(Supplier builder); + } + + private static class AutomatonAttributeImpl extends AttributeImpl implements AutomatonAttribute { + + private CompiledAutomaton[] automata; + private int termLength; + + @Override + public CompiledAutomaton[] getAutomata() { + return automata; + } + + @Override + public int getTermLength() { + return termLength; + } + + @Override + public void init(Supplier supplier) { + if (automata != null) { + return; + } + FuzzyAutomatonBuilder builder = supplier.get(); + this.termLength = builder.getTermLength(); + this.automata = builder.buildAutomatonSet(); + } + + @Override + public void clear() { + this.automata = null; + } + + @Override + public void reflectWith(AttributeReflector reflector) { + throw new UnsupportedOperationException(); + } + + @Override + public void copyTo(AttributeImpl target) { + throw new UnsupportedOperationException(); + } + } + } diff --git a/lucene/core/src/java/org/apache/lucene/search/MultiCollector.java b/lucene/core/src/java/org/apache/lucene/search/MultiCollector.java index 82251e43cfe8..5cb6db8acd48 100644 --- a/lucene/core/src/java/org/apache/lucene/search/MultiCollector.java +++ b/lucene/core/src/java/org/apache/lucene/search/MultiCollector.java @@ -117,7 +117,7 @@ public ScoreMode scoreMode() { @Override public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { - final List leafCollectors = new ArrayList<>(); + final List leafCollectors = new ArrayList<>(collectors.length); for (Collector collector : collectors) { final LeafCollector leafCollector; try { @@ -134,7 +134,7 @@ public LeafCollector getLeafCollector(LeafReaderContext context) throws IOExcept case 1: 
return leafCollectors.get(0); default: - return new MultiLeafCollector(leafCollectors, cacheScores); + return new MultiLeafCollector(leafCollectors, cacheScores, scoreMode() == ScoreMode.TOP_SCORES); } } @@ -142,12 +142,14 @@ private static class MultiLeafCollector implements LeafCollector { private final boolean cacheScores; private final LeafCollector[] collectors; - private int numCollectors; + private final float[] minScores; + private final boolean skipNonCompetitiveScores; - private MultiLeafCollector(List collectors, boolean cacheScores) { + private MultiLeafCollector(List collectors, boolean cacheScores, boolean skipNonCompetitive) { this.collectors = collectors.toArray(new LeafCollector[collectors.size()]); this.cacheScores = cacheScores; - this.numCollectors = this.collectors.length; + this.skipNonCompetitiveScores = skipNonCompetitive; + this.minScores = this.skipNonCompetitiveScores ? new float[this.collectors.length] : null; } @Override @@ -155,48 +157,89 @@ public void setScorer(Scorable scorer) throws IOException { if (cacheScores) { scorer = new ScoreCachingWrappingScorer(scorer); } - scorer = new FilterScorable(scorer) { - @Override - public void setMinCompetitiveScore(float minScore) { - // Ignore calls to setMinCompetitiveScore so that if we wrap two - // collectors and one of them wants to skip low-scoring hits, then - // the other collector still sees all hits. We could try to reconcile - // min scores and take the maximum min score across collectors, but - // this is very unlikely to be helpful in practice. + if (skipNonCompetitiveScores) { + for (int i = 0; i < collectors.length; ++i) { + final LeafCollector c = collectors[i]; + if (c != null) { + c.setScorer(new MinCompetitiveScoreAwareScorable(scorer, i, minScores)); + } } + } else { + scorer = new FilterScorable(scorer) { + @Override + public void setMinCompetitiveScore(float minScore) throws IOException { + // Ignore calls to setMinCompetitiveScore so that if we wrap two + // collectors and one of them wants to skip low-scoring hits, then + // the other collector still sees all hits. 
+ } - }; - for (int i = 0; i < numCollectors; ++i) { - final LeafCollector c = collectors[i]; - c.setScorer(scorer); + }; + for (int i = 0; i < collectors.length; ++i) { + final LeafCollector c = collectors[i]; + if (c != null) { + c.setScorer(scorer); + } + } } } - private void removeCollector(int i) { - System.arraycopy(collectors, i + 1, collectors, i, numCollectors - i - 1); - --numCollectors; - collectors[numCollectors] = null; - } - @Override public void collect(int doc) throws IOException { - final LeafCollector[] collectors = this.collectors; - int numCollectors = this.numCollectors; - for (int i = 0; i < numCollectors; ) { + for (int i = 0; i < collectors.length; i++) { final LeafCollector collector = collectors[i]; - try { - collector.collect(doc); - ++i; - } catch (CollectionTerminatedException e) { - removeCollector(i); - numCollectors = this.numCollectors; - if (numCollectors == 0) { - throw new CollectionTerminatedException(); + if (collector != null) { + try { + collector.collect(doc); + } catch (CollectionTerminatedException e) { + collectors[i] = null; + if (allCollectorsTerminated()) { + throw new CollectionTerminatedException(); + } } } } } + private boolean allCollectorsTerminated() { + for (int i = 0; i < collectors.length; i++) { + if (collectors[i] != null) { + return false; + } + } + return true; + } + + } + + final static class MinCompetitiveScoreAwareScorable extends FilterScorable { + + private final int idx; + private final float[] minScores; + + MinCompetitiveScoreAwareScorable(Scorable in, int idx, float[] minScores) { + super(in); + this.idx = idx; + this.minScores = minScores; + } + + @Override + public void setMinCompetitiveScore(float minScore) throws IOException { + if (minScore > minScores[idx]) { + minScores[idx] = minScore; + in.setMinCompetitiveScore(minScore()); + } + } + + private float minScore() { + float min = Float.MAX_VALUE; + for (int i = 0; i < minScores.length; i++) { + if (minScores[i] < min) { + min = minScores[i]; + } + } + return min; + } + } } diff --git a/lucene/core/src/java/org/apache/lucene/search/MultiTermQuery.java b/lucene/core/src/java/org/apache/lucene/search/MultiTermQuery.java index 327227afe151..e3c4ff7097f4 100644 --- a/lucene/core/src/java/org/apache/lucene/search/MultiTermQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/MultiTermQuery.java @@ -286,9 +286,9 @@ public MultiTermQuery(final String field) { * (should instead return {@link TermsEnum#EMPTY} if no * terms match). The TermsEnum must already be * positioned to the first matching term. - * The given {@link AttributeSource} is passed by the {@link RewriteMethod} to - * provide attributes, the rewrite method uses to inform about e.g. maximum competitive boosts. 
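(Worked example for the minScores array above, with hypothetical numbers: if collector 0 raises its min competitive score to 0.5 while collector 1 is still at 0.3, the shared scorer may only skip hits scoring below min(0.5, 0.3) = 0.3, so neither sub-collector is starved. Usage sketch; query and searcher assumed.)

TopScoreDocCollector top10 = TopScoreDocCollector.create(10, 100);  // TOP_SCORES kicks in after 100 counted hits
TopScoreDocCollector top5 = TopScoreDocCollector.create(5, 100);
// Both collectors can now push min competitive scores without hiding
// still-competitive hits from each other.
searcher.search(query, MultiCollector.wrap(top10, top5));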
- * This is currently only used by {@link TopTermsRewrite} + * The given {@link AttributeSource} is passed by the {@link RewriteMethod} to + * share information between segments, for example {@link TopTermsRewrite} uses + * it to share maximum competitive boosts */ protected abstract TermsEnum getTermsEnum(Terms terms, AttributeSource atts) throws IOException; diff --git a/lucene/core/src/java/org/apache/lucene/search/SortField.java b/lucene/core/src/java/org/apache/lucene/search/SortField.java index 2cfae46f01d8..7512ec934416 100644 --- a/lucene/core/src/java/org/apache/lucene/search/SortField.java +++ b/lucene/core/src/java/org/apache/lucene/search/SortField.java @@ -21,7 +21,13 @@ import java.util.Comparator; import java.util.Objects; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.IndexSorter; +import org.apache.lucene.index.SortFieldProvider; +import org.apache.lucene.store.DataInput; +import org.apache.lucene.store.DataOutput; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.NumericUtils; /** * Stores information about how to sort documents by terms in an individual @@ -120,6 +126,106 @@ public SortField(String field, Type type, boolean reverse) { this.reverse = reverse; } + /** A SortFieldProvider for field sorts */ + public static final class Provider extends SortFieldProvider { + + /** The name this Provider is registered under */ + public static final String NAME = "SortField"; + + /** Creates a new Provider */ + public Provider() { + super(NAME); + } + + @Override + public SortField readSortField(DataInput in) throws IOException { + SortField sf = new SortField(in.readString(), readType(in), in.readInt() == 1); + if (in.readInt() == 1) { + // missing object + switch (sf.type) { + case STRING: + int missingString = in.readInt(); + if (missingString == 1) { + sf.setMissingValue(STRING_FIRST); + } + else { + sf.setMissingValue(STRING_LAST); + } + break; + case INT: + sf.setMissingValue(in.readInt()); + break; + case LONG: + sf.setMissingValue(in.readLong()); + break; + case FLOAT: + sf.setMissingValue(NumericUtils.sortableIntToFloat(in.readInt())); + break; + case DOUBLE: + sf.setMissingValue(NumericUtils.sortableLongToDouble(in.readLong())); + break; + default: + throw new IllegalArgumentException("Cannot deserialize sort of type " + sf.type); + } + } + return sf; + } + + @Override + public void writeSortField(SortField sf, DataOutput out) throws IOException { + sf.serialize(out); + } + } + + protected static Type readType(DataInput in) throws IOException { + String type = in.readString(); + try { + return Type.valueOf(type); + } + catch (IllegalArgumentException e) { + throw new IllegalArgumentException("Can't deserialize SortField - unknown type " + type); + } + } + + private void serialize(DataOutput out) throws IOException { + out.writeString(field); + out.writeString(type.toString()); + out.writeInt(reverse ? 
1 : 0); + if (missingValue == null) { + out.writeInt(0); + } + else { + out.writeInt(1); + switch (type) { + case STRING: + if (missingValue == STRING_LAST) { + out.writeInt(0); + } + else if (missingValue == STRING_FIRST) { + out.writeInt(1); + } + else { + throw new IllegalArgumentException("Cannot serialize missing value of " + missingValue + " for type STRING"); + } + break; + case INT: + out.writeInt((int)missingValue); + break; + case LONG: + out.writeLong((long)missingValue); + break; + case FLOAT: + out.writeInt(NumericUtils.floatToSortableInt((float)missingValue)); + break; + case DOUBLE: + out.writeLong(NumericUtils.doubleToSortableLong((double)missingValue)); + break; + default: + throw new IllegalArgumentException("Cannot serialize SortField of type " + type); + } + } + } + /** Pass this to {@link #setMissingValue} to have missing * string values sort first. */ public final static Object STRING_FIRST = new Object() { @@ -392,4 +498,33 @@ public SortField rewrite(IndexSearcher searcher) throws IOException { public boolean needsScores() { return type == Type.SCORE; } + + /** + * Returns an {@link IndexSorter} used for sorting index segments by this SortField. + * + * If the SortField cannot be used for index sorting (for example, if it uses scores or + * other query-dependent values) then this method should return {@code null} + * + * SortFields that implement this method should also implement a companion + * {@link SortFieldProvider} to serialize and deserialize the sort in index segment + * headers + * + * @lucene.experimental + */ + public IndexSorter getIndexSorter() { + switch (type) { + case STRING: + return new IndexSorter.StringSorter(Provider.NAME, missingValue, reverse, reader -> DocValues.getSorted(reader, field)); + case INT: + return new IndexSorter.IntSorter(Provider.NAME, (Integer)missingValue, reverse, reader -> DocValues.getNumeric(reader, field)); + case LONG: + return new IndexSorter.LongSorter(Provider.NAME, (Long)missingValue, reverse, reader -> DocValues.getNumeric(reader, field)); + case DOUBLE: + return new IndexSorter.DoubleSorter(Provider.NAME, (Double)missingValue, reverse, reader -> DocValues.getNumeric(reader, field)); + case FLOAT: + return new IndexSorter.FloatSorter(Provider.NAME, (Float)missingValue, reverse, reader -> DocValues.getNumeric(reader, field)); + default: return null; + } + } + } diff --git a/lucene/core/src/java/org/apache/lucene/search/SortedNumericSortField.java b/lucene/core/src/java/org/apache/lucene/search/SortedNumericSortField.java index fff000b96f6a..6c5154a3ee35 100644 --- a/lucene/core/src/java/org/apache/lucene/search/SortedNumericSortField.java +++ b/lucene/core/src/java/org/apache/lucene/search/SortedNumericSortField.java @@ -20,9 +20,15 @@ import java.io.IOException; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.IndexSorter; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.SortFieldProvider; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.store.DataInput; +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.util.NumericUtils; /** * SortField for {@link SortedNumericDocValues}. 
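(A hedged round-trip sketch for the new serialization hooks; not committed code. ByteBuffersDataOutput is used for brevity, and its toDataInput() is assumed available.)

SortField sf = new SortField("timestamp", SortField.Type.LONG, true);
sf.setMissingValue(0L);
ByteBuffersDataOutput out = new ByteBuffersDataOutput();
new SortField.Provider().writeSortField(sf, out);  // delegates to sf.serialize(out)
SortField back = new SortField.Provider().readSortField(out.toDataInput());
// back carries the same field, type, reverse flag and missing value as sf.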
@@ -83,6 +89,86 @@ public SortedNumericSortField(String field, SortField.Type type, boolean reverse this.type = type; } + /** A SortFieldProvider for this sort field */ + public static final class Provider extends SortFieldProvider { + + /** The name this provider is registered under */ + public static final String NAME = "SortedNumericSortField"; + + /** Creates a new Provider */ + public Provider() { + super(NAME); + } + + @Override + public SortField readSortField(DataInput in) throws IOException { + SortedNumericSortField sf = new SortedNumericSortField(in.readString(), readType(in), in.readInt() == 1, readSelectorType(in)); + if (in.readInt() == 1) { + switch (sf.type) { + case INT: + sf.setMissingValue(in.readInt()); + break; + case LONG: + sf.setMissingValue(in.readLong()); + break; + case FLOAT: + sf.setMissingValue(NumericUtils.sortableIntToFloat(in.readInt())); + break; + case DOUBLE: + sf.setMissingValue(NumericUtils.sortableLongToDouble(in.readLong())); + break; + default: + throw new AssertionError(); + } + } + return sf; + } + + @Override + public void writeSortField(SortField sf, DataOutput out) throws IOException { + assert sf instanceof SortedNumericSortField; + ((SortedNumericSortField)sf).serialize(out); + } + } + + private static SortedNumericSelector.Type readSelectorType(DataInput in) throws IOException { + int selectorType = in.readInt(); + if (selectorType >= SortedNumericSelector.Type.values().length) { + throw new IllegalArgumentException("Can't deserialize SortedNumericSortField - unknown selector type " + selectorType); + } + return SortedNumericSelector.Type.values()[selectorType]; + } + + private void serialize(DataOutput out) throws IOException { + out.writeString(getField()); + out.writeString(type.toString()); + out.writeInt(reverse ? 1 : 0); + out.writeInt(selector.ordinal()); + if (missingValue == null) { + out.writeInt(0); + } + else { + out.writeInt(1); + // oh for switch expressions... 
+ switch (type) { + case INT: + out.writeInt((int)missingValue); + break; + case LONG: + out.writeLong((long)missingValue); + break; + case FLOAT: + out.writeInt(NumericUtils.floatToSortableInt((float)missingValue)); + break; + case DOUBLE: + out.writeLong(NumericUtils.doubleToSortableLong((double)missingValue)); + break; + default: + throw new AssertionError(); + } + } + } + /** Returns the numeric type in use for this sort */ public SortField.Type getNumericType() { return type; @@ -170,4 +256,24 @@ protected NumericDocValues getNumericDocValues(LeafReaderContext context, String throw new AssertionError(); } } + + private NumericDocValues getValue(LeafReader reader) throws IOException { + return SortedNumericSelector.wrap(DocValues.getSortedNumeric(reader, getField()), selector, type); + } + + @Override + public IndexSorter getIndexSorter() { + switch(type) { + case INT: + return new IndexSorter.IntSorter(Provider.NAME, (Integer)missingValue, reverse, this::getValue); + case LONG: + return new IndexSorter.LongSorter(Provider.NAME, (Long)missingValue, reverse, this::getValue); + case DOUBLE: + return new IndexSorter.DoubleSorter(Provider.NAME, (Double)missingValue, reverse, this::getValue); + case FLOAT: + return new IndexSorter.FloatSorter(Provider.NAME, (Float)missingValue, reverse, this::getValue); + default: + throw new AssertionError(); + } + } } diff --git a/lucene/core/src/java/org/apache/lucene/search/SortedSetSortField.java b/lucene/core/src/java/org/apache/lucene/search/SortedSetSortField.java index b095c6e88fcd..2321a667bdb9 100644 --- a/lucene/core/src/java/org/apache/lucene/search/SortedSetSortField.java +++ b/lucene/core/src/java/org/apache/lucene/search/SortedSetSortField.java @@ -16,13 +16,17 @@ */ package org.apache.lucene.search; - import java.io.IOException; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.IndexSorter; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.SortFieldProvider; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.store.DataInput; +import org.apache.lucene.store.DataOutput; /** * SortField for {@link SortedSetDocValues}. 
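(Illustrative tie-in: with getIndexSorter() implemented, these typed sort fields can back an index sort end to end; an Analyzer named analyzer is assumed.)

IndexWriterConfig iwc = new IndexWriterConfig(analyzer);
iwc.setIndexSort(new Sort(new SortedNumericSortField("price", SortField.Type.LONG)));
// Segments flushed by this writer are sorted by "price"; the sort itself is
// written into the segment header through the companion Provider above.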
@@ -68,6 +72,60 @@ public SortedSetSortField(String field, boolean reverse, SortedSetSelector.Type } this.selector = selector; } + + /** A SortFieldProvider for this sort */ + public static final class Provider extends SortFieldProvider { + + /** The name this provider is registered under */ + public static final String NAME = "SortedSetSortField"; + + /** Creates a new Provider */ + public Provider() { + super(NAME); + } + + @Override + public SortField readSortField(DataInput in) throws IOException { + SortField sf = new SortedSetSortField(in.readString(), in.readInt() == 1, readSelectorType(in)); + int missingValue = in.readInt(); + if (missingValue == 1) { + sf.setMissingValue(SortField.STRING_FIRST); + } + else if (missingValue == 2) { + sf.setMissingValue(SortField.STRING_LAST); + } + return sf; + } + + @Override + public void writeSortField(SortField sf, DataOutput out) throws IOException { + assert sf instanceof SortedSetSortField; + ((SortedSetSortField)sf).serialize(out); + } + } + + private static SortedSetSelector.Type readSelectorType(DataInput in) throws IOException { + int type = in.readInt(); + if (type >= SortedSetSelector.Type.values().length) { + throw new IllegalArgumentException("Cannot deserialize SortedSetSortField: unknown selector type " + type); + } + return SortedSetSelector.Type.values()[type]; + } + + private void serialize(DataOutput out) throws IOException { + out.writeString(getField()); + out.writeInt(reverse ? 1 : 0); + out.writeInt(selector.ordinal()); + if (missingValue == SortField.STRING_FIRST) { + out.writeInt(1); + } + else if (missingValue == SortField.STRING_LAST) { + out.writeInt(2); + } + else { + out.writeInt(0); + } + } /** Returns the selector in use for this sort */ public SortedSetSelector.Type getSelector() { @@ -126,4 +184,13 @@ protected SortedDocValues getSortedDocValues(LeafReaderContext context, String f } }; } + + private SortedDocValues getValues(LeafReader reader) throws IOException { + return SortedSetSelector.wrap(DocValues.getSortedSet(reader, getField()), selector); + } + + @Override + public IndexSorter getIndexSorter() { + return new IndexSorter.StringSorter(Provider.NAME, missingValue, reverse, this::getValues); + } } diff --git a/lucene/core/src/java/org/apache/lucene/store/BufferedChecksum.java b/lucene/core/src/java/org/apache/lucene/store/BufferedChecksum.java index 4378c9a7705d..74b97d2d13cc 100644 --- a/lucene/core/src/java/org/apache/lucene/store/BufferedChecksum.java +++ b/lucene/core/src/java/org/apache/lucene/store/BufferedChecksum.java @@ -27,8 +27,8 @@ public class BufferedChecksum implements Checksum { private final Checksum in; private final byte buffer[]; private int upto; - /** Default buffer size: 256 */ - public static final int DEFAULT_BUFFERSIZE = 256; + /** Default buffer size: 1024 */ + public static final int DEFAULT_BUFFERSIZE = 1024; /** Create a new BufferedChecksum with {@link #DEFAULT_BUFFERSIZE} */ public BufferedChecksum(Checksum in) { diff --git a/lucene/core/src/java/org/apache/lucene/store/ByteBuffersIndexOutput.java b/lucene/core/src/java/org/apache/lucene/store/ByteBuffersIndexOutput.java index 19dc4004853b..f6785bef79d8 100644 --- a/lucene/core/src/java/org/apache/lucene/store/ByteBuffersIndexOutput.java +++ b/lucene/core/src/java/org/apache/lucene/store/ByteBuffersIndexOutput.java @@ -81,24 +81,10 @@ public long getChecksum() throws IOException { if (lastChecksumPosition != delegate.size()) { lastChecksumPosition = delegate.size(); checksum.reset(); - byte [] buffer = null; for (ByteBuffer 
bb : delegate.toBufferList()) { - if (bb.hasArray()) { - checksum.update(bb.array(), bb.arrayOffset() + bb.position(), bb.remaining()); - } else { - if (buffer == null) buffer = new byte [1024 * 4]; - - bb = bb.asReadOnlyBuffer(); - int remaining = bb.remaining(); - while (remaining > 0) { - int len = Math.min(remaining, buffer.length); - bb.get(buffer, 0, len); - checksum.update(buffer, 0, len); - remaining -= len; - } - } + checksum.update(bb); } - lastChecksum = checksum.getValue(); + lastChecksum = checksum.getValue(); } return lastChecksum; } diff --git a/lucene/core/src/java/org/apache/lucene/util/IntBlockPool.java b/lucene/core/src/java/org/apache/lucene/util/IntBlockPool.java index 46500fcdf073..d09cb83e2cc3 100644 --- a/lucene/core/src/java/org/apache/lucene/util/IntBlockPool.java +++ b/lucene/core/src/java/org/apache/lucene/util/IntBlockPool.java @@ -175,7 +175,7 @@ private int newSlice(final int size) { return upto; } - private static final boolean assertSliceBuffer(int[] buffer) { + private static boolean assertSliceBuffer(int[] buffer) { int count = 0; for (int i = 0; i < buffer.length; i++) { count += buffer[i]; // for slices the buffer must only have 0 values diff --git a/lucene/core/src/java/org/apache/lucene/util/Version.java b/lucene/core/src/java/org/apache/lucene/util/Version.java index 5ed1a959da30..f5dbcc1e8e72 100644 --- a/lucene/core/src/java/org/apache/lucene/util/Version.java +++ b/lucene/core/src/java/org/apache/lucene/util/Version.java @@ -102,6 +102,13 @@ public final class Version { @Deprecated public static final Version LUCENE_8_5_1 = new Version(8, 5, 1); + /** + * Match settings and bugs in Lucene's 8.5.2 release. + * @deprecated Use latest + */ + @Deprecated + public static final Version LUCENE_8_5_2 = new Version(8, 5, 2); + /** * Match settings and bugs in Lucene's 8.6.0 release. * @deprecated Use latest diff --git a/lucene/core/src/java/org/apache/lucene/util/automaton/RegExp.java b/lucene/core/src/java/org/apache/lucene/util/automaton/RegExp.java index e8b37e59a3ff..0874cde71015 100644 --- a/lucene/core/src/java/org/apache/lucene/util/automaton/RegExp.java +++ b/lucene/core/src/java/org/apache/lucene/util/automaton/RegExp.java @@ -290,6 +290,55 @@ * (a single non-reserved character) * * + * | \d (a digit [0-9]) + * | \D (a non-digit [^0-9]) + * | \s (whitespace [ \t\n\r]) + * | \S (non whitespace [^\s]) + * | \w (a word character [a-zA-Z_0-9]) + * | \W (a non word character [^\w]) * * * | @@ -316,8 +365,44 @@ */ public class RegExp { - enum Kind { - REGEXP_UNION, REGEXP_CONCATENATION, REGEXP_INTERSECTION, REGEXP_OPTIONAL, REGEXP_REPEAT, REGEXP_REPEAT_MIN, REGEXP_REPEAT_MINMAX, REGEXP_COMPLEMENT, REGEXP_CHAR, REGEXP_CHAR_RANGE, REGEXP_ANYCHAR, REGEXP_EMPTY, REGEXP_STRING, REGEXP_ANYSTRING, REGEXP_AUTOMATON, REGEXP_INTERVAL + /** + * The type of expression represented by a RegExp node. 
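The new predefined classes behave like their java.util.regex counterparts once parsed. A minimal sketch of exercising them through the public automaton API (the class names are real lucene-core types; the inputs and expected outputs are illustrative):

```java
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.RegExp;

public class PredefinedClassDemo {
  public static void main(String[] args) {
    // \d expands to [0-9] before the automaton is built.
    CharacterRunAutomaton digits =
        new CharacterRunAutomaton(new RegExp("\\d+").toAutomaton());
    System.out.println(digits.run("2020"));  // true
    System.out.println(digits.run("20a0"));  // false: 'a' is not in [0-9]

    // \w expands to [a-zA-Z_0-9], matching the table above.
    CharacterRunAutomaton word =
        new CharacterRunAutomaton(new RegExp("\\w+").toAutomaton());
    System.out.println(word.run("foo_bar9")); // true
  }
}
```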
+ */ + public enum Kind { + /** The union of two expressions */ + REGEXP_UNION, + /** A sequence of two expressions */ + REGEXP_CONCATENATION, + /** The intersection of two expressions */ + REGEXP_INTERSECTION, + /** An optional expression */ + REGEXP_OPTIONAL, + /** An expression that repeats */ + REGEXP_REPEAT, + /** An expression that repeats a minimum number of times*/ + REGEXP_REPEAT_MIN, + /** An expression that repeats a minimum and maximum number of times*/ + REGEXP_REPEAT_MINMAX, + /** The complement of an expression */ + REGEXP_COMPLEMENT, + /** A Character */ + REGEXP_CHAR, + /** A Character range*/ + REGEXP_CHAR_RANGE, + /** Any Character allowed*/ + REGEXP_ANYCHAR, + /** An empty expression*/ + REGEXP_EMPTY, + /** A string expression*/ + REGEXP_STRING, + /** Any string allowed */ + REGEXP_ANYSTRING, + /** An Automaton expression*/ + REGEXP_AUTOMATON, + /** An Interval expression */ + REGEXP_INTERVAL, + /** An expression for a pre-defined class e.g. \w */ + REGEXP_PRE_CLASS } /** @@ -361,21 +446,37 @@ enum Kind { */ public static final int NONE = 0x0000; + //Immutable parsed state + /** + * The type of expression + */ + public final Kind kind; + /** + * Child expressions held by a container type expression + */ + public final RegExp exp1, exp2; + /** + * String expression + */ + public final String s; + /** + * Character expression + */ + public final int c; + /** + * Limits for repeatable type expressions + */ + public final int min, max, digits; + /** + * Extents for range type expressions + */ + public final int from, to; + + // Parser variables private final String originalString; - Kind kind; - RegExp exp1, exp2; - String s; - int c; - int min, max, digits; - int from, to; - int flags; int pos; - - RegExp() { - this.originalString = null; - } - + /** * Constructs new RegExp from a string. Same as * RegExp(s, ALL). @@ -418,6 +519,37 @@ public RegExp(String s, int syntax_flags) throws IllegalArgumentException { from = e.from; to = e.to; } + + RegExp(Kind kind, RegExp exp1, RegExp exp2, String s, int c, int min, int max, int digits, int from, int to){ + this.originalString = null; + this.kind = kind; + this.flags = 0; + this.exp1 = exp1; + this.exp2 = exp2; + this.s = s; + this.c = c; + this.min = min; + this.max = max; + this.digits = digits; + this.from = from; + this.to = to; + } + + // Simplified construction of container nodes + static RegExp newContainerNode(Kind kind, RegExp exp1, RegExp exp2) { + return new RegExp(kind, exp1, exp2, null, 0, 0, 0, 0, 0, 0); + } + + // Simplified construction of repeating nodes + static RegExp newRepeatingNode(Kind kind, RegExp exp, int min, int max) { + return new RegExp(kind, exp, null, null, 0, min, max, 0, 0, 0); + } + + + // Simplified construction of leaf nodes + static RegExp newLeafNode(Kind kind, String s, int c, int min, int max, int digits, int from, int to) { + return new RegExp(kind, null, null, s, c, min, max, digits, from, to); + } /** * Constructs new Automaton from this RegExp. 
Same @@ -506,6 +638,10 @@ private Automaton toAutomatonInternal(Map automata, List list; Automaton a = null; switch (kind) { + case REGEXP_PRE_CLASS: + RegExp expanded = expandPredefined(); + a = expanded.toAutomatonInternal(automata, automaton_provider, maxDeterminizedStates); + break; case REGEXP_UNION: list = new ArrayList<>(); findLeaves(exp1, Kind.REGEXP_UNION, list, automata, automaton_provider, @@ -716,6 +852,9 @@ void toStringBuilder(StringBuilder b) { b.append('0'); b.append(s2).append(">"); break; + case REGEXP_PRE_CLASS: + b.append("\\").appendCodePoint(from); + break; } } @@ -774,6 +913,13 @@ void toStringTree(StringBuilder b, String indent) { b.appendCodePoint(c); b.append('\n'); break; + case REGEXP_PRE_CLASS: + b.append(indent); + b.append(kind); + b.append(" class=\\"); + b.appendCodePoint(from); + b.append('\n'); + break; case REGEXP_CHAR_RANGE: b.append(indent); b.append(kind); @@ -855,34 +1001,29 @@ void getIdentifiers(Set set) { } static RegExp makeUnion(RegExp exp1, RegExp exp2) { - RegExp r = new RegExp(); - r.kind = Kind.REGEXP_UNION; - r.exp1 = exp1; - r.exp2 = exp2; - return r; + return newContainerNode(Kind.REGEXP_UNION, exp1, exp2); } static RegExp makeConcatenation(RegExp exp1, RegExp exp2) { if ((exp1.kind == Kind.REGEXP_CHAR || exp1.kind == Kind.REGEXP_STRING) && (exp2.kind == Kind.REGEXP_CHAR || exp2.kind == Kind.REGEXP_STRING)) return makeString( exp1, exp2); - RegExp r = new RegExp(); - r.kind = Kind.REGEXP_CONCATENATION; + RegExp rexp1, rexp2; if (exp1.kind == Kind.REGEXP_CONCATENATION && (exp1.exp2.kind == Kind.REGEXP_CHAR || exp1.exp2.kind == Kind.REGEXP_STRING) && (exp2.kind == Kind.REGEXP_CHAR || exp2.kind == Kind.REGEXP_STRING)) { - r.exp1 = exp1.exp1; - r.exp2 = makeString(exp1.exp2, exp2); + rexp1 = exp1.exp1; + rexp2 = makeString(exp1.exp2, exp2); } else if ((exp1.kind == Kind.REGEXP_CHAR || exp1.kind == Kind.REGEXP_STRING) && exp2.kind == Kind.REGEXP_CONCATENATION && (exp2.exp1.kind == Kind.REGEXP_CHAR || exp2.exp1.kind == Kind.REGEXP_STRING)) { - r.exp1 = makeString(exp1, exp2.exp1); - r.exp2 = exp2.exp2; + rexp1 = makeString(exp1, exp2.exp1); + rexp2 = exp2.exp2; } else { - r.exp1 = exp1; - r.exp2 = exp2; + rexp1 = exp1; + rexp2 = exp2; } - return r; + return newContainerNode(Kind.REGEXP_CONCATENATION, rexp1, rexp2); } static private RegExp makeString(RegExp exp1, RegExp exp2) { @@ -895,107 +1036,61 @@ static private RegExp makeString(RegExp exp1, RegExp exp2) { } static RegExp makeIntersection(RegExp exp1, RegExp exp2) { - RegExp r = new RegExp(); - r.kind = Kind.REGEXP_INTERSECTION; - r.exp1 = exp1; - r.exp2 = exp2; - return r; + return newContainerNode(Kind.REGEXP_INTERSECTION, exp1, exp2); } static RegExp makeOptional(RegExp exp) { - RegExp r = new RegExp(); - r.kind = Kind.REGEXP_OPTIONAL; - r.exp1 = exp; - return r; + return newContainerNode(Kind.REGEXP_OPTIONAL, exp, null); } static RegExp makeRepeat(RegExp exp) { - RegExp r = new RegExp(); - r.kind = Kind.REGEXP_REPEAT; - r.exp1 = exp; - return r; + return newContainerNode(Kind.REGEXP_REPEAT, exp, null); } static RegExp makeRepeat(RegExp exp, int min) { - RegExp r = new RegExp(); - r.kind = Kind.REGEXP_REPEAT_MIN; - r.exp1 = exp; - r.min = min; - return r; + return newRepeatingNode(Kind.REGEXP_REPEAT_MIN, exp, min, 0); } static RegExp makeRepeat(RegExp exp, int min, int max) { - RegExp r = new RegExp(); - r.kind = Kind.REGEXP_REPEAT_MINMAX; - r.exp1 = exp; - r.min = min; - r.max = max; - return r; + return newRepeatingNode(Kind.REGEXP_REPEAT_MINMAX, exp, min, max); } static RegExp 
makeComplement(RegExp exp) { - RegExp r = new RegExp(); - r.kind = Kind.REGEXP_COMPLEMENT; - r.exp1 = exp; - return r; + return newContainerNode(Kind.REGEXP_COMPLEMENT, exp, null); } static RegExp makeChar(int c) { - RegExp r = new RegExp(); - r.kind = Kind.REGEXP_CHAR; - r.c = c; - return r; + return newLeafNode(Kind.REGEXP_CHAR, null, c, 0, 0, 0, 0, 0); } static RegExp makeCharRange(int from, int to) { if (from > to) throw new IllegalArgumentException("invalid range: from (" + from + ") cannot be > to (" + to + ")"); - RegExp r = new RegExp(); - r.kind = Kind.REGEXP_CHAR_RANGE; - r.from = from; - r.to = to; - return r; + return newLeafNode(Kind.REGEXP_CHAR_RANGE, null, 0, 0, 0, 0, from, to); } static RegExp makeAnyChar() { - RegExp r = new RegExp(); - r.kind = Kind.REGEXP_ANYCHAR; - return r; + return newContainerNode(Kind.REGEXP_ANYCHAR, null, null); } static RegExp makeEmpty() { - RegExp r = new RegExp(); - r.kind = Kind.REGEXP_EMPTY; - return r; + return newContainerNode(Kind.REGEXP_EMPTY, null, null); } static RegExp makeString(String s) { - RegExp r = new RegExp(); - r.kind = Kind.REGEXP_STRING; - r.s = s; - return r; + return newLeafNode(Kind.REGEXP_STRING, s, 0, 0, 0, 0, 0, 0); } static RegExp makeAnyString() { - RegExp r = new RegExp(); - r.kind = Kind.REGEXP_ANYSTRING; - return r; + return newContainerNode(Kind.REGEXP_ANYSTRING, null, null); } static RegExp makeAutomaton(String s) { - RegExp r = new RegExp(); - r.kind = Kind.REGEXP_AUTOMATON; - r.s = s; - return r; + return newLeafNode(Kind.REGEXP_AUTOMATON, s, 0, 0, 0, 0, 0, 0); } static RegExp makeInterval(int min, int max, int digits) { - RegExp r = new RegExp(); - r.kind = Kind.REGEXP_INTERVAL; - r.min = min; - r.max = max; - r.digits = digits; - return r; + return newLeafNode(Kind.REGEXP_INTERVAL, null, 0, min, max, digits, 0, 0); } private boolean peek(String s) { @@ -1101,10 +1196,60 @@ final RegExp parseCharClasses() throws IllegalArgumentException { } final RegExp parseCharClass() throws IllegalArgumentException { + RegExp predefinedExp = matchPredefinedCharacterClass(); + if (predefinedExp != null) { + return predefinedExp; + } + int c = parseCharExp(); if (match('-')) return makeCharRange(c, parseCharExp()); else return makeChar(c); } + + RegExp expandPredefined() { + //See https://docs.oracle.com/javase/tutorial/essential/regex/pre_char_classes.html + switch (from) { + case 'd': + return new RegExp("[0-9]"); // digit + case 'D': + return new RegExp("[^0-9]"); // non-digit + case 's': + return new RegExp("[ \t\n\r]"); // whitespace + case 'S': + return new RegExp("[^\\s]"); // non-whitespace + case 'w': + return new RegExp("[a-zA-Z_0-9]"); // word + case 'W': + return new RegExp("[^\\w]"); // non-word + default: + throw new IllegalArgumentException( + "invalid character class " + from); + } + } + + + final RegExp matchPredefinedCharacterClass() { + //See https://docs.oracle.com/javase/tutorial/essential/regex/pre_char_classes.html + if (match('\\')) { + if (peek("dDwWsS")) { + return newLeafNode(Kind.REGEXP_PRE_CLASS, null, 0, 0, 0, 0, next(), 0); + } + + if (peek("\\")) { + return makeChar(next()); + } + + // From https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html#bs + // "It is an error to use a backslash prior to any alphabetic character that does not denote an escaped + // construct;" + if (peek("abcefghijklmnopqrtuvxyz") || peek("ABCEFGHIJKLMNOPQRTUVXYZ")) { + throw new IllegalArgumentException("invalid character class \\" + next()); + } + } + + return null; + } + final RegExp parseSimpleExp() 
throws IllegalArgumentException { if (match('.')) return makeAnyChar(); @@ -1158,7 +1303,13 @@ else if (match('"')) { "interval syntax error at position " + (pos - 1)); } } - } else return makeChar(parseCharExp()); + } else { + RegExp predefined = matchPredefinedCharacterClass(); + if (predefined != null) { + return predefined; + } + return makeChar(parseCharExp()); + } } final int parseCharExp() throws IllegalArgumentException { diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java index 490432693efc..1e3702db84c9 100644 --- a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java +++ b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java @@ -24,11 +24,7 @@ import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.PointValues; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.store.ByteArrayDataInput; -import org.apache.lucene.store.ByteBufferIndexInput; -import org.apache.lucene.store.DataInput; import org.apache.lucene.store.IndexInput; -import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.MathUtil; @@ -36,124 +32,7 @@ * * @lucene.experimental */ -public final class BKDReader extends PointValues implements Accountable { - - private static abstract class BKDInput extends DataInput implements Cloneable { - abstract long getMinLeafBlockFP(); - abstract long ramBytesUsed(); - - abstract int getPosition(); - abstract void setPosition(int pos) throws IOException; - - @Override - public BKDInput clone() { - return (BKDInput)super.clone(); - } - } - - private static class BKDOffHeapInput extends BKDInput implements Cloneable { - - private final IndexInput packedIndex; - private final long minLeafBlockFP; - - BKDOffHeapInput(IndexInput packedIndex) throws IOException { - this.packedIndex = packedIndex; - this.minLeafBlockFP = packedIndex.clone().readVLong(); - } - - private BKDOffHeapInput(IndexInput packedIndex, long minLeadBlockFP) { - this.packedIndex = packedIndex; - this.minLeafBlockFP = minLeadBlockFP; - } - - @Override - public BKDOffHeapInput clone() { - return new BKDOffHeapInput(packedIndex.clone(), minLeafBlockFP); - } - - @Override - long getMinLeafBlockFP() { - return minLeafBlockFP; - } - - @Override - long ramBytesUsed() { - return 0; - } - - @Override - int getPosition() { - return (int)packedIndex.getFilePointer(); - } - - @Override - void setPosition(int pos) throws IOException { - packedIndex.seek(pos); - } - - @Override - public byte readByte() throws IOException { - return packedIndex.readByte(); - } - - @Override - public void readBytes(byte[] b, int offset, int len) throws IOException { - packedIndex.readBytes(b, offset, len); - } - } - - private static class BKDOnHeapInput extends BKDInput implements Cloneable { - - private final ByteArrayDataInput packedIndex; - private final long minLeafBlockFP; - - BKDOnHeapInput(IndexInput packedIndex, int numBytes) throws IOException { - byte[] packedBytes = new byte[numBytes]; - packedIndex.readBytes(packedBytes, 0, numBytes); - this.packedIndex = new ByteArrayDataInput(packedBytes); - this.minLeafBlockFP = this.packedIndex.clone().readVLong(); - } - - private BKDOnHeapInput(ByteArrayDataInput packedIndex, long minLeadBlockFP) { - this.packedIndex = packedIndex; - this.minLeafBlockFP = minLeadBlockFP; - } - - @Override - public BKDOnHeapInput clone() { - return new 
BKDOnHeapInput((ByteArrayDataInput)packedIndex.clone(), minLeafBlockFP); - } - - @Override - long getMinLeafBlockFP() { - return minLeafBlockFP; - } - - @Override - long ramBytesUsed() { - return packedIndex.length(); - } - - @Override - int getPosition() { - return packedIndex.getPosition(); - } - - @Override - void setPosition(int pos) { - packedIndex.setPosition(pos); - } - - @Override - public byte readByte() throws IOException { - return packedIndex.readByte(); - } - - @Override - public void readBytes(byte[] b, int offset, int len) throws IOException { - packedIndex.readBytes(b, offset, len); - } - } +public final class BKDReader extends PointValues { // Packed array of byte[] holding all split values in the full binary tree: final int leafNodeOffset; @@ -170,67 +49,64 @@ public void readBytes(byte[] b, int offset, int len) throws IOException { final int version; protected final int packedBytesLength; protected final int packedIndexBytesLength; + final long minLeafBlockFP; - final BKDInput packedIndex; - - /** Caller must pre-seek the provided {@link IndexInput} to the index location that {@link BKDWriter#finish} returned */ - public BKDReader(IndexInput in) throws IOException { - this(in, in instanceof ByteBufferIndexInput); - } + final IndexInput packedIndex; - /** - * Caller must pre-seek the provided {@link IndexInput} to the index location that {@link BKDWriter#finish} returned - * and specify {@code true} to store BKD off-heap ({@code false} otherwise) - */ - public BKDReader(IndexInput in, boolean offHeap) throws IOException { - version = CodecUtil.checkHeader(in, BKDWriter.CODEC_NAME, BKDWriter.VERSION_START, BKDWriter.VERSION_CURRENT); - numDataDims = in.readVInt(); + /** Caller must pre-seek the provided {@link IndexInput} to the index location that {@link BKDWriter#finish} returned. + * BKD tree is always stored off-heap. 
*/ + public BKDReader(IndexInput metaIn, IndexInput indexIn, IndexInput dataIn) throws IOException { + version = CodecUtil.checkHeader(metaIn, BKDWriter.CODEC_NAME, BKDWriter.VERSION_START, BKDWriter.VERSION_CURRENT); + numDataDims = metaIn.readVInt(); if (version >= BKDWriter.VERSION_SELECTIVE_INDEXING) { - numIndexDims = in.readVInt(); + numIndexDims = metaIn.readVInt(); } else { numIndexDims = numDataDims; } - maxPointsInLeafNode = in.readVInt(); - bytesPerDim = in.readVInt(); + maxPointsInLeafNode = metaIn.readVInt(); + bytesPerDim = metaIn.readVInt(); packedBytesLength = numDataDims * bytesPerDim; packedIndexBytesLength = numIndexDims * bytesPerDim; // Read index: - numLeaves = in.readVInt(); + numLeaves = metaIn.readVInt(); assert numLeaves > 0; leafNodeOffset = numLeaves; minPackedValue = new byte[packedIndexBytesLength]; maxPackedValue = new byte[packedIndexBytesLength]; - in.readBytes(minPackedValue, 0, packedIndexBytesLength); - in.readBytes(maxPackedValue, 0, packedIndexBytesLength); + metaIn.readBytes(minPackedValue, 0, packedIndexBytesLength); + metaIn.readBytes(maxPackedValue, 0, packedIndexBytesLength); for(int dim=0;dim<numIndexDims;dim++) { if (Arrays.compareUnsigned(minPackedValue, dim * bytesPerDim, dim * bytesPerDim + bytesPerDim, maxPackedValue, dim * bytesPerDim, dim * bytesPerDim + bytesPerDim) > 0) { - throw new CorruptIndexException("minPackedValue " + new BytesRef(minPackedValue) + " is > maxPackedValue " + new BytesRef(maxPackedValue) + " for dim=" + dim, in); + throw new CorruptIndexException("minPackedValue " + new BytesRef(minPackedValue) + " is > maxPackedValue " + new BytesRef(maxPackedValue) + " for dim=" + dim, metaIn); } } - pointCount = in.readVLong(); - docCount = in.readVInt(); - - int numBytes = in.readVInt(); - IndexInput slice = in.slice("packedIndex", in.getFilePointer(), numBytes); - if (offHeap) { - packedIndex = new BKDOffHeapInput(slice); + pointCount = metaIn.readVLong(); + docCount = metaIn.readVInt(); + + int numIndexBytes = metaIn.readVInt(); + long indexStartPointer; + if (version >= BKDWriter.VERSION_META_FILE) { + minLeafBlockFP = metaIn.readLong(); + indexStartPointer = metaIn.readLong(); } else { - packedIndex = new BKDOnHeapInput(slice, numBytes); + indexStartPointer = indexIn.getFilePointer(); + minLeafBlockFP = indexIn.readVLong(); + indexIn.seek(indexStartPointer); } - - this.in = in; + this.packedIndex = indexIn.slice("packedIndex", indexStartPointer, numIndexBytes); + this.in = dataIn; } long getMinLeafBlockFP() { - return packedIndex.getMinLeafBlockFP(); + return minLeafBlockFP; } - /** Used to walk the in-heap index. The format takes advantage of the limited + /** Used to walk the off-heap index. The format takes advantage of the limited * access pattern to the BKD tree at search time, i.e. starting at the root * node and recursing downwards one child at a time. 
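Since the packed index now stays on disk, the constructor above simply takes an IndexInput slice over the index file and hands clones of it to each tree walker. A self-contained sketch of slice semantics (ByteBuffersDirectory and the file name are chosen purely for illustration):

```java
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;

public class SliceDemo {
  public static void main(String[] args) throws Exception {
    try (Directory dir = new ByteBuffersDirectory()) {
      try (IndexOutput out = dir.createOutput("f", IOContext.DEFAULT)) {
        for (int i = 0; i < 16; i++) {
          out.writeByte((byte) i);
        }
      }
      try (IndexInput in = dir.openInput("f", IOContext.DEFAULT)) {
        // A bounded, independently positioned view of bytes [4, 12).
        IndexInput packedIndex = in.slice("packedIndex", 4, 8);
        System.out.println(packedIndex.length());   // 8
        System.out.println(packedIndex.readByte()); // 4: offsets are slice-relative
        // Clones share the bytes but read independently from the parent's position.
        System.out.println(packedIndex.clone().readByte()); // 5
      }
    }
  }
}
```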
* @lucene.internal */ @@ -240,13 +116,11 @@ public class IndexTree implements Cloneable { private int level; private int splitDim; private final byte[][] splitPackedValueStack; - // used to read the packed byte[] - private final BKDInput in; + // used to read the packed tree off-heap + private final IndexInput in; // holds the minimum (left most) leaf block file pointer for each level we've recursed to: private final long[] leafBlockFPStack; - // holds the address, in the packed byte[] index, of the left-node of each level: - private final int[] leftNodePositions; - // holds the address, in the packed byte[] index, of the right-node of each level: + // holds the address, in the off-heap index, of the right-node of each level: private final int[] rightNodePositions; // holds the splitDim for each level: private final int[] splitDims; @@ -260,52 +134,41 @@ public class IndexTree implements Cloneable { private final BytesRef scratch; IndexTree() { + this(packedIndex.clone(), 1, 1); + // read root node + readNodeData(false); + } + + private IndexTree(IndexInput in, int nodeID, int level) { int treeDepth = getTreeDepth(); splitPackedValueStack = new byte[treeDepth+1][]; - nodeID = 1; - level = 1; + this.nodeID = nodeID; + this.level = level; splitPackedValueStack[level] = new byte[packedIndexBytesLength]; leafBlockFPStack = new long[treeDepth+1]; - leftNodePositions = new int[treeDepth+1]; rightNodePositions = new int[treeDepth+1]; splitValuesStack = new byte[treeDepth+1][]; splitDims = new int[treeDepth+1]; negativeDeltas = new boolean[numIndexDims*(treeDepth+1)]; - - in = packedIndex.clone(); + this.in = in; splitValuesStack[0] = new byte[packedIndexBytesLength]; - readNodeData(false); scratch = new BytesRef(); scratch.length = bytesPerDim; } public void pushLeft() { - int nodePosition = leftNodePositions[level]; nodeID *= 2; level++; - if (splitPackedValueStack[level] == null) { - splitPackedValueStack[level] = new byte[packedIndexBytesLength]; - } - System.arraycopy(negativeDeltas, (level-1)*numIndexDims, negativeDeltas, level*numIndexDims, numIndexDims); - assert splitDim != -1; - negativeDeltas[level*numIndexDims+splitDim] = true; - try { - in.setPosition(nodePosition); - } catch (IOException e) { - throw new UncheckedIOException(e); - } readNodeData(true); } /** Clone, but you are not allowed to pop up past the point where the clone happened. 
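For reference, pushLeft and pushRight still navigate by the implicit complete-binary-tree numbering; a tiny standalone sketch of that arithmetic (not a Lucene API, just the invariants the per-level stacks above rely on):

```java
public class HeapNumberingDemo {
  // Root is node 1; children of node i are 2*i and 2*i + 1, so in a tree with
  // numLeaves leaves, node IDs >= leafNodeOffset belong to leaf nodes.
  static boolean isLeaf(int nodeID, int leafNodeOffset) {
    return nodeID >= leafNodeOffset;
  }

  public static void main(String[] args) {
    int leafNodeOffset = 4;  // 4 leaves: nodes 4..7; inner nodes 1..3
    int nodeID = 1;
    nodeID = nodeID * 2;     // pushLeft  -> 2
    nodeID = nodeID * 2 + 1; // pushRight -> 5
    System.out.println(isLeaf(nodeID, leafNodeOffset)); // true
  }
}
```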
*/ @Override public IndexTree clone() { - IndexTree index = new IndexTree(); - index.nodeID = nodeID; - index.level = level; + IndexTree index = new IndexTree(in.clone(), nodeID, level); + // copy node data index.splitDim = splitDim; index.leafBlockFPStack[level] = leafBlockFPStack[level]; - index.leftNodePositions[level] = leftNodePositions[level]; index.rightNodePositions[level] = rightNodePositions[level]; index.splitValuesStack[index.level] = splitValuesStack[index.level].clone(); System.arraycopy(negativeDeltas, level*numIndexDims, index.negativeDeltas, level*numIndexDims, numIndexDims); @@ -314,17 +177,12 @@ public IndexTree clone() { } public void pushRight() { - int nodePosition = rightNodePositions[level]; + final int nodePosition = rightNodePositions[level]; + assert nodePosition >= in.getFilePointer() : "nodePosition = " + nodePosition + " < currentPosition=" + in.getFilePointer(); nodeID = nodeID * 2 + 1; level++; - if (splitPackedValueStack[level] == null) { - splitPackedValueStack[level] = new byte[packedIndexBytesLength]; - } - System.arraycopy(negativeDeltas, (level-1)*numIndexDims, negativeDeltas, level*numIndexDims, numIndexDims); - assert splitDim != -1; - negativeDeltas[level*numIndexDims+splitDim] = false; try { - in.setPosition(nodePosition); + in.seek(nodePosition); } catch (IOException e) { throw new UncheckedIOException(e); } @@ -412,6 +270,13 @@ private int getNumLeavesSlow(int node) { } private void readNodeData(boolean isLeft) { + if (splitPackedValueStack[level] == null) { + splitPackedValueStack[level] = new byte[packedIndexBytesLength]; + } + System.arraycopy(negativeDeltas, (level-1)*numIndexDims, negativeDeltas, level*numIndexDims, numIndexDims); + assert splitDim != -1; + negativeDeltas[level*numIndexDims+splitDim] = isLeft; + try { leafBlockFPStack[level] = leafBlockFPStack[level - 1]; @@ -454,9 +319,7 @@ private void readNodeData(boolean isLeft) { } else { leftNumBytes = 0; } - - leftNodePositions[level] = in.getPosition(); - rightNodePositions[level] = leftNodePositions[level] + leftNumBytes; + rightNodePositions[level] = Math.toIntExact(in.getFilePointer()) + leftNumBytes; } } catch (IOException e) { throw new UncheckedIOException(e); @@ -880,11 +743,6 @@ private long estimatePointCount(IntersectState state, byte[] cellMinPacked, byte } } - @Override - public long ramBytesUsed() { - return packedIndex.ramBytesUsed(); - } - @Override public byte[] getMinPackedValue() { return minPackedValue.clone(); diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java index 727b824f7214..de71941ec52e 100644 --- a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java +++ b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java @@ -18,6 +18,7 @@ import java.io.Closeable; import java.io.IOException; +import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -57,9 +58,10 @@ * Recursively builds a block KD-tree to assign all incoming points in N-dim space to smaller * and smaller N-dim rectangles (cells) until the number of points in a given * rectangle is <= maxPointsInLeafNode. The tree is - * fully balanced, which means the leaf nodes will have between 50% and 100% of - * the requested maxPointsInLeafNode. Values that fall exactly - * on a cell boundary may be in either cell. + * partially balanced, which means the leaf nodes will have + * the requested maxPointsInLeafNode except one that might have less. 
+ * Leaf nodes may straddle the two bottom levels of the binary tree. + * Values that fall exactly on a cell boundary may be in either cell. * *

The number of dimensions can be 1 to 8, but every byte[] value is fixed length. * @@ -68,7 +70,7 @@ * {@code maxMBSortInHeap} heap space for writing. * *
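Concretely, "partially balanced" falls out of the new leaf-count arithmetic: the number of leaves is a plain ceiling division, so every leaf except possibly the last is full. A worked sketch (the point count is made up; 512 is the default this patch moves to, see below):

```java
public class LeafCountDemo {
  public static void main(String[] args) {
    int maxPointsInLeafNode = 512;
    long pointCount = 1200;
    // Ceiling division, as in the patched finish()/writeFieldNDims:
    int numLeaves = Math.toIntExact(
        (pointCount + maxPointsInLeafNode - 1) / maxPointsInLeafNode);
    System.out.println(numLeaves); // 3: leaves hold 512, 512 and 176 points
  }
}
```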

- * NOTE: This can write at most Integer.MAX_VALUE * maxPointsInLeafNode / (1+bytesPerDim) + * NOTE: This can write at most Integer.MAX_VALUE * maxPointsInLeafNode / bytesPerDim + * total points. * * @lucene.experimental */ @@ -81,13 +83,14 @@ public class BKDWriter implements Closeable { public static final int VERSION_LEAF_STORES_BOUNDS = 5; public static final int VERSION_SELECTIVE_INDEXING = 6; public static final int VERSION_LOW_CARDINALITY_LEAVES = 7; - public static final int VERSION_CURRENT = VERSION_LOW_CARDINALITY_LEAVES; + public static final int VERSION_META_FILE = 9; + public static final int VERSION_CURRENT = VERSION_META_FILE; /** How many bytes each docs takes in the fixed-width offline format */ private final int bytesPerDoc; /** Default maximum number of point in each leaf block */ - public static final int DEFAULT_MAX_POINTS_IN_LEAF_NODE = 1024; + public static final int DEFAULT_MAX_POINTS_IN_LEAF_NODE = 512; /** Default maximum heap to use, before spilling to (slower) disk */ public static final float DEFAULT_MAX_MB_SORT_IN_HEAP = 16.0f; @@ -252,11 +255,6 @@ public void add(byte[] packedValue, int docID) throws IOException { docsSeen.set(docID); } - /** How many points have been added so far */ - public long getPointCount() { - return pointCount; - } - private static class MergeReader { final BKDReader bkd; final BKDReader.IntersectState state; @@ -371,16 +369,32 @@ public boolean lessThan(MergeReader a, MergeReader b) { } } + /** flat representation of a kd-tree */ + private interface BKDTreeLeafNodes { + /** number of leaf nodes */ + int numLeaves(); + /** pointer to the leaf node previously written. Leaves are ordered from + * left to right, so the leaf at {@code index} 0 is the leftmost leaf and + * the leaf at {@code numLeaves()} - 1 is the rightmost leaf */ + long getLeafLP(int index); + /** split value between two leaves. The split value at position n corresponds to the + * leaves at (n - 1) and n. */ + BytesRef getSplitValue(int index); + /** split dimension between two leaves. The split dimension at position n corresponds to the + * leaves at (n - 1) and n. */ + int getSplitDimension(int index); + } + /** Write a field from a {@link MutablePointValues}. This way of writing * points is faster than regular writes with {@link BKDWriter#add} since * there is opportunity for reordering points before writing them to * disk. This method does not use transient disk in order to reorder points. */ - public long writeField(IndexOutput out, String fieldName, MutablePointValues reader) throws IOException { + public Runnable writeField(IndexOutput metaOut, IndexOutput indexOut, IndexOutput dataOut, String fieldName, MutablePointValues reader) throws IOException { if (numDataDims == 1) { - return writeField1Dim(out, fieldName, reader); + return writeField1Dim(metaOut, indexOut, dataOut, fieldName, reader); } else { - return writeFieldNDims(out, fieldName, reader); + return writeFieldNDims(metaOut, indexOut, dataOut, fieldName, reader); } } @@ -407,7 +421,7 @@ private void computePackedValueBounds(MutablePointValues values, int from, int t /* In the 2+D case, we recursively pick the split dimension, compute the * median value and partition other values around it. 
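The private BKDTreeLeafNodes interface above replaces the old implicit-heap byte[] layout with a flat, left-to-right view of leaves and the splits between them. A standalone stand-in over plain arrays, to make the indexing convention concrete (the class and fields here are hypothetical; the real interface lives inside BKDWriter):

```java
import org.apache.lucene.util.BytesRef;

// Hypothetical stand-in mirroring the BKDTreeLeafNodes contract: leaves are
// numbered 0..numLeaves-1 from left to right, and split n separates leaves n-1 and n.
final class FlatLeafNodes {
  private final long[] leafBlockFPs; // file pointer of each leaf block
  private final byte[] splitValues;  // (numLeaves - 1) packed values, bytesPerDim each
  private final byte[] splitDims;    // split dimension per split position
  private final int bytesPerDim;

  FlatLeafNodes(long[] leafBlockFPs, byte[] splitValues, byte[] splitDims, int bytesPerDim) {
    this.leafBlockFPs = leafBlockFPs;
    this.splitValues = splitValues;
    this.splitDims = splitDims;
    this.bytesPerDim = bytesPerDim;
  }

  int numLeaves() {
    return leafBlockFPs.length;
  }

  long getLeafLP(int index) {
    return leafBlockFPs[index];
  }

  BytesRef getSplitValue(int index) {
    // A view into the packed array, like the writer's reused scratch BytesRef.
    return new BytesRef(splitValues, index * bytesPerDim, bytesPerDim);
  }

  int getSplitDimension(int index) {
    return splitDims[index] & 0xff; // stored as a byte, read back unsigned
  }
}
```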
*/ - private long writeFieldNDims(IndexOutput out, String fieldName, MutablePointValues values) throws IOException { + private Runnable writeFieldNDims(IndexOutput metaOut, IndexOutput indexOut, IndexOutput dataOut, String fieldName, MutablePointValues values) throws IOException { if (pointCount != 0) { throw new IllegalStateException("cannot mix add and writeField"); } @@ -420,19 +434,15 @@ private long writeFieldNDims(IndexOutput out, String fieldName, MutablePointValu // Mark that we already finished: finished = true; - long countPerLeaf = pointCount = values.size(); - long innerNodeCount = 1; - - while (countPerLeaf > maxPointsInLeafNode) { - countPerLeaf = (countPerLeaf+1)/2; - innerNodeCount *= 2; - } + pointCount = values.size(); - int numLeaves = Math.toIntExact(innerNodeCount); + final int numLeaves = Math.toIntExact((pointCount + maxPointsInLeafNode - 1) / maxPointsInLeafNode); + final int numSplits = numLeaves - 1; checkMaxLeafNodeCount(numLeaves); - final byte[] splitPackedValues = new byte[numLeaves * (bytesPerDim + 1)]; + final byte[] splitPackedValues = new byte[numSplits * bytesPerDim]; + final byte[] splitDimensionValues = new byte[numSplits]; final long[] leafBlockFPs = new long[numLeaves]; // compute the min/max for this slice @@ -441,24 +451,55 @@ private long writeFieldNDims(IndexOutput out, String fieldName, MutablePointValu docsSeen.set(values.getDocID(i)); } + final long dataStartFP = dataOut.getFilePointer(); final int[] parentSplits = new int[numIndexDims]; - build(1, numLeaves, values, 0, Math.toIntExact(pointCount), out, + build(0, numLeaves, values, 0, Math.toIntExact(pointCount), dataOut, minPackedValue.clone(), maxPackedValue.clone(), parentSplits, - splitPackedValues, leafBlockFPs, + splitPackedValues, splitDimensionValues, leafBlockFPs, new int[maxPointsInLeafNode]); assert Arrays.equals(parentSplits, new int[numIndexDims]); - long indexFP = out.getFilePointer(); - writeIndex(out, Math.toIntExact(countPerLeaf), leafBlockFPs, splitPackedValues); - return indexFP; + scratchBytesRef1.length = bytesPerDim; + scratchBytesRef1.bytes = splitPackedValues; + + BKDTreeLeafNodes leafNodes = new BKDTreeLeafNodes() { + @Override + public long getLeafLP(int index) { + return leafBlockFPs[index]; + } + + @Override + public BytesRef getSplitValue(int index) { + scratchBytesRef1.offset = index * bytesPerDim; + return scratchBytesRef1; + } + + @Override + public int getSplitDimension(int index) { + return splitDimensionValues[index] & 0xff; + } + + @Override + public int numLeaves() { + return leafBlockFPs.length; + } + }; + + return () -> { + try { + writeIndex(metaOut, indexOut, maxPointsInLeafNode, leafNodes, dataStartFP); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }; } /* In the 1D case, we can simply sort points in ascending order and use the * same writing logic as we use at merge time. 
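Note the signature change: writeField no longer returns the index file pointer; it returns a Runnable that performs the index write later, so a codec can finish all data files before any tree metadata is emitted. A toy sketch of the same deferred-write pattern (everything here is hypothetical demo code, not the codec):

```java
public class DeferredWriteDemo {
  // Writes the bulk data immediately, returns a Runnable that writes the
  // small index part once the caller decides the outputs are ready.
  static Runnable writeField(StringBuilder dataOut, StringBuilder indexOut, String points) {
    int dataStartFP = dataOut.length(); // captured for the deferred step
    dataOut.append(points);
    return () -> indexOut.append("index(dataStart=").append(dataStartFP).append(')');
  }

  public static void main(String[] args) {
    StringBuilder data = new StringBuilder();
    StringBuilder index = new StringBuilder();
    Runnable finishIndex = writeField(data, index, "leafBlocks...");
    data.append("|another-field"); // more data may land before any index bytes
    finishIndex.run();
    System.out.println(data);  // leafBlocks...|another-field
    System.out.println(index); // index(dataStart=0)
  }
}
```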
*/ - private long writeField1Dim(IndexOutput out, String fieldName, MutablePointValues reader) throws IOException { + private Runnable writeField1Dim(IndexOutput metaOut, IndexOutput indexOut, IndexOutput dataOut, String fieldName, MutablePointValues reader) throws IOException { MutablePointsReaderUtils.sort(maxDoc, packedIndexBytesLength, reader, 0, Math.toIntExact(reader.size())); - final OneDimensionBKDWriter oneDimWriter = new OneDimensionBKDWriter(out); + final OneDimensionBKDWriter oneDimWriter = new OneDimensionBKDWriter(metaOut, indexOut, dataOut); reader.intersect(new IntersectVisitor() { @@ -484,7 +525,7 @@ public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { /** More efficient bulk-add for incoming {@link BKDReader}s. This does a merge sort of the already * sorted values and currently only works when numDims==1. This returns -1 if all documents containing * dimensional values were deleted. */ - public long merge(IndexOutput out, List docMaps, List readers) throws IOException { + public Runnable merge(IndexOutput metaOut, IndexOutput indexOut, IndexOutput dataOut, List docMaps, List readers) throws IOException { assert docMaps == null || readers.size() == docMaps.size(); BKDMergeQueue queue = new BKDMergeQueue(bytesPerDim, readers.size()); @@ -503,7 +544,7 @@ public long merge(IndexOutput out, List docMaps, List docMaps, List leafBlockFPs = new ArrayList<>(); final List leafBlockStartValues = new ArrayList<>(); final byte[] leafValues = new byte[maxPointsInLeafNode * packedBytesLength]; @@ -536,7 +578,7 @@ private class OneDimensionBKDWriter { private int leafCount; private int leafCardinality; - OneDimensionBKDWriter(IndexOutput out) { + OneDimensionBKDWriter(IndexOutput metaOut, IndexOutput indexOut, IndexOutput dataOut) { if (numIndexDims != 1) { throw new UnsupportedOperationException("numIndexDims must be 1 but got " + numIndexDims); } @@ -552,7 +594,10 @@ private class OneDimensionBKDWriter { // Mark that we already finished: finished = true; - this.out = out; + this.metaOut = metaOut; + this.indexOut = indexOut; + this.dataOut = dataOut; + this.dataStartFP = dataOut.getFilePointer(); lastPackedValue = new byte[packedBytesLength]; } @@ -588,7 +633,7 @@ assert valueInOrder(valueCount + leafCount, assert (lastDocID = docID) >= 0; // only assign when asserts are enabled } - public long finish() throws IOException { + public Runnable finish() throws IOException { if (leafCount > 0) { writeLeafBlock(leafCardinality); leafCardinality = 0; @@ -596,25 +641,43 @@ public long finish() throws IOException { } if (valueCount == 0) { - return -1; + return null; } pointCount = valueCount; - long indexFP = out.getFilePointer(); + scratchBytesRef1.length = bytesPerDim; + scratchBytesRef1.offset = 0; + assert leafBlockStartValues.size() + 1 == leafBlockFPs.size(); + BKDTreeLeafNodes leafNodes = new BKDTreeLeafNodes() { + @Override + public long getLeafLP(int index) { + return leafBlockFPs.get(index); + } - int numInnerNodes = leafBlockStartValues.size(); + @Override + public BytesRef getSplitValue(int index) { + scratchBytesRef1.bytes = leafBlockStartValues.get(index); + return scratchBytesRef1; + } - //System.out.println("BKDW: now rotate numInnerNodes=" + numInnerNodes + " leafBlockStarts=" + leafBlockStartValues.size()); + @Override + public int getSplitDimension(int index) { + return 0; + } - byte[] index = new byte[(1+numInnerNodes) * (1+bytesPerDim)]; - rotateToTree(1, 0, numInnerNodes, index, leafBlockStartValues); - long[] arr = new long[leafBlockFPs.size()]; - 
for(int i=0;i { + try { + writeIndex(metaOut, indexOut, maxPointsInLeafNode, leafNodes, dataStartFP); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }; } private void writeLeafBlock(int leafCardinality) throws IOException { @@ -630,7 +693,7 @@ private void writeLeafBlock(int leafCardinality) throws IOException { // Save the first (minimum) value in each leaf block except the first, to build the split value index in the end: leafBlockStartValues.add(ArrayUtil.copyOfSubArray(leafValues, 0, packedBytesLength)); } - leafBlockFPs.add(out.getFilePointer()); + leafBlockFPs.add(dataOut.getFilePointer()); checkMaxLeafNodeCount(leafBlockFPs.size()); // Find per-dim common prefix: @@ -660,58 +723,26 @@ assert valuesInOrderAndBounds(leafCount, 0, ArrayUtil.copyOfSubArray(leafValues, ArrayUtil.copyOfSubArray(leafValues, (leafCount - 1) * packedBytesLength, leafCount * packedBytesLength), packedValues, leafDocs, 0); writeLeafBlockPackedValues(scratchOut, commonPrefixLengths, leafCount, 0, packedValues, leafCardinality); - scratchOut.copyTo(out); + scratchOut.copyTo(dataOut); scratchOut.reset(); } } - // TODO: there must be a simpler way? - private void rotateToTree(int nodeID, int offset, int count, byte[] index, List leafBlockStartValues) { - //System.out.println("ROTATE: nodeID=" + nodeID + " offset=" + offset + " count=" + count + " bpd=" + bytesPerDim + " index.length=" + index.length); - if (count == 1) { - // Leaf index node - //System.out.println(" leaf index node"); - //System.out.println(" index[" + nodeID + "] = blockStartValues[" + offset + "]"); - System.arraycopy(leafBlockStartValues.get(offset), 0, index, nodeID*(1+bytesPerDim)+1, bytesPerDim); - } else if (count > 1) { - // Internal index node: binary partition of count - int countAtLevel = 1; - int totalCount = 0; - while (true) { - int countLeft = count - totalCount; - //System.out.println(" cycle countLeft=" + countLeft + " coutAtLevel=" + countAtLevel); - if (countLeft <= countAtLevel) { - // This is the last level, possibly partially filled: - int lastLeftCount = Math.min(countAtLevel/2, countLeft); - assert lastLeftCount >= 0; - int leftHalf = (totalCount-1)/2 + lastLeftCount; - - int rootOffset = offset + leftHalf; - /* - System.out.println(" last left count " + lastLeftCount); - System.out.println(" leftHalf " + leftHalf + " rightHalf=" + (count-leftHalf-1)); - System.out.println(" rootOffset=" + rootOffset); - */ - - System.arraycopy(leafBlockStartValues.get(rootOffset), 0, index, nodeID*(1+bytesPerDim)+1, bytesPerDim); - //System.out.println(" index[" + nodeID + "] = blockStartValues[" + rootOffset + "]"); - - // TODO: we could optimize/specialize, when we know it's simply fully balanced binary tree - // under here, to save this while loop on each recursion - - // Recurse left - rotateToTree(2*nodeID, offset, leftHalf, index, leafBlockStartValues); - - // Recurse right - rotateToTree(2*nodeID+1, rootOffset+1, count-leftHalf-1, index, leafBlockStartValues); - return; - } - totalCount += countAtLevel; - countAtLevel *= 2; - } - } else { - assert count == 0; - } + private int getNumLeftLeafNodes(int numLeaves) { + assert numLeaves > 1: "getNumLeftLeaveNodes() called with " + numLeaves; + // return the level that can be filled with this number of leaves + int lastFullLevel = 31 - Integer.numberOfLeadingZeros(numLeaves); + // how many leaf nodes are in the full level + int leavesFullLevel = 1 << lastFullLevel; + // half of the leaf nodes from the full level goes to the left + int numLeftLeafNodes = 
leavesFullLevel / 2; + // leaf nodes that do not fit in the full level + int unbalancedLeafNodes = numLeaves - leavesFullLevel; + // distribute unbalanced leaf nodes + numLeftLeafNodes += Math.min(unbalancedLeafNodes, numLeftLeafNodes); + // we should always place unbalanced leaf nodes on the left + assert numLeftLeafNodes >= numLeaves - numLeftLeafNodes && numLeftLeafNodes <= 2L * (numLeaves - numLeftLeafNodes); + return numLeftLeafNodes; } // TODO: if we fixed each partition step to just record the file offset at the "split point", we could probably handle variable length @@ -736,13 +767,14 @@ private void printPathSlice(String desc, PathSlice slice, int dim) throws IOExce */ private void checkMaxLeafNodeCount(int numLeaves) { - if ((1+bytesPerDim) * (long) numLeaves > ArrayUtil.MAX_ARRAY_LENGTH) { + if (bytesPerDim * (long) numLeaves > ArrayUtil.MAX_ARRAY_LENGTH) { throw new IllegalStateException("too many nodes; increase maxPointsInLeafNode (currently " + maxPointsInLeafNode + ") and reindex"); } } - /** Writes the BKD tree to the provided {@link IndexOutput} and returns the file offset where index was written. */ - public long finish(IndexOutput out) throws IOException { + /** Writes the BKD tree to the provided {@link IndexOutput}s and returns a {@link Runnable} that + * writes the index of the tree if at least one point has been added, or {@code null} otherwise. */ + public Runnable finish(IndexOutput metaOut, IndexOutput indexOut, IndexOutput dataOut) throws IOException { // System.out.println("\nBKDTreeWriter.finish pointCount=" + pointCount + " out=" + out + " heapWriter=" + heapPointWriter); // TODO: specialize the 1D case? it's much faster at indexing time (no partitioning on recurse...) @@ -753,7 +785,7 @@ public long finish(IndexOutput out) throws IOException { } if (pointCount == 0) { - throw new IllegalStateException("must index at least one point"); + return null; } //mark as finished @@ -765,16 +797,8 @@ public long finish(IndexOutput out) throws IOException { tempInput = null; pointWriter = null; - - long countPerLeaf = pointCount; - long innerNodeCount = 1; - - while (countPerLeaf > maxPointsInLeafNode) { - countPerLeaf = (countPerLeaf+1)/2; - innerNodeCount *= 2; - } - - int numLeaves = (int) innerNodeCount; + final int numLeaves = Math.toIntExact((pointCount + maxPointsInLeafNode - 1) / maxPointsInLeafNode); + final int numSplits = numLeaves - 1; checkMaxLeafNodeCount(numLeaves); @@ -782,7 +806,8 @@ public long finish(IndexOutput out) throws IOException { // step of the recursion to recompute the split dim: // Indexed by nodeID, but first (root) nodeID is 1. We do 1+ because the lead byte at each recursion says which dim we split on. - byte[] splitPackedValues = new byte[Math.toIntExact(numLeaves*(1+bytesPerDim))]; + byte[] splitPackedValues = new byte[Math.toIntExact(numSplits*bytesPerDim)]; + byte[] splitDimensionValues = new byte[numSplits]; // +1 because leaf count is power of 2 (e.g. 8), and innerNodeCount is power of 2 minus 1 (e.g. 7) long[] leafBlockFPs = new long[numLeaves]; @@ -793,15 +818,17 @@ public long finish(IndexOutput out) throws IOException { //We re-use the selector so we do not need to create an object every time. 
BKDRadixSelector radixSelector = new BKDRadixSelector(numDataDims, numIndexDims, bytesPerDim, maxPointsSortInHeap, tempDir, tempFileNamePrefix); + final long dataStartFP = dataOut.getFilePointer(); boolean success = false; try { final int[] parentSplits = new int[numIndexDims]; - build(1, numLeaves, points, - out, radixSelector, + build(0, numLeaves, points, + dataOut, radixSelector, minPackedValue.clone(), maxPackedValue.clone(), parentSplits, splitPackedValues, + splitDimensionValues, leafBlockFPs, new int[maxPointsInLeafNode]); assert Arrays.equals(parentSplits, new int[numIndexDims]); @@ -818,43 +845,43 @@ public long finish(IndexOutput out) throws IOException { } } - //System.out.println("Total nodes: " + innerNodeCount); - - // Write index: - long indexFP = out.getFilePointer(); - writeIndex(out, Math.toIntExact(countPerLeaf), leafBlockFPs, splitPackedValues); - return indexFP; - } + scratchBytesRef1.bytes = splitPackedValues; + scratchBytesRef1.length = bytesPerDim; + BKDTreeLeafNodes leafNodes = new BKDTreeLeafNodes() { + @Override + public long getLeafLP(int index) { + return leafBlockFPs[index]; + } - /** Packs the two arrays, representing a balanced binary tree, into a compact byte[] structure. */ - private byte[] packIndex(long[] leafBlockFPs, byte[] splitPackedValues) throws IOException { + @Override + public BytesRef getSplitValue(int index) { + scratchBytesRef1.offset = index * bytesPerDim; + return scratchBytesRef1; + } - int numLeaves = leafBlockFPs.length; + @Override + public int getSplitDimension(int index) { + return splitDimensionValues[index] & 0xff; + } - // Possibly rotate the leaf block FPs, if the index not fully balanced binary tree (only happens - // if it was created by OneDimensionBKDWriter). In this case the leaf nodes may straddle the two bottom - // levels of the binary tree: - if (numIndexDims == 1 && numLeaves > 1) { - int levelCount = 2; - while (true) { - if (numLeaves >= levelCount && numLeaves <= 2*levelCount) { - int lastLevel = 2*(numLeaves - levelCount); - assert lastLevel >= 0; - if (lastLevel != 0) { - // Last level is partially filled, so we must rotate the leaf FPs to match. We do this here, after loading - // at read-time, so that we can still delta code them on disk at write: - long[] newLeafBlockFPs = new long[numLeaves]; - System.arraycopy(leafBlockFPs, lastLevel, newLeafBlockFPs, 0, leafBlockFPs.length - lastLevel); - System.arraycopy(leafBlockFPs, 0, newLeafBlockFPs, leafBlockFPs.length - lastLevel, lastLevel); - leafBlockFPs = newLeafBlockFPs; - } - break; - } + @Override + public int numLeaves() { + return leafBlockFPs.length; + } + }; - levelCount *= 2; + return () -> { + // Write index: + try { + writeIndex(metaOut, indexOut, maxPointsInLeafNode, leafNodes, dataStartFP); + } catch (IOException e) { + throw new UncheckedIOException(e); } - } + }; + } + /** Packs the two arrays, representing a semi-balanced binary tree, into a compact byte[] structure. 
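The shape of the tree comes from the getNumLeftLeafNodes helper shown earlier: only the right edge of the tree can be short. A standalone copy with a few worked values (it mirrors the patch's arithmetic so the tree shape is easy to check by hand):

```java
public class LeftLeavesDemo {
  // Mirrors BKDWriter#getNumLeftLeafNodes from this patch.
  static int getNumLeftLeafNodes(int numLeaves) {
    // deepest level of the tree that this many leaves can fill completely
    int lastFullLevel = 31 - Integer.numberOfLeadingZeros(numLeaves);
    int leavesFullLevel = 1 << lastFullLevel;
    // half of the full level goes left, plus any overflow, filled left-first
    int numLeftLeafNodes = leavesFullLevel / 2;
    numLeftLeafNodes += Math.min(numLeaves - leavesFullLevel, numLeftLeafNodes);
    return numLeftLeafNodes;
  }

  public static void main(String[] args) {
    System.out.println(getNumLeftLeafNodes(3)); // 2: the short leaf ends up rightmost
    System.out.println(getNumLeftLeafNodes(5)); // 3: one overflow leaf goes left
    System.out.println(getNumLeftLeafNodes(8)); // 4: fully balanced
  }
}
```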
*/ + private byte[] packIndex(BKDTreeLeafNodes leafNodes) throws IOException { /** Reused while packing the index */ ByteBuffersDataOutput writeBuffer = ByteBuffersDataOutput.newResettableInstance(); @@ -862,7 +889,8 @@ private byte[] packIndex(long[] leafBlockFPs, byte[] splitPackedValues) throws I List blocks = new ArrayList<>(); byte[] lastSplitValues = new byte[bytesPerDim * numIndexDims]; //System.out.println("\npack index"); - int totalSize = recursePackIndex(writeBuffer, leafBlockFPs, splitPackedValues, 0l, blocks, 1, lastSplitValues, new boolean[numIndexDims], false); + int totalSize = recursePackIndex(writeBuffer, leafNodes, 0l, blocks, lastSplitValues, new boolean[numIndexDims], false, + 0, leafNodes.numLeaves()); // Compact the byte[] blocks into single byte index: byte[] index = new byte[totalSize]; @@ -887,45 +915,43 @@ private int appendBlock(ByteBuffersDataOutput writeBuffer, List blocks) /** * lastSplitValues is per-dimension split value previously seen; we use this to prefix-code the split byte[] on each inner node */ - private int recursePackIndex(ByteBuffersDataOutput writeBuffer, long[] leafBlockFPs, byte[] splitPackedValues, long minBlockFP, List blocks, - int nodeID, byte[] lastSplitValues, boolean[] negativeDeltas, boolean isLeft) throws IOException { - if (nodeID >= leafBlockFPs.length) { - int leafID = nodeID - leafBlockFPs.length; - //System.out.println("recursePack leaf nodeID=" + nodeID); - - // In the unbalanced case it's possible the left most node only has one child: - if (leafID < leafBlockFPs.length) { - long delta = leafBlockFPs[leafID] - minBlockFP; - if (isLeft) { - assert delta == 0; - return 0; - } else { - assert nodeID == 1 || delta > 0: "nodeID=" + nodeID; - writeBuffer.writeVLong(delta); - return appendBlock(writeBuffer, blocks); - } - } else { + private int recursePackIndex(ByteBuffersDataOutput writeBuffer, BKDTreeLeafNodes leafNodes, long minBlockFP, List blocks, + byte[] lastSplitValues, boolean[] negativeDeltas, boolean isLeft, int leavesOffset, int numLeaves) throws IOException { + if (numLeaves == 1) { + if (isLeft) { + assert leafNodes.getLeafLP(leavesOffset) - minBlockFP == 0; return 0; + } else { + long delta = leafNodes.getLeafLP(leavesOffset) - minBlockFP; + assert leafNodes.numLeaves() == numLeaves || delta > 0 : "expected delta > 0; got numLeaves =" + numLeaves + " and delta=" + delta; + writeBuffer.writeVLong(delta); + return appendBlock(writeBuffer, blocks); } } else { long leftBlockFP; - if (isLeft == false) { - leftBlockFP = getLeftMostLeafBlockFP(leafBlockFPs, nodeID); - long delta = leftBlockFP - minBlockFP; - assert nodeID == 1 || delta > 0 : "expected nodeID=1 or delta > 0; got nodeID=" + nodeID + " and delta=" + delta; - writeBuffer.writeVLong(delta); - } else { + if (isLeft) { // The left tree's left most leaf block FP is always the minimal FP: + assert leafNodes.getLeafLP(leavesOffset) == minBlockFP; leftBlockFP = minBlockFP; + } else { + leftBlockFP = leafNodes.getLeafLP(leavesOffset); + long delta = leftBlockFP - minBlockFP; + assert leafNodes.numLeaves() == numLeaves || delta > 0 : "expected delta > 0; got numLeaves =" + numLeaves + " and delta=" + delta; + writeBuffer.writeVLong(delta); } - int address = nodeID * (1+bytesPerDim); - int splitDim = splitPackedValues[address++] & 0xff; + int numLeftLeafNodes = getNumLeftLeafNodes(numLeaves); + final int rightOffset = leavesOffset + numLeftLeafNodes; + final int splitOffset = rightOffset - 1; + + int splitDim = leafNodes.getSplitDimension(splitOffset); + BytesRef splitValue 
= leafNodes.getSplitValue(splitOffset); + int address = splitValue.offset; //System.out.println("recursePack inner nodeID=" + nodeID + " splitDim=" + splitDim + " splitValue=" + new BytesRef(splitPackedValues, address, bytesPerDim)); // find common prefix with last split value in this dim: - int prefix = Arrays.mismatch(splitPackedValues, address, address + bytesPerDim, lastSplitValues, + int prefix = Arrays.mismatch(splitValue.bytes, address, address + bytesPerDim, lastSplitValues, splitDim * bytesPerDim, splitDim * bytesPerDim + bytesPerDim); if (prefix == -1) { prefix = bytesPerDim; @@ -936,7 +962,7 @@ private int recursePackIndex(ByteBuffersDataOutput writeBuffer, long[] leafBlock int firstDiffByteDelta; if (prefix < bytesPerDim) { //System.out.println(" delta byte cur=" + Integer.toHexString(splitPackedValues[address+prefix]&0xFF) + " prev=" + Integer.toHexString(lastSplitValues[splitDim * bytesPerDim + prefix]&0xFF) + " negated?=" + negativeDeltas[splitDim]); - firstDiffByteDelta = (splitPackedValues[address+prefix]&0xFF) - (lastSplitValues[splitDim * bytesPerDim + prefix]&0xFF); + firstDiffByteDelta = (splitValue.bytes[address+prefix]&0xFF) - (lastSplitValues[splitDim * bytesPerDim + prefix]&0xFF); if (negativeDeltas[splitDim]) { firstDiffByteDelta = -firstDiffByteDelta; } @@ -958,7 +984,7 @@ private int recursePackIndex(ByteBuffersDataOutput writeBuffer, long[] leafBlock int suffix = bytesPerDim - prefix; byte[] savSplitValue = new byte[suffix]; if (suffix > 1) { - writeBuffer.writeBytes(splitPackedValues, address+prefix+1, suffix-1); + writeBuffer.writeBytes(splitValue.bytes, address+prefix+1, suffix-1); } byte[] cmp = lastSplitValues.clone(); @@ -966,7 +992,7 @@ private int recursePackIndex(ByteBuffersDataOutput writeBuffer, long[] leafBlock System.arraycopy(lastSplitValues, splitDim * bytesPerDim + prefix, savSplitValue, 0, suffix); // copy our split value into lastSplitValues for our children to prefix-code against - System.arraycopy(splitPackedValues, address+prefix, lastSplitValues, splitDim * bytesPerDim + prefix, suffix); + System.arraycopy(splitValue.bytes, address+prefix, lastSplitValues, splitDim * bytesPerDim + prefix, suffix); int numBytes = appendBlock(writeBuffer, blocks); @@ -978,9 +1004,11 @@ private int recursePackIndex(ByteBuffersDataOutput writeBuffer, long[] leafBlock boolean savNegativeDelta = negativeDeltas[splitDim]; negativeDeltas[splitDim] = true; - int leftNumBytes = recursePackIndex(writeBuffer, leafBlockFPs, splitPackedValues, leftBlockFP, blocks, 2*nodeID, lastSplitValues, negativeDeltas, true); - if (nodeID * 2 < leafBlockFPs.length) { + int leftNumBytes = recursePackIndex(writeBuffer, leafNodes, leftBlockFP, blocks, lastSplitValues, negativeDeltas, true, + leavesOffset, numLeftLeafNodes); + + if (numLeftLeafNodes != 1) { writeBuffer.writeVInt(leftNumBytes); } else { assert leftNumBytes == 0: "leftNumBytes=" + leftNumBytes; @@ -992,7 +1020,8 @@ private int recursePackIndex(ByteBuffersDataOutput writeBuffer, long[] leafBlock blocks.set(idxSav, bytes2); negativeDeltas[splitDim] = false; - int rightNumBytes = recursePackIndex(writeBuffer, leafBlockFPs, splitPackedValues, leftBlockFP, blocks, 2*nodeID+1, lastSplitValues, negativeDeltas, false); + int rightNumBytes = recursePackIndex(writeBuffer, leafNodes, leftBlockFP, blocks, lastSplitValues, negativeDeltas, false, + rightOffset, numLeaves - numLeftLeafNodes); negativeDeltas[splitDim] = savNegativeDelta; @@ -1005,46 +1034,32 @@ private int recursePackIndex(ByteBuffersDataOutput writeBuffer, long[] 
leafBlock } } - private long getLeftMostLeafBlockFP(long[] leafBlockFPs, int nodeID) { - // TODO: can we do this cheaper, e.g. a closed form solution instead of while loop? Or - // change the recursion while packing the index to return this left-most leaf block FP - // from each recursion instead? - // - // Still, the overall cost here is minor: this method's cost is O(log(N)), and while writing - // we call it O(N) times (N = number of leaf blocks) - while (nodeID < leafBlockFPs.length) { - nodeID *= 2; - } - int leafID = nodeID - leafBlockFPs.length; - long result = leafBlockFPs[leafID]; - if (result < 0) { - throw new AssertionError(result + " for leaf " + leafID); - } - return result; - } - - private void writeIndex(IndexOutput out, int countPerLeaf, long[] leafBlockFPs, byte[] splitPackedValues) throws IOException { - byte[] packedIndex = packIndex(leafBlockFPs, splitPackedValues); - writeIndex(out, countPerLeaf, leafBlockFPs.length, packedIndex); + private void writeIndex(IndexOutput metaOut, IndexOutput indexOut, int countPerLeaf, BKDTreeLeafNodes leafNodes, long dataStartFP) throws IOException { + byte[] packedIndex = packIndex(leafNodes); + writeIndex(metaOut, indexOut, countPerLeaf, leafNodes.numLeaves(), packedIndex, dataStartFP); } - private void writeIndex(IndexOutput out, int countPerLeaf, int numLeaves, byte[] packedIndex) throws IOException { - - CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT); - out.writeVInt(numDataDims); - out.writeVInt(numIndexDims); - out.writeVInt(countPerLeaf); - out.writeVInt(bytesPerDim); + private void writeIndex(IndexOutput metaOut, IndexOutput indexOut, int countPerLeaf, int numLeaves, byte[] packedIndex, long dataStartFP) throws IOException { + CodecUtil.writeHeader(metaOut, CODEC_NAME, VERSION_CURRENT); + metaOut.writeVInt(numDataDims); + metaOut.writeVInt(numIndexDims); + metaOut.writeVInt(countPerLeaf); + metaOut.writeVInt(bytesPerDim); assert numLeaves > 0; - out.writeVInt(numLeaves); - out.writeBytes(minPackedValue, 0, packedIndexBytesLength); - out.writeBytes(maxPackedValue, 0, packedIndexBytesLength); - - out.writeVLong(pointCount); - out.writeVInt(docsSeen.cardinality()); - out.writeVInt(packedIndex.length); - out.writeBytes(packedIndex, 0, packedIndex.length); + metaOut.writeVInt(numLeaves); + metaOut.writeBytes(minPackedValue, 0, packedIndexBytesLength); + metaOut.writeBytes(maxPackedValue, 0, packedIndexBytesLength); + + metaOut.writeVLong(pointCount); + metaOut.writeVInt(docsSeen.cardinality()); + metaOut.writeVInt(packedIndex.length); + metaOut.writeLong(dataStartFP); + // If metaOut and indexOut are the same file, we account for the fact that + // writing a long makes the index start 8 bytes later. + metaOut.writeLong(indexOut.getFilePointer() + (metaOut == indexOut ? Long.BYTES : 0)); + + indexOut.writeBytes(packedIndex, 0, packedIndex.length); } private void writeLeafBlockDocs(DataOutput out, int[] docIDs, int start, int count) throws IOException { @@ -1319,16 +1334,17 @@ private HeapPointWriter switchToHeap(PointWriter source) throws IOException { /* Recursively reorders the provided reader and writes the bkd-tree on the fly; this method is used * when we are writing a new segment directly from IndexWriter's indexing buffer (MutablePointsReader). 
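recursePackIndex above prefix-codes each split value against the last value seen in its dimension. A simplified standalone sketch of the core idea (single dimension, and without the patch's delta/negation encoding of the first differing byte):

```java
import java.util.Arrays;

public class PrefixCodeDemo {
  public static void main(String[] args) {
    byte[] lastSplitValue = {10, 20, 30, 40};
    byte[] splitValue = {10, 20, 35, 0};
    int bytesPerDim = 4;
    // First index at which the two values differ; -1 means they are identical.
    int prefix = Arrays.mismatch(splitValue, 0, bytesPerDim, lastSplitValue, 0, bytesPerDim);
    if (prefix == -1) {
      prefix = bytesPerDim;
    }
    int suffix = bytesPerDim - prefix;
    // Only the suffix bytes need to be written; the reader rebuilds the rest
    // from the previously decoded split value in the same dimension.
    System.out.println("prefix=" + prefix + " suffixBytes=" + suffix); // prefix=2 suffixBytes=2
  }
}
```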
*/ - private void build(int nodeID, int leafNodeOffset, + private void build(int leavesOffset, int numLeaves, MutablePointValues reader, int from, int to, IndexOutput out, byte[] minPackedValue, byte[] maxPackedValue, int[] parentSplits, byte[] splitPackedValues, + byte[] splitDimensionValues, long[] leafBlockFPs, int[] spareDocIds) throws IOException { - if (nodeID >= leafNodeOffset) { + if (numLeaves == 1) { // leaf node final int count = to - from; assert count <= maxPointsInLeafNode; @@ -1402,7 +1418,7 @@ private void build(int nodeID, int leafNodeOffset, } } // Save the block file pointer: - leafBlockFPs[nodeID - leafNodeOffset] = out.getFilePointer(); + leafBlockFPs[leavesOffset] = out.getFilePointer(); assert scratchOut.size() == 0; @@ -1443,13 +1459,16 @@ assert valuesInOrderAndBounds(count, sortedDim, minPackedValue, maxPackedValue, // for dimensions > 2 we recompute the bounds for the current inner node to help the algorithm choose best // split dimensions. Because it is an expensive operation, the frequency we recompute the bounds is given // by SPLITS_BEFORE_EXACT_BOUNDS. - if (nodeID > 1 && numIndexDims > 2 && Arrays.stream(parentSplits).sum() % SPLITS_BEFORE_EXACT_BOUNDS == 0) { + if (numLeaves != leafBlockFPs.length && numIndexDims > 2 && Arrays.stream(parentSplits).sum() % SPLITS_BEFORE_EXACT_BOUNDS == 0) { computePackedValueBounds(reader, from, to, minPackedValue, maxPackedValue, scratchBytesRef1); } splitDim = split(minPackedValue, maxPackedValue, parentSplits); } - final int mid = (from + to + 1) >>> 1; + // How many leaves will be in the left tree: + int numLeftLeafNodes = getNumLeftLeafNodes(numLeaves); + // How many points will be in the left tree: + final int mid = from + numLeftLeafNodes * maxPointsInLeafNode; int commonPrefixLen = Arrays.mismatch(minPackedValue, splitDim * bytesPerDim, splitDim * bytesPerDim + bytesPerDim, maxPackedValue, splitDim * bytesPerDim, @@ -1461,11 +1480,13 @@ assert valuesInOrderAndBounds(count, sortedDim, minPackedValue, maxPackedValue, MutablePointsReaderUtils.partition(numDataDims, numIndexDims, maxDoc, splitDim, bytesPerDim, commonPrefixLen, reader, from, to, mid, scratchBytesRef1, scratchBytesRef2); + final int rightOffset = leavesOffset + numLeftLeafNodes; + final int splitOffset = rightOffset - 1; // set the split value - final int address = nodeID * (1+bytesPerDim); - splitPackedValues[address] = (byte) splitDim; + final int address = splitOffset * bytesPerDim; + splitDimensionValues[splitOffset] = (byte) splitDim; reader.getValue(mid, scratchBytesRef1); - System.arraycopy(scratchBytesRef1.bytes, scratchBytesRef1.offset + splitDim * bytesPerDim, splitPackedValues, address + 1, bytesPerDim); + System.arraycopy(scratchBytesRef1.bytes, scratchBytesRef1.offset + splitDim * bytesPerDim, splitPackedValues, address, bytesPerDim); byte[] minSplitPackedValue = ArrayUtil.copyOfSubArray(minPackedValue, 0, packedIndexBytesLength); byte[] maxSplitPackedValue = ArrayUtil.copyOfSubArray(maxPackedValue, 0, packedIndexBytesLength); @@ -1476,12 +1497,12 @@ assert valuesInOrderAndBounds(count, sortedDim, minPackedValue, maxPackedValue, // recurse parentSplits[splitDim]++; - build(nodeID * 2, leafNodeOffset, reader, from, mid, out, + build(leavesOffset, numLeftLeafNodes, reader, from, mid, out, minPackedValue, maxSplitPackedValue, parentSplits, - splitPackedValues, leafBlockFPs, spareDocIds); - build(nodeID * 2 + 1, leafNodeOffset, reader, mid, to, out, + splitPackedValues, splitDimensionValues, leafBlockFPs, spareDocIds); + build(rightOffset, 
numLeaves - numLeftLeafNodes, reader, mid, to, out, minSplitPackedValue, maxPackedValue, parentSplits, - splitPackedValues, leafBlockFPs, spareDocIds); + splitPackedValues, splitDimensionValues, leafBlockFPs, spareDocIds); parentSplits[splitDim]--; } } @@ -1512,17 +1533,18 @@ private void computePackedValueBounds(BKDRadixSelector.PathSlice slice, byte[] m /** The point writer contains the data that is going to be split using radix selection. * This method is used when we are merging previously written segments, in the numDims > 1 case. */ - private void build(int nodeID, int leafNodeOffset, + private void build(int leavesOffset, int numLeaves, BKDRadixSelector.PathSlice points, IndexOutput out, BKDRadixSelector radixSelector, byte[] minPackedValue, byte[] maxPackedValue, int[] parentSplits, byte[] splitPackedValues, + byte[] splitDimensionValues, long[] leafBlockFPs, int[] spareDocIds) throws IOException { - if (nodeID >= leafNodeOffset) { + if (numLeaves == 1) { // Leaf node: write block // We can write the block in any order so by default we write it sorted by the dimension that has the @@ -1573,13 +1595,13 @@ private void build(int nodeID, int leafNodeOffset, int leafCardinality = heapSource.computeCardinality(from, to, numDataDims, bytesPerDim, commonPrefixLengths); // Save the block file pointer: - leafBlockFPs[nodeID - leafNodeOffset] = out.getFilePointer(); + leafBlockFPs[leavesOffset] = out.getFilePointer(); //System.out.println(" write leaf block @ fp=" + out.getFilePointer()); // Write docIDs first, as their own chunk, so that at intersect time we can add all docIDs w/o // loading the values: int count = to - from; - assert count > 0: "nodeID=" + nodeID + " leafNodeOffset=" + leafNodeOffset; + assert count > 0: "numLeaves=" + numLeaves + " leavesOffset=" + leavesOffset; assert count <= spareDocIds.length : "count=" + count + " > length=" + spareDocIds.length; // Write doc IDs int[] docIDs = spareDocIds; @@ -1622,17 +1644,18 @@ assert valuesInOrderAndBounds(count, sortedDim, minPackedValue, maxPackedValue, // for dimensions > 2 we recompute the bounds for the current inner node to help the algorithm choose best // split dimensions. Because it is an expensive operation, the frequency we recompute the bounds is given // by SPLITS_BEFORE_EXACT_BOUNDS.
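
A note on the leaf arithmetic in the hunks above: the rewrite drops the old heap-style nodeID addressing, which only worked for a fully balanced tree, in favor of explicit leaf counting. Every leaf sent to the left subtree is kept full, which is why the midpoint of a range is simply from + numLeftLeafNodes * maxPointsInLeafNode. The getNumLeftLeafNodes helper is called in both build() variants but its body is not part of this hunk; the following is a minimal sketch of one implementation consistent with those call sites and with the left/right recursion above:

  // Sketch only (not part of this patch): choose how many leaves go to the
  // left subtree so that all leaves except possibly the right-most are full.
  private int getNumLeftLeafNodes(int numLeaves) {
    assert numLeaves > 1 : "expected more than one leaf, got " + numLeaves;
    // deepest complete level of a binary tree with numLeaves leaves
    int lastFullLevel = 31 - Integer.numberOfLeadingZeros(numLeaves);
    // leaf count of a perfectly balanced tree of that depth
    int leavesFullLevel = 1 << lastFullLevel;
    // a complete tree sends half of its leaves to the left
    int numLeftLeafNodes = leavesFullLevel / 2;
    // leaves overflowing the complete level are assigned left-to-right
    int unbalancedLeafNodes = numLeaves - leavesFullLevel;
    numLeftLeafNodes += Math.min(unbalancedLeafNodes, numLeftLeafNodes);
    // the left side never gets fewer leaves than the right, and at most 2x
    assert numLeftLeafNodes >= numLeaves - numLeftLeafNodes
        && numLeftLeafNodes <= 2 * (numLeaves - numLeftLeafNodes);
    return numLeftLeafNodes;
  }
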
- if (nodeID > 1 && numIndexDims > 2 && Arrays.stream(parentSplits).sum() % SPLITS_BEFORE_EXACT_BOUNDS == 0) { + if (numLeaves != leafBlockFPs.length && numIndexDims > 2 && Arrays.stream(parentSplits).sum() % SPLITS_BEFORE_EXACT_BOUNDS == 0) { computePackedValueBounds(points, minPackedValue, maxPackedValue); } splitDim = split(minPackedValue, maxPackedValue, parentSplits); } - assert nodeID < splitPackedValues.length : "nodeID=" + nodeID + " splitValues.length=" + splitPackedValues.length; + assert numLeaves <= leafBlockFPs.length : "numLeaves=" + numLeaves + " leafBlockFPs.length=" + leafBlockFPs.length; + // How many leaves will be in the left tree: + final int numLeftLeafNodes = getNumLeftLeafNodes(numLeaves); // How many points will be in the left tree: - long rightCount = points.count / 2; - long leftCount = points.count - rightCount; + final long leftCount = numLeftLeafNodes * maxPointsInLeafNode; BKDRadixSelector.PathSlice[] slices = new BKDRadixSelector.PathSlice[2]; @@ -1645,9 +1668,12 @@ assert valuesInOrderAndBounds(count, sortedDim, minPackedValue, maxPackedValue, byte[] splitValue = radixSelector.select(points, slices, points.start, points.start + points.count, points.start + leftCount, splitDim, commonPrefixLen); - int address = nodeID * (1 + bytesPerDim); - splitPackedValues[address] = (byte) splitDim; - System.arraycopy(splitValue, 0, splitPackedValues, address + 1, bytesPerDim); + final int rightOffset = leavesOffset + numLeftLeafNodes; + final int splitValueOffset = rightOffset - 1; + + splitDimensionValues[splitValueOffset] = (byte) splitDim; + int address = splitValueOffset * bytesPerDim; + System.arraycopy(splitValue, 0, splitPackedValues, address, bytesPerDim); byte[] minSplitPackedValue = new byte[packedIndexBytesLength]; System.arraycopy(minPackedValue, 0, minSplitPackedValue, 0, packedIndexBytesLength); @@ -1660,14 +1686,14 @@ assert valuesInOrderAndBounds(count, sortedDim, minPackedValue, maxPackedValue, parentSplits[splitDim]++; // Recurse on left tree: - build(2 * nodeID, leafNodeOffset, slices[0], + build(leavesOffset, numLeftLeafNodes, slices[0], out, radixSelector, minPackedValue, maxSplitPackedValue, - parentSplits, splitPackedValues, leafBlockFPs, spareDocIds); + parentSplits, splitPackedValues, splitDimensionValues, leafBlockFPs, spareDocIds); // Recurse on right tree: - build(2 * nodeID + 1, leafNodeOffset, slices[1], - out, radixSelector, minSplitPackedValue, maxPackedValue - , parentSplits, splitPackedValues, leafBlockFPs, spareDocIds); + build(rightOffset, numLeaves - numLeftLeafNodes, slices[1], + out, radixSelector, minSplitPackedValue, maxPackedValue, + parentSplits, splitPackedValues, splitDimensionValues, leafBlockFPs, spareDocIds); parentSplits[splitDim]--; } diff --git a/lucene/core/src/java/org/apache/lucene/util/fst/FST.java b/lucene/core/src/java/org/apache/lucene/util/fst/FST.java index 260e9dab1aeb..c3cf67bfbdd5 100644 --- a/lucene/core/src/java/org/apache/lucene/util/fst/FST.java +++ b/lucene/core/src/java/org/apache/lucene/util/fst/FST.java @@ -26,6 +26,7 @@ import java.nio.file.Path; import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.store.ByteBuffersDataOutput; import org.apache.lucene.store.DataInput; import org.apache.lucene.store.DataOutput; @@ -407,26 +408,26 @@ private static boolean flag(int flags, int bit) { private static final int DEFAULT_MAX_BLOCK_BITS = Constants.JRE_IS_64BIT ? 30 : 28; /** Load a previously saved FST. 
*/ - public FST(DataInput in, Outputs outputs) throws IOException { - this(in, outputs, new OnHeapFSTStore(DEFAULT_MAX_BLOCK_BITS)); + public FST(DataInput metaIn, DataInput in, Outputs outputs) throws IOException { + this(metaIn, in, outputs, new OnHeapFSTStore(DEFAULT_MAX_BLOCK_BITS)); } /** Load a previously saved FST; maxBlockBits allows you to * control the size of the byte[] pages used to hold the FST bytes. */ - public FST(DataInput in, Outputs outputs, FSTStore fstStore) throws IOException { + public FST(DataInput metaIn, DataInput in, Outputs outputs, FSTStore fstStore) throws IOException { bytes = null; this.fstStore = fstStore; this.outputs = outputs; // NOTE: only reads formats VERSION_START up to VERSION_CURRENT; we don't have // back-compat promise for FSTs (they are experimental), but we are sometimes able to offer it - CodecUtil.checkHeader(in, FILE_FORMAT_NAME, VERSION_START, VERSION_CURRENT); - if (in.readByte() == 1) { + CodecUtil.checkHeader(metaIn, FILE_FORMAT_NAME, VERSION_START, VERSION_CURRENT); + if (metaIn.readByte() == 1) { // accepts empty string // 1 KB blocks: BytesStore emptyBytes = new BytesStore(10); - int numBytes = in.readVInt(); - emptyBytes.copyBytes(in, numBytes); + int numBytes = metaIn.readVInt(); + emptyBytes.copyBytes(metaIn, numBytes); // De-serialize empty-string output: BytesReader reader = emptyBytes.getReverseReader(); @@ -440,7 +441,7 @@ public FST(DataInput in, Outputs outputs, FSTStore fstStore) throws IOExcepti } else { emptyOutput = null; } - final byte t = in.readByte(); + final byte t = metaIn.readByte(); switch(t) { case 0: inputType = INPUT_TYPE.BYTE1; @@ -452,11 +453,11 @@ public FST(DataInput in, Outputs outputs, FSTStore fstStore) throws IOExcepti inputType = INPUT_TYPE.BYTE4; break; default: - throw new IllegalStateException("invalid input type " + t); + throw new CorruptIndexException("invalid input type " + t, in); } - startNode = in.readVLong(); + startNode = metaIn.readVLong(); - long numBytes = in.readVLong(); + long numBytes = metaIn.readVLong(); this.fstStore.init(in, numBytes); } @@ -501,16 +502,16 @@ void setEmptyOutput(T v) { } } - public void save(DataOutput out) throws IOException { + public void save(DataOutput metaOut, DataOutput out) throws IOException { if (startNode == -1) { throw new IllegalStateException("call finish first"); } - CodecUtil.writeHeader(out, FILE_FORMAT_NAME, VERSION_CURRENT); + CodecUtil.writeHeader(metaOut, FILE_FORMAT_NAME, VERSION_CURRENT); // TODO: really we should encode this as an arc, arriving // to the root node, instead of special casing here: if (emptyOutput != null) { // Accepts empty string - out.writeByte((byte) 1); + metaOut.writeByte((byte) 1); // Serialize empty-string output: ByteBuffersDataOutput ros = new ByteBuffersDataOutput(); @@ -527,10 +528,10 @@ public void save(DataOutput out) throws IOException { emptyOutputBytes[emptyLen - upto - 1] = b; upto++; } - out.writeVInt(emptyLen); - out.writeBytes(emptyOutputBytes, 0, emptyLen); + metaOut.writeVInt(emptyLen); + metaOut.writeBytes(emptyOutputBytes, 0, emptyLen); } else { - out.writeByte((byte) 0); + metaOut.writeByte((byte) 0); } final byte t; if (inputType == INPUT_TYPE.BYTE1) { @@ -540,11 +541,11 @@ public void save(DataOutput out) throws IOException { } else { t = 2; } - out.writeByte(t); - out.writeVLong(startNode); + metaOut.writeByte(t); + metaOut.writeVLong(startNode); if (bytes != null) { long numBytes = bytes.getPosition(); - out.writeVLong(numBytes); + metaOut.writeVLong(numBytes); bytes.writeTo(out); } else { assert 
fstStore != null; @@ -557,7 +558,8 @@ public void save(DataOutput out) throws IOException { */ public void save(final Path path) throws IOException { try (OutputStream os = new BufferedOutputStream(Files.newOutputStream(path))) { - save(new OutputStreamDataOutput(os)); + DataOutput out = new OutputStreamDataOutput(os); + save(out, out); } } @@ -566,7 +568,8 @@ public void save(final Path path) throws IOException { */ public static FST read(Path path, Outputs outputs) throws IOException { try (InputStream is = Files.newInputStream(path)) { - return new FST<>(new InputStreamDataInput(new BufferedInputStream(is)), outputs); + DataInput in = new InputStreamDataInput(new BufferedInputStream(is)); + return new FST<>(in, in, outputs); } } diff --git a/lucene/core/src/java/org/apache/lucene/util/fst/PositiveIntOutputs.java b/lucene/core/src/java/org/apache/lucene/util/fst/PositiveIntOutputs.java index bc9806dcdddc..c6ea6f263f44 100644 --- a/lucene/core/src/java/org/apache/lucene/util/fst/PositiveIntOutputs.java +++ b/lucene/core/src/java/org/apache/lucene/util/fst/PositiveIntOutputs.java @@ -33,6 +33,7 @@ @SuppressForbidden(reason = "Uses a Long instance as a marker") public final class PositiveIntOutputs extends Outputs { + @SuppressWarnings("deprecation") private final static Long NO_OUTPUT = new Long(0); private final static PositiveIntOutputs singleton = new PositiveIntOutputs(); diff --git a/lucene/core/src/java/org/apache/lucene/util/packed/Packed64SingleBlock.java b/lucene/core/src/java/org/apache/lucene/util/packed/Packed64SingleBlock.java index 85e7ea8e0780..dab9a4b9a234 100644 --- a/lucene/core/src/java/org/apache/lucene/util/packed/Packed64SingleBlock.java +++ b/lucene/core/src/java/org/apache/lucene/util/packed/Packed64SingleBlock.java @@ -90,6 +90,7 @@ public int get(int index, long[] arr, int off, int len) { // bulk get assert index % valuesPerBlock == 0; + @SuppressWarnings("deprecation") final PackedInts.Decoder decoder = BulkOperation.of(PackedInts.Format.PACKED_SINGLE_BLOCK, bitsPerValue); assert decoder.longBlockCount() == 1; assert decoder.longValueCount() == valuesPerBlock; @@ -134,6 +135,7 @@ public int set(int index, long[] arr, int off, int len) { // bulk set assert index % valuesPerBlock == 0; + @SuppressWarnings("deprecation") final BulkOperation op = BulkOperation.of(PackedInts.Format.PACKED_SINGLE_BLOCK, bitsPerValue); assert op.longBlockCount() == 1; assert op.longValueCount() == valuesPerBlock; @@ -195,6 +197,7 @@ public void fill(int fromIndex, int toIndex, long val) { } @Override + @SuppressWarnings("deprecation") protected PackedInts.Format getFormat() { return PackedInts.Format.PACKED_SINGLE_BLOCK; } diff --git a/lucene/core/src/resources/META-INF/services/org.apache.lucene.codecs.Codec b/lucene/core/src/resources/META-INF/services/org.apache.lucene.codecs.Codec index 611e237384c3..2897a8ac7c1a 100644 --- a/lucene/core/src/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/lucene/core/src/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
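
Stepping back to the FST change above: FST.save() and the FST constructor now take a separate metadata stream (metaOut/metaIn) alongside the body stream, so a codec can keep FST metadata next to its other per-field metadata; when both halves share a single file, the same stream is simply passed twice, exactly as the save(Path) and read(Path) convenience methods now do. A minimal sketch of the single-file read path, assuming an FST<Long> built with PositiveIntOutputs was previously saved to path (the lookup method and "key" are illustrative, not part of the patch):

  import java.io.BufferedInputStream;
  import java.io.IOException;
  import java.io.InputStream;
  import java.nio.file.Files;
  import java.nio.file.Path;
  import org.apache.lucene.store.DataInput;
  import org.apache.lucene.store.InputStreamDataInput;
  import org.apache.lucene.util.BytesRef;
  import org.apache.lucene.util.fst.FST;
  import org.apache.lucene.util.fst.PositiveIntOutputs;
  import org.apache.lucene.util.fst.Util;

  static Long lookup(Path path, String key) throws IOException {
    try (InputStream is = Files.newInputStream(path)) {
      DataInput in = new InputStreamDataInput(new BufferedInputStream(is));
      // metadata and FST body live in the same file, so one stream serves
      // as both metaIn and in:
      FST<Long> fst = new FST<>(in, in, PositiveIntOutputs.getSingleton());
      return Util.get(fst, new BytesRef(key)); // null if key is not accepted
    }
  }
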
-org.apache.lucene.codecs.lucene84.Lucene84Codec +org.apache.lucene.codecs.lucene86.Lucene86Codec diff --git a/lucene/core/src/resources/META-INF/services/org.apache.lucene.index.SortFieldProvider b/lucene/core/src/resources/META-INF/services/org.apache.lucene.index.SortFieldProvider new file mode 100644 index 000000000000..a96a47b7b810 --- /dev/null +++ b/lucene/core/src/resources/META-INF/services/org.apache.lucene.index.SortFieldProvider @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +org.apache.lucene.search.SortField$Provider +org.apache.lucene.search.SortedNumericSortField$Provider +org.apache.lucene.search.SortedSetSortField$Provider \ No newline at end of file diff --git a/lucene/core/src/test/org/apache/lucene/analysis/TestCharArraySet.java b/lucene/core/src/test/org/apache/lucene/analysis/TestCharArraySet.java index 8430eb82def1..1a32a800bbba 100644 --- a/lucene/core/src/test/org/apache/lucene/analysis/TestCharArraySet.java +++ b/lucene/core/src/test/org/apache/lucene/analysis/TestCharArraySet.java @@ -61,15 +61,19 @@ public void testNonZeroOffset() { public void testObjectContains() { CharArraySet set = new CharArraySet(10, true); Integer val = Integer.valueOf(1); + @SuppressWarnings("deprecation") + Integer val1 = new Integer(1); + // Verify explicitly the case of different Integer instances + assertNotSame(val, val1); set.add(val); assertTrue(set.contains(val)); - assertTrue(set.contains(new Integer(1))); // another integer + assertTrue(set.contains(val1)); // another integer assertTrue(set.contains("1")); assertTrue(set.contains(new char[]{'1'})); // test unmodifiable set = CharArraySet.unmodifiableSet(set); assertTrue(set.contains(val)); - assertTrue(set.contains(new Integer(1))); // another integer + assertTrue(set.contains(val1)); // another integer assertTrue(set.contains("1")); assertTrue(set.contains(new char[]{'1'})); } diff --git a/lucene/core/src/test/org/apache/lucene/codecs/TestCodecUtil.java b/lucene/core/src/test/org/apache/lucene/codecs/TestCodecUtil.java index 0a11a9b5b495..ea0972d5a2af 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/TestCodecUtil.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/TestCodecUtil.java @@ -26,6 +26,8 @@ import org.apache.lucene.store.ByteBuffersIndexInput; import org.apache.lucene.store.ByteBuffersIndexOutput; import org.apache.lucene.store.ChecksumIndexInput; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.util.LuceneTestCase; @@ -148,13 +150,13 @@ public void testCheckFooterValidPastFooter() throws Exception { // bogusly read a byte too far (can happen) input.readByte(); Exception mine = new 
RuntimeException("fake exception"); - RuntimeException expected = expectThrows(RuntimeException.class, () -> { + CorruptIndexException expected = expectThrows(CorruptIndexException.class, () -> { CodecUtil.checkFooter(input, mine); }); - assertEquals("fake exception", expected.getMessage()); + assertTrue(expected.getMessage().contains("checksum status indeterminate")); Throwable suppressed[] = expected.getSuppressed(); assertEquals(1, suppressed.length); - assertTrue(suppressed[0].getMessage().contains("checksum status indeterminate")); + assertEquals("fake exception", suppressed[0].getMessage()); input.close(); } @@ -172,13 +174,13 @@ public void testCheckFooterInvalid() throws Exception { CodecUtil.checkHeader(input, "FooBar", 5, 5); assertEquals("this is the data", input.readString()); Exception mine = new RuntimeException("fake exception"); - RuntimeException expected = expectThrows(RuntimeException.class, () -> { + CorruptIndexException expected = expectThrows(CorruptIndexException.class, () -> { CodecUtil.checkFooter(input, mine); }); - assertEquals("fake exception", expected.getMessage()); + assertTrue(expected.getMessage().contains("checksum failed")); Throwable suppressed[] = expected.getSuppressed(); assertEquals(1, suppressed.length); - assertTrue(suppressed[0].getMessage().contains("checksum failed")); + assertEquals("fake exception", suppressed[0].getMessage()); input.close(); } @@ -319,4 +321,47 @@ public void testTruncatedFileThrowsCorruptIndexException() throws IOException { () -> CodecUtil.retrieveChecksum(input)); assertTrue(e.getMessage(), e.getMessage().contains("misplaced codec footer (file truncated?): length=0 but footerLength==16 (resource")); } + + public void testRetrieveChecksum() throws IOException { + Directory dir = newDirectory(); + try (IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT)) { + out.writeByte((byte) 42); + CodecUtil.writeFooter(out); + } + try (IndexInput in = dir.openInput("foo", IOContext.DEFAULT)) { + CodecUtil.retrieveChecksum(in, in.length()); // no exception + + CorruptIndexException exception = expectThrows(CorruptIndexException.class, + () -> CodecUtil.retrieveChecksum(in, in.length() - 1)); + assertTrue(exception.getMessage().contains("too long")); + assertArrayEquals(new Throwable[0], exception.getSuppressed()); + + exception = expectThrows(CorruptIndexException.class, + () -> CodecUtil.retrieveChecksum(in, in.length() + 1)); + assertTrue(exception.getMessage().contains("truncated")); + assertArrayEquals(new Throwable[0], exception.getSuppressed()); + } + + try (IndexOutput out = dir.createOutput("bar", IOContext.DEFAULT)) { + for (int i = 0; i <= CodecUtil.footerLength(); ++i) { + out.writeByte((byte) i); + } + } + try (IndexInput in = dir.openInput("bar", IOContext.DEFAULT)) { + CorruptIndexException exception = expectThrows(CorruptIndexException.class, + () -> CodecUtil.retrieveChecksum(in, in.length())); + assertTrue(exception.getMessage().contains("codec footer mismatch")); + assertArrayEquals(new Throwable[0], exception.getSuppressed()); + + exception = expectThrows(CorruptIndexException.class, + () -> CodecUtil.retrieveChecksum(in, in.length() - 1)); + assertTrue(exception.getMessage().contains("too long")); + + exception = expectThrows(CorruptIndexException.class, + () -> CodecUtil.retrieveChecksum(in, in.length() + 1)); + assertTrue(exception.getMessage().contains("truncated")); + } + + dir.close(); + } } diff --git 
a/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50StoredFieldsFormatHighCompression.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50StoredFieldsFormatHighCompression.java index 6a3ce93a0f03..cccee736d46b 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50StoredFieldsFormatHighCompression.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50StoredFieldsFormatHighCompression.java @@ -17,9 +17,10 @@ package org.apache.lucene.codecs.lucene50; +import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode; -import org.apache.lucene.codecs.lucene84.Lucene84Codec; +import org.apache.lucene.codecs.lucene86.Lucene86Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.BaseStoredFieldsFormatTestCase; @@ -28,12 +29,10 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.store.Directory; -import com.carrotsearch.randomizedtesting.generators.RandomPicks; - public class TestLucene50StoredFieldsFormatHighCompression extends BaseStoredFieldsFormatTestCase { @Override protected Codec getCodec() { - return new Lucene84Codec(Mode.BEST_COMPRESSION); + return new Lucene86Codec(Mode.BEST_COMPRESSION); } /** @@ -44,7 +43,7 @@ public void testMixedCompressions() throws Exception { Directory dir = newDirectory(); for (int i = 0; i < 10; i++) { IndexWriterConfig iwc = newIndexWriterConfig(); - iwc.setCodec(new Lucene84Codec(RandomPicks.randomFrom(random(), Mode.values()))); + iwc.setCodec(new Lucene86Codec(RandomPicks.randomFrom(random(), Mode.values()))); IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig()); Document doc = new Document(); doc.add(new StoredField("field1", "value1")); @@ -71,7 +70,7 @@ public void testMixedCompressions() throws Exception { public void testInvalidOptions() { expectThrows(NullPointerException.class, () -> { - new Lucene84Codec(null); + new Lucene86Codec(null); }); expectThrows(NullPointerException.class, () -> { diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene80/TestLucene80NormsFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene80/TestLucene80NormsFormat.java index 4eadf05ef5a2..b6e7268d67f1 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/lucene80/TestLucene80NormsFormat.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene80/TestLucene80NormsFormat.java @@ -18,14 +18,14 @@ import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene84.Lucene84Codec; +import org.apache.lucene.codecs.lucene86.Lucene86Codec; import org.apache.lucene.index.BaseNormsFormatTestCase; /** * Tests Lucene80NormsFormat */ public class TestLucene80NormsFormat extends BaseNormsFormatTestCase { - private final Codec codec = new Lucene84Codec(); + private final Codec codec = new Lucene86Codec(); @Override protected Codec getCodec() { diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene86/TestLucene86PointsFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene86/TestLucene86PointsFormat.java new file mode 100644 index 000000000000..8d5ce08f346d --- /dev/null +++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene86/TestLucene86PointsFormat.java @@ -0,0 +1,393 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.codecs.lucene86; + +import java.io.IOException; +import java.util.Arrays; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.codecs.FilterCodec; +import org.apache.lucene.codecs.PointsFormat; +import org.apache.lucene.codecs.PointsReader; +import org.apache.lucene.codecs.PointsWriter; +import org.apache.lucene.document.BinaryPoint; +import org.apache.lucene.document.Document; +import org.apache.lucene.index.BasePointsFormatTestCase; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.MockRandomMergePolicy; +import org.apache.lucene.index.PointValues; +import org.apache.lucene.index.PointValues.IntersectVisitor; +import org.apache.lucene.index.PointValues.Relation; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.TestUtil; +import org.apache.lucene.util.bkd.BKDWriter; + +public class TestLucene86PointsFormat extends BasePointsFormatTestCase { + + private final Codec codec; + private final int maxPointsInLeafNode; + + public TestLucene86PointsFormat() { + // standard issue + Codec defaultCodec = new Lucene86Codec(); + if (random().nextBoolean()) { + // randomize parameters + maxPointsInLeafNode = TestUtil.nextInt(random(), 50, 500); + double maxMBSortInHeap = 3.0 + (3*random().nextDouble()); + if (VERBOSE) { + System.out.println("TEST: using Lucene86PointsFormat with maxPointsInLeafNode=" + maxPointsInLeafNode + " and maxMBSortInHeap=" + maxMBSortInHeap); + } + + // sneaky impersonation!
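+ // (the FilterCodec below reports the default codec's name, so segments
+ // written with randomized points parameters are indistinguishable on disk
+ // from default-codec segments; this is safe because the points reader does
+ // not depend on the write-time maxPointsInLeafNode/maxMBSortInHeap values)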
+ codec = new FilterCodec(defaultCodec.getName(), defaultCodec) { + @Override + public PointsFormat pointsFormat() { + return new PointsFormat() { + @Override + public PointsWriter fieldsWriter(SegmentWriteState writeState) throws IOException { + return new Lucene86PointsWriter(writeState, maxPointsInLeafNode, maxMBSortInHeap); + } + + @Override + public PointsReader fieldsReader(SegmentReadState readState) throws IOException { + return new Lucene86PointsReader(readState); + } + }; + } + }; + } else { + // standard issue + codec = defaultCodec; + maxPointsInLeafNode = BKDWriter.DEFAULT_MAX_POINTS_IN_LEAF_NODE; + } + } + + @Override + protected Codec getCodec() { + return codec; + } + + @Override + public void testMergeStability() throws Exception { + assumeFalse("TODO: mess with the parameters and test gets angry!", codec instanceof FilterCodec); + super.testMergeStability(); + } + + public void testEstimatePointCount() throws IOException { + Directory dir = newDirectory(); + IndexWriterConfig iwc = newIndexWriterConfig(); + // Avoid mockRandomMP since it may cause non-optimal merges that make the + // number of points per leaf hard to predict + while (iwc.getMergePolicy() instanceof MockRandomMergePolicy) { + iwc.setMergePolicy(newMergePolicy()); + } + IndexWriter w = new IndexWriter(dir, iwc); + byte[] pointValue = new byte[3]; + byte[] uniquePointValue = new byte[3]; + random().nextBytes(uniquePointValue); + final int numDocs = TEST_NIGHTLY ? atLeast(10000) : atLeast(500); // at night, make sure we have several leaves + final boolean multiValues = random().nextBoolean(); + for (int i = 0; i < numDocs; ++i) { + Document doc = new Document(); + if (i == numDocs / 2) { + doc.add(new BinaryPoint("f", uniquePointValue)); + } else { + final int numValues = (multiValues) ? 
TestUtil.nextInt(random(), 2, 100) : 1; + for (int j = 0; j < numValues; j ++) { + do { + random().nextBytes(pointValue); + } while (Arrays.equals(pointValue, uniquePointValue)); + doc.add(new BinaryPoint("f", pointValue)); + } + } + w.addDocument(doc); + } + w.forceMerge(1); + final IndexReader r = DirectoryReader.open(w); + w.close(); + final LeafReader lr = getOnlyLeafReader(r); + PointValues points = lr.getPointValues("f"); + + // If all points match, then the point count is numLeaves * maxPointsInLeafNode + final int numLeaves = (int) Math.ceil((double) points.size() / maxPointsInLeafNode); + + IntersectVisitor allPointsVisitor = new IntersectVisitor() { + @Override + public void visit(int docID, byte[] packedValue) throws IOException {} + + @Override + public void visit(int docID) throws IOException {} + + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + return Relation.CELL_INSIDE_QUERY; + } + }; + + assertEquals(numLeaves * maxPointsInLeafNode, points.estimatePointCount(allPointsVisitor)); + assertEquals(numDocs, points.estimateDocCount(allPointsVisitor)); + + IntersectVisitor noPointsVisitor = new IntersectVisitor() { + @Override + public void visit(int docID, byte[] packedValue) throws IOException {} + + @Override + public void visit(int docID) throws IOException {} + + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + return Relation.CELL_OUTSIDE_QUERY; + } + }; + + // Return 0 if no points match + assertEquals(0, points.estimatePointCount(noPointsVisitor)); + assertEquals(0, points.estimateDocCount(noPointsVisitor)); + + IntersectVisitor onePointMatchVisitor = new IntersectVisitor() { + @Override + public void visit(int docID, byte[] packedValue) throws IOException {} + + @Override + public void visit(int docID) throws IOException {} + + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + if (Arrays.compareUnsigned(uniquePointValue, 0, 3, maxPackedValue, 0, 3) > 0 || + Arrays.compareUnsigned(uniquePointValue, 0, 3, minPackedValue, 0, 3) < 0) { + return Relation.CELL_OUTSIDE_QUERY; + } + return Relation.CELL_CROSSES_QUERY; + } + }; + + // If only one point matches, then the point count is (maxPointsInLeafNode + 1) / 2 + // in general, or maybe 2x that if the point is a split value + final long pointCount = points.estimatePointCount(onePointMatchVisitor); + assertTrue(""+pointCount, + pointCount == (maxPointsInLeafNode + 1) / 2 || // common case + pointCount == 2*((maxPointsInLeafNode + 1) / 2)); // if the point is a split value + + final long docCount = points.estimateDocCount(onePointMatchVisitor); + + if (multiValues) { + assertEquals(docCount, (long) (docCount * (1d - Math.pow( (numDocs - pointCount) / points.size() , points.size() / docCount)))); + } else { + assertEquals(Math.min(pointCount, numDocs), docCount); + } + r.close(); + dir.close(); + } + + // The tree is always balanced in the N dims case, and leaves are + // not all full so things are a bit different + public void testEstimatePointCount2Dims() throws IOException { + Directory dir = newDirectory(); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig()); + byte[][] pointValue = new byte[2][]; + pointValue[0] = new byte[3]; + pointValue[1] = new byte[3]; + byte[][] uniquePointValue = new byte[2][]; + uniquePointValue[0] = new byte[3]; + uniquePointValue[1] = new byte[3]; + random().nextBytes(uniquePointValue[0]); + random().nextBytes(uniquePointValue[1]); + final int numDocs = TEST_NIGHTLY? 
atLeast(10000) : atLeast(1000); // in nightly, make sure we have several leaves + final boolean multiValues = random().nextBoolean(); + for (int i = 0; i < numDocs; ++i) { + Document doc = new Document(); + if (i == numDocs / 2) { + doc.add(new BinaryPoint("f", uniquePointValue)); + } else { + final int numValues = (multiValues) ? TestUtil.nextInt(random(), 2, 100) : 1; + for (int j = 0; j < numValues; j ++) { + do { + random().nextBytes(pointValue[0]); + random().nextBytes(pointValue[1]); + } while (Arrays.equals(pointValue[0], uniquePointValue[0]) || Arrays.equals(pointValue[1], uniquePointValue[1])); + doc.add(new BinaryPoint("f", pointValue)); + } + } + w.addDocument(doc); + } + w.forceMerge(1); + final IndexReader r = DirectoryReader.open(w); + w.close(); + final LeafReader lr = getOnlyLeafReader(r); + PointValues points = lr.getPointValues("f"); + + IntersectVisitor allPointsVisitor = new IntersectVisitor() { + @Override + public void visit(int docID, byte[] packedValue) throws IOException {} + + @Override + public void visit(int docID) throws IOException {} + + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + return Relation.CELL_INSIDE_QUERY; + } + }; + + // If all points match, then the point count is numLeaves * maxPointsInLeafNode + final int numLeaves = (int) Math.ceil((double) points.size() / maxPointsInLeafNode); + + assertEquals(numLeaves * maxPointsInLeafNode, points.estimatePointCount(allPointsVisitor)); + assertEquals(numDocs, points.estimateDocCount(allPointsVisitor)); + + IntersectVisitor noPointsVisitor = new IntersectVisitor() { + @Override + public void visit(int docID, byte[] packedValue) throws IOException {} + + @Override + public void visit(int docID) throws IOException {} + + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + return Relation.CELL_OUTSIDE_QUERY; + } + }; + + // Return 0 if no points match + assertEquals(0, points.estimatePointCount(noPointsVisitor)); + assertEquals(0, points.estimateDocCount(noPointsVisitor)); + + IntersectVisitor onePointMatchVisitor = new IntersectVisitor() { + @Override + public void visit(int docID, byte[] packedValue) throws IOException {} + + @Override + public void visit(int docID) throws IOException {} + + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + for (int dim = 0; dim < 2; ++dim) { + if (Arrays.compareUnsigned(uniquePointValue[dim], 0, 3, maxPackedValue, dim * 3, dim * 3 + 3) > 0 || + Arrays.compareUnsigned(uniquePointValue[dim], 0, 3, minPackedValue, dim * 3, dim * 3 + 3) < 0) { + return Relation.CELL_OUTSIDE_QUERY; + } + } + return Relation.CELL_CROSSES_QUERY; + } + }; + + final long pointCount = points.estimatePointCount(onePointMatchVisitor); + // The number of matches needs to be multiple of count per leaf + final long countPerLeaf = (maxPointsInLeafNode + 1) / 2; + assertTrue(""+pointCount, pointCount % countPerLeaf == 0); + // in extreme cases, a point can be be shared by 4 leaves + assertTrue(""+pointCount, pointCount / countPerLeaf <= 4 && pointCount / countPerLeaf >= 1); + + final long docCount = points.estimateDocCount(onePointMatchVisitor); + if (multiValues) { + assertEquals(docCount, (long) (docCount * (1d - Math.pow( (numDocs - pointCount) / points.size() , points.size() / docCount)))); + } else { + assertEquals(Math.min(pointCount, numDocs), docCount); + } + r.close(); + dir.close(); + } + + public void testDocCountEdgeCases() { + PointValues values = getPointValues(Long.MAX_VALUE, 1, 
Long.MAX_VALUE); + long docs = values.estimateDocCount(null); + assertEquals(1, docs); + values = getPointValues(Long.MAX_VALUE, 1, 1); + docs = values.estimateDocCount(null); + assertEquals(1, docs); + values = getPointValues(Long.MAX_VALUE, Integer.MAX_VALUE, Long.MAX_VALUE); + docs = values.estimateDocCount(null); + assertEquals(Integer.MAX_VALUE, docs); + values = getPointValues(Long.MAX_VALUE, Integer.MAX_VALUE, Long.MAX_VALUE / 2); + docs = values.estimateDocCount(null); + assertEquals(Integer.MAX_VALUE, docs); + values = getPointValues(Long.MAX_VALUE, Integer.MAX_VALUE, 1); + docs = values.estimateDocCount(null); + assertEquals(1, docs); + } + + public void testRandomDocCount() { + for (int i = 0; i < 100; i++) { + long size = TestUtil.nextLong(random(), 1, Long.MAX_VALUE); + int maxDoc = (size > Integer.MAX_VALUE) ? Integer.MAX_VALUE : Math.toIntExact(size); + int docCount = TestUtil.nextInt(random(), 1, maxDoc); + long estimatedPointCount = TestUtil.nextLong(random(), 0, size); + PointValues values = getPointValues(size, docCount, estimatedPointCount); + long docs = values.estimateDocCount(null); + assertTrue(docs <= estimatedPointCount); + assertTrue(docs <= maxDoc); + assertTrue(docs >= estimatedPointCount / (size/docCount)); + } + } + + + private PointValues getPointValues(long size, int docCount, long estimatedPointCount) { + return new PointValues() { + @Override + public void intersect(IntersectVisitor visitor) { + throw new UnsupportedOperationException(); + } + + @Override + public long estimatePointCount(IntersectVisitor visitor) { + return estimatedPointCount; + } + + @Override + public byte[] getMinPackedValue() throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public byte[] getMaxPackedValue() throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int getNumDimensions() throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int getNumIndexDimensions() throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int getBytesPerDimension() throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public long size() { + return size; + } + + @Override + public int getDocCount() { + return docCount; + } + }; + } + +} diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene86/TestLucene86SegmentInfoFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene86/TestLucene86SegmentInfoFormat.java new file mode 100644 index 000000000000..e462d3f4ac45 --- /dev/null +++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene86/TestLucene86SegmentInfoFormat.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.lucene.codecs.lucene86; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.index.BaseSegmentInfoFormatTestCase; +import org.apache.lucene.util.TestUtil; +import org.apache.lucene.util.Version; + +public class TestLucene86SegmentInfoFormat extends BaseSegmentInfoFormatTestCase { + + @Override + protected Version[] getVersions() { + return new Version[] { Version.LATEST }; + } + + @Override + protected Codec getCodec() { + return TestUtil.getDefaultCodec(); + } + +} diff --git a/lucene/core/src/test/org/apache/lucene/document/TestDocument.java b/lucene/core/src/test/org/apache/lucene/document/TestDocument.java index 50c1ed0a919a..ac5c43452b54 100644 --- a/lucene/core/src/test/org/apache/lucene/document/TestDocument.java +++ b/lucene/core/src/test/org/apache/lucene/document/TestDocument.java @@ -214,14 +214,10 @@ public void testGetValuesForIndexedDocument() throws Exception { public void testGetValues() { Document doc = makeDocumentWithFields(); - assertEquals(new String[] {"test1", "test2"}, - doc.getValues("keyword")); - assertEquals(new String[] {"test1", "test2"}, - doc.getValues("text")); - assertEquals(new String[] {"test1", "test2"}, - doc.getValues("unindexed")); - assertEquals(new String[0], - doc.getValues("nope")); + assertArrayEquals(new String[] {"test1", "test2"}, doc.getValues("keyword")); + assertArrayEquals(new String[] {"test1", "test2"}, doc.getValues("text")); + assertArrayEquals(new String[] {"test1", "test2"}, doc.getValues("unindexed")); + assertArrayEquals(new String[0], doc.getValues("nope")); } public void testPositionIncrementMultiFields() throws Exception { diff --git a/lucene/core/src/test/org/apache/lucene/geo/TestTessellator.java b/lucene/core/src/test/org/apache/lucene/geo/TestTessellator.java index 0d1f270776e5..2410ba13a5db 100644 --- a/lucene/core/src/test/org/apache/lucene/geo/TestTessellator.java +++ b/lucene/core/src/test/org/apache/lucene/geo/TestTessellator.java @@ -573,6 +573,14 @@ public void testComplexPolygon40() throws Exception { } } + public void testComplexPolygon41() throws Exception { + String wkt = "POLYGON((-1.569137181294115 54.4855283059375, -1.5692505240440333 54.48535373128068, -1.5684753656387294 54.48534438253056, -1.568606793880459 54.485674703738624, -1.5694141387939453 54.48611720532629, -1.569137181294115 54.4855283059375)," + + "(-1.569137181294115 54.4855283059375, -1.5690783030431206 54.48545352137167, -1.5689449291711688 54.48547663706703, -1.569137181294115 54.4855283059375)," + + "(-1.5689449291711688 54.48547663706703, -1.5689437289004642 54.48535482680399, -1.5687730514221028 54.48538045082698, -1.5689449291711688 54.48547663706703)," + + "(-1.5689449291711688 54.48547663706703, -1.5689879483854345 54.485580118416785, -1.5687756358893499 54.485612860811244, -1.568765285875931 54.485496217554285, -1.5689449291711688 54.48547663706703))"; + checkPolygon(wkt); + } + private void checkPolygon(String wkt) throws Exception { Polygon polygon = (Polygon) SimpleWKTShapeParser.parse(wkt); List tessellation = Tessellator.tessellate(polygon); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectBitFlips.java b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectBitFlips.java new file mode 100644 index 000000000000..f63387b74d03 --- /dev/null +++ b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectBitFlips.java @@ -0,0 +1,141 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.index; + + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; + +import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.store.BaseDirectoryWrapper; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.LineFileDocs; +import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.util.LuceneTestCase.AwaitsFix; +import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems; +import org.apache.lucene.util.TestUtil; + +/** + * Test that the default codec detects bit flips at open or checkIntegrity time. + */ +@SuppressFileSystems("ExtrasFS") +@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-9356") +public class TestAllFilesDetectBitFlips extends LuceneTestCase { + + public void test() throws Exception { + doTest(false); + } + + public void testCFS() throws Exception { + doTest(true); + } + + public void doTest(boolean cfs) throws Exception { + Directory dir = newDirectory(); + + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); + conf.setCodec(TestUtil.getDefaultCodec()); + + if (cfs == false) { + conf.setUseCompoundFile(false); + conf.getMergePolicy().setNoCFSRatio(0.0); + } + + RandomIndexWriter riw = new RandomIndexWriter(random(), dir, conf); + // Use LineFileDocs so we (hopefully) get most Lucene features + // tested, e.g. 
IntPoint was recently added to it: + LineFileDocs docs = new LineFileDocs(random()); + for (int i = 0; i < 100; i++) { + riw.addDocument(docs.nextDoc()); + if (random().nextInt(7) == 0) { + riw.commit(); + } + if (random().nextInt(20) == 0) { + riw.deleteDocuments(new Term("docid", Integer.toString(i))); + } + if (random().nextInt(15) == 0) { + riw.updateNumericDocValue(new Term("docid", Integer.toString(i)), "docid_intDV", Long.valueOf(i)); + } + } + if (TEST_NIGHTLY == false) { + riw.forceMerge(1); + } + riw.close(); + checkBitFlips(dir); + dir.close(); + } + + private void checkBitFlips(Directory dir) throws IOException { + for(String name : dir.listAll()) { + if (name.equals(IndexWriter.WRITE_LOCK_NAME) == false) { + corruptFile(dir, name); + } + } + } + + private void corruptFile(Directory dir, String victim) throws IOException { + try (BaseDirectoryWrapper dirCopy = newDirectory()) { + dirCopy.setCheckIndexOnClose(false); + + long victimLength = dir.fileLength(victim); + long flipOffset = TestUtil.nextLong(random(), 0, victimLength - 1); + + if (VERBOSE) { + System.out.println("TEST: now corrupt file " + victim + " by changing byte at offset " + flipOffset + " (length= " + victimLength + ")"); + } + + for(String name : dir.listAll()) { + if (name.equals(victim) == false) { + dirCopy.copyFrom(dir, name, name, IOContext.DEFAULT); + } else { + try (IndexOutput out = dirCopy.createOutput(name, IOContext.DEFAULT); + IndexInput in = dir.openInput(name, IOContext.DEFAULT)) { + out.copyBytes(in, flipOffset); + out.writeByte((byte) (in.readByte() + TestUtil.nextInt(random(), 0x01, 0xFF))); + out.copyBytes(in, victimLength - flipOffset - 1); + } + try (IndexInput in = dirCopy.openInput(name, IOContext.DEFAULT)) { + try { + CodecUtil.checksumEntireFile(in); + System.out.println("TEST: changing a byte in " + victim + " did not update the checksum)"); + return; + } catch (CorruptIndexException e) { + // ok + } + } + } + dirCopy.sync(Collections.singleton(name)); + } + + // corruption must be detected + expectThrowsAnyOf(Arrays.asList(CorruptIndexException.class, IndexFormatTooOldException.class, IndexFormatTooNewException.class), + () -> { + try (IndexReader reader = DirectoryReader.open(dirCopy)) { + for (LeafReaderContext context : reader.leaves()) { + context.reader().checkIntegrity(); + } + } + } + ); + } + } +} diff --git a/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java b/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java index 050aed7864a9..a31194caf7ac 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java @@ -113,9 +113,13 @@ protected boolean isOK(Throwable th) { ioe.printStackTrace(System.out); } failure.clearDoFail(); - assertTrue(writer.isClosed()); + // make sure we are closed or closing - if we are unlucky a merge does + // the actual closing for us. this is rare but might happen since the + // tragicEvent is checked by IFD and that might throw during a merge + expectThrows(AlreadyClosedException.class, writer::ensureOpen); // Abort should have closed the deleter: assertTrue(writer.isDeleterClosed()); + writer.close(); // now wait for the close to actually happen if a merge thread did the close. 
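+ // (ensureOpen throws AlreadyClosedException both once the writer is fully
+ // closed and while a tragic event is still tearing it down, so the
+ // expectThrows above also covers the race where a concurrent merge thread
+ // performs the actual close)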
break outer; } } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java b/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java index 4b82800fd188..7fdad3ba2de9 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java @@ -538,7 +538,7 @@ public CodecReader wrapForMerge(CodecReader reader) throws IOException { } @Override - public void mergeFinished() throws IOException { + public void mergeFinished(boolean success) throws IOException { Throwable th = null; for (ParallelLeafReader r : parallelReaders) { try { diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java index ae944de1bc32..042e2a80ff58 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java @@ -2146,7 +2146,7 @@ public void testBadSort() throws Exception { IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> { iwc.setIndexSort(Sort.RELEVANCE); }); - assertEquals("invalid SortField type: must be one of [STRING, INT, FLOAT, LONG, DOUBLE] but got: ", expected.getMessage()); + assertEquals("Cannot sort index with sort field ", expected.getMessage()); } // you can't change the index sort on an existing index: @@ -2498,6 +2498,7 @@ public void testRandom3() throws Exception { System.out.println(" float=" + docValues.floatValue); System.out.println(" double=" + docValues.doubleValue); System.out.println(" bytes=" + new BytesRef(docValues.bytesValue)); + System.out.println(" mvf=" + Arrays.toString(docValues.floatValues)); } Document doc = new Document(); @@ -2741,7 +2742,7 @@ public void testWrongSortFieldType() throws Exception { Document doc = new Document(); doc.add(dvs.get(j)); IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> w.addDocument(doc)); - assertThat(exc.getMessage(), containsString("invalid doc value type")); + assertThat(exc.getMessage(), containsString("expected field [field] to be ")); doc.clear(); doc.add(dvs.get(i)); w.addDocument(doc); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java index 8fb1ce5d75f7..5c6164a254ca 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java @@ -344,7 +344,7 @@ private static int getSegmentCount(Directory dir) throws IOException { // Make sure it's OK to change RAM buffer size and // maxBufferedDocs in a write session public void testChangingRAMBuffer() throws IOException { - Directory dir = newDirectory(); + Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); writer.getConfig().setMaxBufferedDocs(10); writer.getConfig().setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH); @@ -607,7 +607,7 @@ public void testVariableSchema() throws Exception { doc.add(newField("content4", contents, customType)); type = customType; } else - type = TextField.TYPE_NOT_STORED; + type = TextField.TYPE_NOT_STORED; doc.add(newTextField("content1", contents, Field.Store.NO)); doc.add(newField("content3", "", customType)); doc.add(newField("content5", "", type)); @@ -663,13 +663,13 @@ public void testEmptyFieldName() throws 
IOException { writer.close(); dir.close(); } - + public void testEmptyFieldNameTerms() throws IOException { Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); doc.add(newTextField("", "a b c", Field.Store.NO)); - writer.addDocument(doc); + writer.addDocument(doc); writer.close(); DirectoryReader reader = DirectoryReader.open(dir); LeafReader subreader = getOnlyLeafReader(reader); @@ -681,7 +681,7 @@ public void testEmptyFieldNameTerms() throws IOException { reader.close(); dir.close(); } - + public void testEmptyFieldNameWithEmptyTerm() throws IOException { Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); @@ -690,7 +690,7 @@ public void testEmptyFieldNameWithEmptyTerm() throws IOException { doc.add(newStringField("", "a", Field.Store.NO)); doc.add(newStringField("", "b", Field.Store.NO)); doc.add(newStringField("", "c", Field.Store.NO)); - writer.addDocument(doc); + writer.addDocument(doc); writer.close(); DirectoryReader reader = DirectoryReader.open(dir); LeafReader subreader = getOnlyLeafReader(reader); @@ -834,7 +834,7 @@ public void testDeadlock() throws Exception { customType.setStoreTermVectors(true); customType.setStoreTermVectorPositions(true); customType.setStoreTermVectorOffsets(true); - + doc.add(newField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType)); writer.addDocument(doc); writer.addDocument(doc); @@ -922,7 +922,7 @@ public void run() { // open/close slowly sometimes dir.setUseSlowOpenClosers(true); - + // throttle a little dir.setThrottling(MockDirectoryWrapper.Throttling.SOMETIMES); @@ -1148,7 +1148,7 @@ public void testIndexStoreCombos() throws Exception { FieldType customType = new FieldType(StoredField.TYPE); customType.setTokenized(true); - + Field f = new Field("binary", b, 10, 17, customType); // TODO: this is evil, changing the type after creating the field: customType.setIndexOptions(IndexOptions.DOCS); @@ -1157,7 +1157,7 @@ public void testIndexStoreCombos() throws Exception { f.setTokenStream(doc1field1); FieldType customType2 = new FieldType(TextField.TYPE_STORED); - + Field f2 = newField("string", "value", customType2); final MockTokenizer doc1field2 = new MockTokenizer(MockTokenizer.WHITESPACE, false); doc1field2.setReader(new StringReader("doc1field2")); @@ -1233,7 +1233,7 @@ public void testNoDocsIndex() throws Throwable { public void testDeleteUnusedFiles() throws Exception { assumeFalse("test relies on exact filenames", Codec.getDefault() instanceof SimpleTextCodec); assumeWorkingMMapOnWindows(); - + for(int iter=0;iter<2;iter++) { // relies on windows semantics Path path = createTempDir(); @@ -1250,7 +1250,7 @@ public void testDeleteUnusedFiles() throws Exception { } MergePolicy mergePolicy = newLogMergePolicy(true); - + // This test expects all of its segments to be in CFS mergePolicy.setNoCFSRatio(1.0); mergePolicy.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); @@ -1338,7 +1338,7 @@ public void testDeleteUnusedFiles2() throws Exception { customType.setStoreTermVectors(true); customType.setStoreTermVectorPositions(true); customType.setStoreTermVectorOffsets(true); - + doc.add(newField("c", "val", customType)); writer.addDocument(doc); writer.commit(); @@ -1379,7 +1379,7 @@ public void testEmptyDirRollback() throws Exception { // indexed, flushed (but not committed) and then IW rolls back, then no // files are left in the Directory. 
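// (that is, rollback is expected to delete whatever files the uncommitted
// flush created, leaving the directory essentially as it was before the
// writer was opened)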
Directory dir = newDirectory(); - + String[] origFiles = dir.listAll(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) .setMaxBufferedDocs(2) @@ -1409,8 +1409,8 @@ public void testEmptyDirRollback() throws Exception { // Adding just one document does not call flush yet. int computedExtraFileCount = 0; for (String file : dir.listAll()) { - if (IndexWriter.WRITE_LOCK_NAME.equals(file) || - file.startsWith(IndexFileNames.SEGMENTS) || + if (IndexWriter.WRITE_LOCK_NAME.equals(file) || + file.startsWith(IndexFileNames.SEGMENTS) || IndexFileNames.CODEC_FILE_PATTERN.matcher(file).matches()) { if (file.lastIndexOf('.') < 0 // don't count stored fields and term vectors in, or any temporary files they might @@ -1458,7 +1458,7 @@ public void testNoUnwantedTVFiles() throws Exception { FieldType customType3 = new FieldType(TextField.TYPE_STORED); customType3.setTokenized(false); customType3.setOmitNorms(true); - + for (int i=0; i<2; i++) { Document doc = new Document(); doc.add(new Field("id", Integer.toString(i)+BIG, customType3)); @@ -1478,7 +1478,7 @@ public void testNoUnwantedTVFiles() throws Exception { SegmentReader sr = (SegmentReader) ctx.reader(); assertFalse(sr.getFieldInfos().hasVectors()); } - + r0.close(); dir.close(); } @@ -1501,7 +1501,7 @@ public StringSplitTokenizer() { @Override public final boolean incrementToken() { - clearAttributes(); + clearAttributes(); if (upto < tokens.length) { termAtt.setEmpty(); termAtt.append(tokens[upto]); @@ -1724,7 +1724,7 @@ public void testPrepareCommitThenRollback2() throws Exception { r.close(); dir.close(); } - + public void testDontInvokeAnalyzerForUnAnalyzedFields() throws Exception { Analyzer analyzer = new Analyzer() { @Override @@ -1759,13 +1759,13 @@ public int getOffsetGap(String fieldName) { w.close(); dir.close(); } - + //LUCENE-1468 -- make sure opening an IndexWriter with // create=true does not remove non-index files - + public void testOtherFiles() throws Throwable { Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); iw.addDocument(new Document()); iw.close(); @@ -1774,15 +1774,15 @@ public void testOtherFiles() throws Throwable { IndexOutput out = dir.createOutput("myrandomfile", newIOContext(random())); out.writeByte((byte) 42); out.close(); - + new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))).close(); - + assertTrue(slowFileExists(dir, "myrandomfile")); } finally { dir.close(); } } - + // LUCENE-3849 public void testStopwordsPosIncHole() throws Exception { Directory dir = newDirectory(); @@ -1811,7 +1811,7 @@ protected TokenStreamComponents createComponents(String fieldName) { ir.close(); dir.close(); } - + // LUCENE-3849 public void testStopwordsPosIncHole2() throws Exception { // use two stopfilters for testing here @@ -1843,23 +1843,23 @@ protected TokenStreamComponents createComponents(String fieldName) { ir.close(); dir.close(); } - + // LUCENE-4575 public void testCommitWithUserDataOnly() throws Exception { Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(null)); writer.commit(); // first commit to complete IW create transaction. 
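// setLiveCommitData attaches user-supplied key/value metadata to the *next* commit;
// readers recover it through IndexCommit.getUserData(). A minimal usage sketch (the
// "checkpoint" key is illustrative only, not part of this test):
//
//   writer.setLiveCommitData(Map.of("checkpoint", "42").entrySet());
//   writer.commit();
//   try (DirectoryReader r = DirectoryReader.open(dir)) {
//     assertEquals("42", r.getIndexCommit().getUserData().get("checkpoint"));
//   }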
- + // this should store the commit data, even though no other changes were made writer.setLiveCommitData(new HashMap() {{ put("key", "value"); }}.entrySet()); writer.commit(); - + DirectoryReader r = DirectoryReader.open(dir); assertEquals("value", r.getIndexCommit().getUserData().get("key")); r.close(); - + // now check setCommitData and prepareCommit/commit sequence writer.setLiveCommitData(new HashMap() {{ put("key", "value1"); @@ -1873,7 +1873,7 @@ public void testCommitWithUserDataOnly() throws Exception { r = DirectoryReader.open(dir); assertEquals("value1", r.getIndexCommit().getUserData().get("key")); r.close(); - + // now should commit the second commitData - there was a bug where // IndexWriter.finishCommit overrode the second commitData writer.commit(); @@ -1881,7 +1881,7 @@ public void testCommitWithUserDataOnly() throws Exception { assertEquals("IndexWriter.finishCommit may have overridden the second commitData", "value2", r.getIndexCommit().getUserData().get("key")); r.close(); - + writer.close(); dir.close(); } @@ -1896,7 +1896,7 @@ private Map getLiveCommitData(IndexWriter writer) { } return data; } - + @Test public void testGetCommitData() throws Exception { Directory dir = newDirectory(); @@ -1906,16 +1906,16 @@ public void testGetCommitData() throws Exception { }}.entrySet()); assertEquals("value", getLiveCommitData(writer).get("key")); writer.close(); - + // validate that it's also visible when opening a new IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(null) .setOpenMode(OpenMode.APPEND)); assertEquals("value", getLiveCommitData(writer).get("key")); writer.close(); - + dir.close(); } - + public void testNullAnalyzer() throws IOException { Directory dir = newDirectory(); IndexWriterConfig iwConf = newIndexWriterConfig(null); @@ -1942,7 +1942,7 @@ public void testNullAnalyzer() throws IOException { iw.close(); dir.close(); } - + public void testNullDocument() throws IOException { Directory dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir); @@ -1967,7 +1967,7 @@ public void testNullDocument() throws IOException { iw.close(); dir.close(); } - + public void testNullDocuments() throws IOException { Directory dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir); @@ -1992,7 +1992,7 @@ public void testNullDocuments() throws IOException { iw.close(); dir.close(); } - + public void testIterableFieldThrowsException() throws IOException { Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); @@ -2000,7 +2000,7 @@ public void testIterableFieldThrowsException() throws IOException { int docCount = 0; int docId = 0; Set liveIds = new HashSet<>(); - for (int i = 0; i < iters; i++) { + for (int i = 0; i < iters; i++) { int numDocs = atLeast(4); for (int j = 0; j < numDocs; j++) { String id = Integer.toString(docId++); @@ -2008,7 +2008,7 @@ public void testIterableFieldThrowsException() throws IOException { fields.add(new StringField("id", id, Field.Store.YES)); fields.add(new StringField("foo", TestUtil.randomSimpleString(random()), Field.Store.NO)); docId++; - + boolean success = false; try { w.addDocument(new RandomFailingIterable(fields, random())); @@ -2040,7 +2040,7 @@ public void testIterableFieldThrowsException() throws IOException { w.close(); IOUtils.close(reader, dir); } - + public void testIterableThrowsException() throws IOException { Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new 
MockAnalyzer(random()))); @@ -2088,7 +2088,7 @@ public void testIterableThrowsException() throws IOException { w.close(); IOUtils.close(reader, dir); } - + public void testIterableThrowsException2() throws IOException { Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); @@ -2128,7 +2128,7 @@ public RandomFailingIterable(Iterable list, Random random) { this.list = list; this.failOn = random.nextInt(5); } - + @Override public Iterator iterator() { final Iterator docIter = list.iterator(); @@ -2254,7 +2254,7 @@ public void testHasUncommittedChanges() throws IOException { writer.close(); dir.close(); } - + public void testMergeAllDeleted() throws IOException { Directory dir = newDirectory(); IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); @@ -2477,12 +2477,12 @@ public void testIds() throws Exception { IndexWriter w = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random()))); w.addDocument(new Document()); w.close(); - + SegmentInfos sis = SegmentInfos.readLatestCommit(d); byte[] id1 = sis.getId(); assertNotNull(id1); assertEquals(StringHelper.ID_LENGTH, id1.length); - + byte[] id2 = sis.info(0).info.getId(); byte[] sciId2 = sis.info(0).getId(); assertNotNull(id2); @@ -2514,7 +2514,7 @@ public void testIds() throws Exception { ids.add(id); } } - + public void testEmptyNorm() throws Exception { Directory d = newDirectory(); IndexWriter w = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random()))); @@ -2579,7 +2579,7 @@ public void testNRTSegmentsFile() throws Exception { assertEquals(1, r2.getIndexCommit().getGeneration()); assertEquals("segments_1", r2.getIndexCommit().getSegmentsFileName()); r2.close(); - + // make a change and another commit w.addDocument(new Document()); w.commit(); @@ -2866,7 +2866,7 @@ public void testLeftoverTempFiles() throws Exception { IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random())); IndexWriter w = new IndexWriter(dir, iwc); w.close(); - + IndexOutput out = dir.createTempOutput("_0", "bkd", IOContext.DEFAULT); String tempName = out.getName(); out.close(); @@ -3151,7 +3151,7 @@ public void testSoftUpdateDocuments() throws IOException { expectThrows(IllegalArgumentException.class, () -> { writer.softUpdateDocument(null, new Document(), new NumericDocValuesField("soft_delete", 1)); }); - + expectThrows(IllegalArgumentException.class, () -> { writer.softUpdateDocument(new Term("id", "1"), new Document()); }); @@ -4167,4 +4167,76 @@ public void testSegmentCommitInfoId() throws IOException { } } } + + public void testMergeZeroDocsMergeIsClosedOnce() throws IOException { + LogDocMergePolicy keepAllSegments = new LogDocMergePolicy() { + @Override + public boolean keepFullyDeletedSegment(IOSupplier readerIOSupplier) { + return true; + } + }; + try (Directory dir = newDirectory()) { + try (IndexWriter writer = new IndexWriter(dir, + new IndexWriterConfig().setMergePolicy(new OneMergeWrappingMergePolicy(keepAllSegments, merge -> { + SetOnce onlyFinishOnce = new SetOnce<>(); + return new MergePolicy.OneMerge(merge.segments) { + @Override + public void mergeFinished(boolean success) { + onlyFinishOnce.set(true); + } + }; + })))) { + Document doc = new Document(); + doc.add(new StringField("id", "1", Field.Store.NO)); + writer.addDocument(doc); + writer.flush(); + writer.addDocument(doc); + writer.flush(); + writer.deleteDocuments(new Term("id", "1")); + writer.flush(); + assertEquals(2, writer.getSegmentCount()); + assertEquals(0, 
writer.getDocStats().numDocs); + assertEquals(2, writer.getDocStats().maxDoc); + writer.forceMerge(1); + } + } + } + + public void testMergeOnCommitKeepFullyDeletedSegments() throws Exception { + Directory dir = newDirectory(); + IndexWriterConfig iwc = newIndexWriterConfig(); + iwc.setMaxCommitMergeWaitSeconds(30); + iwc.mergePolicy = new FilterMergePolicy(newMergePolicy()) { + @Override + public boolean keepFullyDeletedSegment(IOSupplier<CodecReader> readerIOSupplier) { + return true; + } + + @Override + public MergeSpecification findFullFlushMerges(MergeTrigger mergeTrigger, + SegmentInfos segmentInfos, + MergeContext mergeContext) { + List<SegmentCommitInfo> fullyDeletedSegments = segmentInfos.asList().stream() + .filter(s -> s.info.maxDoc() - s.getDelCount() == 0) + .collect(Collectors.toList()); + if (fullyDeletedSegments.isEmpty()) { + return null; + } + MergeSpecification spec = new MergeSpecification(); + spec.add(new OneMerge(fullyDeletedSegments)); + return spec; + } + }; + IndexWriter w = new IndexWriter(dir, iwc); + Document d = new Document(); + d.add(new StringField("id", "1", Field.Store.YES)); + w.addDocument(d); + w.commit(); + w.updateDocument(new Term("id", "1"), d); + w.commit(); + try (DirectoryReader reader = w.getReader()) { + assertEquals(1, reader.numDocs()); + } + IOUtils.close(w, dir); + } } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java index bb16884269cc..e7dbfd294da4 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java @@ -702,8 +702,8 @@ private void doTestOperationsOnDiskFull(boolean updates) throws IOException { } dir.close(); - // Try again with 10 more bytes of free space: - diskFree += 10; + // Try again with more bytes of free space: + diskFree += Math.max(10, diskFree >>> 3); } startDir.close(); } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java index ce591a280c6e..2577f6b28ecb 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java @@ -18,17 +18,42 @@ import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexWriterConfig.OpenMode; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.LuceneTestCase; public class TestIndexWriterMergePolicy extends LuceneTestCase { - + + private static final MergePolicy MERGE_ON_COMMIT_POLICY = new LogDocMergePolicy() { + @Override + public MergeSpecification findFullFlushMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos, MergeContext mergeContext) { + // Optimize down to a single segment on commit + if (mergeTrigger == MergeTrigger.COMMIT && segmentInfos.size() > 1) { + List<SegmentCommitInfo> nonMergingSegments = new ArrayList<>(); + for (SegmentCommitInfo sci : segmentInfos) { + if (mergeContext.getMergingSegments().contains(sci) == false) { + nonMergingSegments.add(sci); + } + } + if (nonMergingSegments.size() > 1) { + MergeSpecification mergeSpecification = new MergeSpecification(); +
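// findFullFlushMerges is consulted during the flush that precedes a commit; when the
// trigger is MergeTrigger.COMMIT, IndexWriter waits up to the configured
// setMaxCommitMergeWaitSeconds(...) for the returned merges to finish so the commit
// point can include the merged segment, and simply commits the unmerged segments if
// the merge does not complete in time.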
mergeSpecification.add(new OneMerge(nonMergingSegments)); + return mergeSpecification; + } + } + return null; + } + }; + // Test the normal case public void testNormalCase() throws IOException { Directory dir = newDirectory(); @@ -278,6 +303,50 @@ public void testSetters() { assertSetters(new LogDocMergePolicy()); } + // Test basic semantics of merge on commit + public void testMergeOnCommit() throws IOException { + Directory dir = newDirectory(); + + IndexWriter firstWriter = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(NoMergePolicy.INSTANCE)); + for (int i = 0; i < 5; i++) { + TestIndexWriter.addDoc(firstWriter); + firstWriter.flush(); + } + DirectoryReader firstReader = DirectoryReader.open(firstWriter); + assertEquals(5, firstReader.leaves().size()); + firstReader.close(); + firstWriter.close(); // When this writer closes, it does not merge on commit. + + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(MERGE_ON_COMMIT_POLICY).setMaxCommitMergeWaitSeconds(30); + + + IndexWriter writerWithMergePolicy = new IndexWriter(dir, iwc); + writerWithMergePolicy.commit(); // No changes. Commit doesn't trigger a merge. + + DirectoryReader unmergedReader = DirectoryReader.open(writerWithMergePolicy); + assertEquals(5, unmergedReader.leaves().size()); + unmergedReader.close(); + + TestIndexWriter.addDoc(writerWithMergePolicy); + writerWithMergePolicy.commit(); // Doc added, do merge on commit. + assertEquals(1, writerWithMergePolicy.getSegmentCount()); // + + DirectoryReader mergedReader = DirectoryReader.open(writerWithMergePolicy); + assertEquals(1, mergedReader.leaves().size()); + mergedReader.close(); + + try (IndexReader reader = writerWithMergePolicy.getReader()) { + IndexSearcher searcher = new IndexSearcher(reader); + assertEquals(6, reader.numDocs()); + assertEquals(6, searcher.count(new MatchAllDocsQuery())); + } + + writerWithMergePolicy.close(); + dir.close(); + } + private void assertSetters(MergePolicy lmp) { lmp.setMaxCFSSegmentSizeMB(2.0); assertEquals(2.0, lmp.getMaxCFSSegmentSizeMB(), EPSILON); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java index 5f8650fe0aaa..228c34366db5 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java @@ -809,11 +809,16 @@ private Directory getAssertNoDeletesDirectory(Directory directory) { // Stress test reopen during add/delete public void testDuringAddDelete() throws Exception { Directory dir1 = newDirectory(); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(newLogMergePolicy(2)); + if (TEST_NIGHTLY) { + // if we have a ton of iterations we need to make sure we don't do unnecessary + // extra flushing otherwise we will timeout on nightly + iwc.setRAMBufferSizeMB(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB); + iwc.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH); + } final IndexWriter writer = new IndexWriter( - dir1, - newIndexWriterConfig(new MockAnalyzer(random())) - .setMergePolicy(newLogMergePolicy(2)) - ); + dir1,iwc); // create the index createIndexNoClose(false, "test", writer); @@ -822,7 +827,7 @@ public void testDuringAddDelete() throws Exception { DirectoryReader r = writer.getReader(); final int iters = TEST_NIGHTLY ? 
1000 : 10; - final List excs = Collections.synchronizedList(new ArrayList()); + final List excs = Collections.synchronizedList(new ArrayList<>()); final Thread[] threads = new Thread[numThreads]; final AtomicInteger remainingThreads = new AtomicInteger(numThreads); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestMergePolicy.java b/lucene/core/src/test/org/apache/lucene/index/TestMergePolicy.java new file mode 100644 index 000000000000..e5f5635e1f19 --- /dev/null +++ b/lucene/core/src/test/org/apache/lucene/index/TestMergePolicy.java @@ -0,0 +1,159 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.index; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.util.StringHelper; +import org.apache.lucene.util.TestUtil; +import org.apache.lucene.util.Version; + +public class TestMergePolicy extends LuceneTestCase { + + public void testWaitForOneMerge() throws IOException, InterruptedException { + try (Directory dir = newDirectory()) { + MergePolicy.MergeSpecification ms = createRandomMergeSpecification(dir, 1 + random().nextInt(10)); + for (MergePolicy.OneMerge m : ms.merges) { + assertFalse(m.hasCompletedSuccessfully().isPresent()); + } + Thread t = new Thread(() -> { + try { + for (MergePolicy.OneMerge m : ms.merges) { + m.mergeFinished(true); + } + } catch (IOException e) { + throw new AssertionError(e); + } + }); + t.start(); + assertTrue(ms.await(100, TimeUnit.HOURS)); + for (MergePolicy.OneMerge m : ms.merges) { + assertTrue(m.hasCompletedSuccessfully().get()); + } + t.join(); + } + } + + public void testTimeout() throws IOException, InterruptedException { + try (Directory dir = newDirectory()) { + MergePolicy.MergeSpecification ms = createRandomMergeSpecification(dir, 3); + for (MergePolicy.OneMerge m : ms.merges) { + assertFalse(m.hasCompletedSuccessfully().isPresent()); + } + Thread t = new Thread(() -> { + try { + ms.merges.get(0).mergeFinished(true); + } catch (IOException e) { + throw new AssertionError(e); + } + }); + t.start(); + assertFalse(ms.await(10, TimeUnit.MILLISECONDS)); + assertFalse(ms.merges.get(1).hasCompletedSuccessfully().isPresent()); + t.join(); + } + } + + public void testTimeoutLargeNumberOfMerges() throws IOException, InterruptedException { + try (Directory dir = newDirectory()) { + MergePolicy.MergeSpecification ms = createRandomMergeSpecification(dir, 10000); + for (MergePolicy.OneMerge m : ms.merges) { + 
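// hasCompletedSuccessfully() stays Optional.empty() until mergeFinished(success)
// runs for that OneMerge, after which it carries the success flag; nothing has been
// marked finished yet, so every merge must still be pending at this point.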
assertFalse(m.hasCompletedSuccessfully().isPresent()); + } + AtomicInteger i = new AtomicInteger(0); + AtomicBoolean stop = new AtomicBoolean(false); + Thread t = new Thread(() -> { + while (stop.get() == false) { + try { + ms.merges.get(i.getAndIncrement()).mergeFinished(true); + Thread.sleep(1); + } catch (IOException | InterruptedException e) { + throw new AssertionError(e); + } + } + }); + t.start(); + assertFalse(ms.await(10, TimeUnit.MILLISECONDS)); + stop.set(true); + t.join(); + for (int j = 0; j < ms.merges.size(); j++) { + if (j < i.get()) { + assertTrue(ms.merges.get(j).hasCompletedSuccessfully().get()); + } else { + assertFalse(ms.merges.get(j).hasCompletedSuccessfully().isPresent()); + } + } + } + } + + @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-9408") + public void testFinishTwice() throws IOException { + try (Directory dir = newDirectory()) { + MergePolicy.MergeSpecification spec = createRandomMergeSpecification(dir, 1); + MergePolicy.OneMerge oneMerge = spec.merges.get(0); + oneMerge.mergeFinished(true); + expectThrows(IllegalStateException.class, () -> oneMerge.mergeFinished(false)); + } + } + + public void testTotalMaxDoc() throws IOException { + try (Directory dir = newDirectory()) { + MergePolicy.MergeSpecification spec = createRandomMergeSpecification(dir, 1); + int docs = 0; + MergePolicy.OneMerge oneMerge = spec.merges.get(0); + for (SegmentCommitInfo info : oneMerge.segments) { + docs += info.info.maxDoc(); + } + assertEquals(docs, oneMerge.totalMaxDoc); + } + } + + private static MergePolicy.MergeSpecification createRandomMergeSpecification(Directory dir, int numMerges) { + MergePolicy.MergeSpecification ms = new MergePolicy.MergeSpecification(); + for (int ii = 0; ii < numMerges; ++ii) { + final SegmentInfo si = new SegmentInfo( + dir, // dir + Version.LATEST, // version + Version.LATEST, // min version + TestUtil.randomSimpleString(random()), // name + random().nextInt(1000), // maxDoc + random().nextBoolean(), // isCompoundFile + null, // codec + Collections.emptyMap(), // diagnostics + TestUtil.randomSimpleString(// id + random(), + StringHelper.ID_LENGTH, + StringHelper.ID_LENGTH).getBytes(StandardCharsets.US_ASCII), + Collections.emptyMap(), // attributes + null /* indexSort */); + final List segments = new LinkedList(); + segments.add(new SegmentCommitInfo(si, 0, 0, 0, 0, 0, StringHelper.randomId())); + ms.add(new MergePolicy.OneMerge(segments)); + } + return ms; + } +} diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java index ee778ed90027..d982953a2f62 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java @@ -396,7 +396,7 @@ public void testIllegalTooManyDimensions() throws Exception { public void testDifferentCodecs1() throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random())); - iwc.setCodec(Codec.forName("Lucene84")); + iwc.setCodec(Codec.forName("Lucene86")); IndexWriter w = new IndexWriter(dir, iwc); Document doc = new Document(); doc.add(new IntPoint("int", 1)); @@ -427,7 +427,7 @@ public void testDifferentCodecs2() throws Exception { w.close(); iwc = new IndexWriterConfig(new MockAnalyzer(random())); - iwc.setCodec(Codec.forName("Lucene84")); + iwc.setCodec(Codec.forName("Lucene86")); w = new IndexWriter(dir, iwc); doc = new Document(); doc.add(new IntPoint("int", 1)); diff 
--git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java index 19d821481e00..4570f0906001 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java @@ -18,15 +18,21 @@ import org.apache.lucene.codecs.Codec; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.mockfile.ExtrasFS; import org.apache.lucene.search.Sort; import org.apache.lucene.store.BaseDirectoryWrapper; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.StringHelper; +import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.Version; import java.io.IOException; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -178,5 +184,64 @@ public void testIDChangesOnAdvance() throws IOException { assertEquals("clone changed but shouldn't", StringHelper.idToString(id), StringHelper.idToString(clone.getId())); } } + + public void testBitFlippedTriggersCorruptIndexException() throws IOException { + BaseDirectoryWrapper dir = newDirectory(); + dir.setCheckIndexOnClose(false); + byte id[] = StringHelper.randomId(); + Codec codec = Codec.getDefault(); + + SegmentInfos sis = new SegmentInfos(Version.LATEST.major); + SegmentInfo info = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "_0", 1, false, Codec.getDefault(), + Collections.emptyMap(), id, Collections.emptyMap(), null); + info.setFiles(Collections.emptySet()); + codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT); + SegmentCommitInfo commitInfo = new SegmentCommitInfo(info, 0, 0, -1, -1, -1, StringHelper.randomId()); + sis.add(commitInfo); + + info = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "_1", 1, false, Codec.getDefault(), + Collections.emptyMap(), id, Collections.emptyMap(), null); + info.setFiles(Collections.emptySet()); + codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT); + commitInfo = new SegmentCommitInfo(info, 0, 0,-1, -1, -1, StringHelper.randomId()); + sis.add(commitInfo); + + sis.commit(dir); + + BaseDirectoryWrapper corruptDir = newDirectory(); + corruptDir.setCheckIndexOnClose(false); + boolean corrupt = false; + for (String file : dir.listAll()) { + if (file.startsWith(IndexFileNames.SEGMENTS)) { + try (IndexInput in = dir.openInput(file, IOContext.DEFAULT); + IndexOutput out = corruptDir.createOutput(file, IOContext.DEFAULT)) { + final long corruptIndex = TestUtil.nextLong(random(), 0, in.length() - 1); + out.copyBytes(in, corruptIndex); + final int b = Byte.toUnsignedInt(in.readByte()) + TestUtil.nextInt(random(), 0x01, 0xff); + out.writeByte((byte) b); + out.copyBytes(in, in.length() - in.getFilePointer()); + } + try (IndexInput in = corruptDir.openInput(file, IOContext.DEFAULT)) { + CodecUtil.checksumEntireFile(in); + if (VERBOSE) { + System.out.println("TEST: Altering the file did not update the checksum, aborting..."); + } + return; + } catch (CorruptIndexException e) { + // ok + } + corrupt = true; + } else if (ExtrasFS.isExtra(file) == false) { + corruptDir.copyFrom(dir, file, file, IOContext.DEFAULT); + } + } + assertTrue("No segments file found", corrupt); + + expectThrowsAnyOf( + Arrays.asList(CorruptIndexException.class, IndexFormatTooOldException.class, 
IndexFormatTooNewException.class), + () -> SegmentInfos.readLatestCommit(corruptDir)); + dir.close(); + corruptDir.close(); + } } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestTermsHashPerField.java b/lucene/core/src/test/org/apache/lucene/index/TestTermsHashPerField.java new file mode 100644 index 000000000000..a8a879bf5c10 --- /dev/null +++ b/lucene/core/src/test/org/apache/lucene/index/TestTermsHashPerField.java @@ -0,0 +1,209 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.index; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; + +import com.carrotsearch.randomizedtesting.generators.RandomPicks; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; +import org.apache.lucene.util.ByteBlockPool; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.Counter; +import org.apache.lucene.util.IntBlockPool; +import org.apache.lucene.util.LuceneTestCase; + +public class TestTermsHashPerField extends LuceneTestCase { + + private static TermsHashPerField createNewHash(AtomicInteger newCalled, AtomicInteger addCalled) { + IntBlockPool intBlockPool = new IntBlockPool(); + ByteBlockPool byteBlockPool = new ByteBlockPool(new ByteBlockPool.DirectAllocator()); + ByteBlockPool termBlockPool = new ByteBlockPool(new ByteBlockPool.DirectAllocator()); + + TermsHashPerField hash = new TermsHashPerField(1, intBlockPool, byteBlockPool, termBlockPool, Counter.newCounter(), + null, "testfield", IndexOptions.DOCS_AND_FREQS) { + + private FreqProxTermsWriterPerField.FreqProxPostingsArray freqProxPostingsArray; + + @Override + void newTerm(int termID, int docID) { + newCalled.incrementAndGet(); + FreqProxTermsWriterPerField.FreqProxPostingsArray postings = freqProxPostingsArray; + postings.lastDocIDs[termID] = docID; + postings.lastDocCodes[termID] = docID << 1; + postings.termFreqs[termID] = 1; + } + + @Override + void addTerm(int termID, int docID) { + addCalled.incrementAndGet(); + FreqProxTermsWriterPerField.FreqProxPostingsArray postings = freqProxPostingsArray; + if (docID != postings.lastDocIDs[termID]) { + if (1 == postings.termFreqs[termID]) { + writeVInt(0, postings.lastDocCodes[termID]|1); + } else { + writeVInt(0, postings.lastDocCodes[termID]); + writeVInt(0, postings.termFreqs[termID]); + } + postings.termFreqs[termID] = 1; + postings.lastDocCodes[termID] = (docID - postings.lastDocIDs[termID]) << 1; + postings.lastDocIDs[termID] = docID; + } else { + postings.termFreqs[termID] = Math.addExact(postings.termFreqs[termID], 1); + } + } + + @Override + 
void newPostingsArray() { + freqProxPostingsArray = (FreqProxTermsWriterPerField.FreqProxPostingsArray) postingsArray; + } + + @Override + ParallelPostingsArray createPostingsArray(int size) { + return new FreqProxTermsWriterPerField.FreqProxPostingsArray(size, true, false, false); + } + }; + return hash; + } + + boolean assertDocAndFreq(ByteSliceReader reader, FreqProxTermsWriterPerField.FreqProxPostingsArray postingsArray, int prevDoc, int termId, int doc, int frequency) throws IOException { + int docId = prevDoc; + int freq; + boolean eof = reader.eof(); + if (eof) { + docId = postingsArray.lastDocIDs[termId]; + freq = postingsArray.termFreqs[termId]; + } else { + int code = reader.readVInt(); + docId += code >>> 1; + if ((code & 1) != 0) { + freq = 1; + } else { + freq = reader.readVInt(); + } + } + assertEquals("docID mismatch eof: " + eof, doc, docId); + assertEquals("freq mismatch eof: " + eof, frequency, freq); + return eof; + } + + public void testAddAndUpdateTerm() throws IOException { + AtomicInteger newCalled = new AtomicInteger(0); + AtomicInteger addCalled = new AtomicInteger(0); + TermsHashPerField hash = createNewHash(newCalled, addCalled); + hash.start(null, true); + + hash.add(new BytesRef("start"), 0); // tid = 0; + hash.add(new BytesRef("foo"), 0); // tid = 1; + hash.add(new BytesRef("bar"), 0); // tid = 2; + hash.finish(); + hash.add(new BytesRef("bar"), 1); + hash.add(new BytesRef("foobar"), 1); // tid = 3; + hash.add(new BytesRef("bar"), 1); + hash.add(new BytesRef("bar"), 1); + hash.add(new BytesRef("foobar"), 1); + hash.add(new BytesRef("verylongfoobarbaz"), 1); // tid = 4; + hash.finish(); + hash.add(new BytesRef("verylongfoobarbaz"), 2); + hash.add(new BytesRef("boom"), 2); // tid = 5; + hash.finish(); + hash.add(new BytesRef("verylongfoobarbaz"), 3); + hash.add(new BytesRef("end"), 3); // tid = 6; + hash.finish(); + + assertEquals(7, newCalled.get()); + assertEquals(6, addCalled.get()); + final ByteSliceReader reader = new ByteSliceReader(); + hash.initReader(reader, 0, 0); + assertTrue(assertDocAndFreq(reader, (FreqProxTermsWriterPerField.FreqProxPostingsArray) hash.postingsArray, 0, 0, 0, 1)); + hash.initReader(reader, 1, 0); + assertTrue(assertDocAndFreq(reader, (FreqProxTermsWriterPerField.FreqProxPostingsArray) hash.postingsArray, 0, 1, 0, 1)); + hash.initReader(reader, 2, 0); + assertFalse(assertDocAndFreq(reader, (FreqProxTermsWriterPerField.FreqProxPostingsArray) hash.postingsArray, 0, 2, 0, 1)); + assertTrue(assertDocAndFreq(reader, (FreqProxTermsWriterPerField.FreqProxPostingsArray) hash.postingsArray, 2, 2, 1, 3)); + hash.initReader(reader, 3, 0); + assertTrue(assertDocAndFreq(reader, (FreqProxTermsWriterPerField.FreqProxPostingsArray) hash.postingsArray, 0, 3, 1, 2)); + hash.initReader(reader, 4, 0); + assertFalse(assertDocAndFreq(reader, (FreqProxTermsWriterPerField.FreqProxPostingsArray) hash.postingsArray, 0, 4, 1, 1)); + assertFalse(assertDocAndFreq(reader, (FreqProxTermsWriterPerField.FreqProxPostingsArray) hash.postingsArray, 1, 4, 2, 1)); + assertTrue(assertDocAndFreq(reader, (FreqProxTermsWriterPerField.FreqProxPostingsArray) hash.postingsArray, 2, 4, 3, 1)); + hash.initReader(reader, 5, 0); + assertTrue(assertDocAndFreq(reader, (FreqProxTermsWriterPerField.FreqProxPostingsArray) hash.postingsArray, 0, 5, 2, 1)); + hash.initReader(reader, 6, 0); + assertTrue(assertDocAndFreq(reader, (FreqProxTermsWriterPerField.FreqProxPostingsArray) hash.postingsArray, 0, 6, 3, 1)); + } + + public void testAddAndUpdateRandom() throws IOException { + 
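// Randomized counterpart of testAddAndUpdateTerm. The hash built by createNewHash
// encodes postings the way FreqProxTermsWriterPerField does: when a term recurs in a
// later document, the previous document's delta is flushed as a VInt of (delta << 1),
// with bit 0 set iff its frequency was exactly 1 so the freq VInt can be omitted.
// In sketch form (mirrors addTerm above; names refer to the postings arrays there):
//
//   int code = (docID - lastDocIDs[termID]) << 1; // low bit flags freq == 1
//   if (termFreqs[termID] == 1) { writeVInt(0, code | 1); }
//   else { writeVInt(0, code); writeVInt(0, termFreqs[termID]); }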
AtomicInteger newCalled = new AtomicInteger(0); + AtomicInteger addCalled = new AtomicInteger(0); + TermsHashPerField hash = createNewHash(newCalled, addCalled); + hash.start(null, true); + class Posting { + int termId = -1; + final TreeMap<Integer, Integer> docAndFreq = new TreeMap<>(); + } + Map<BytesRef, Posting> postingMap = new HashMap<>(); + int numStrings = 1 + random().nextInt(200); + for (int i = 0; i < numStrings; i++) { + String randomString = RandomStrings.randomRealisticUnicodeOfCodepointLengthBetween(random(), 1, 10); + postingMap.putIfAbsent(new BytesRef(randomString), new Posting()); + } + List<BytesRef> bytesRefs = Arrays.asList(postingMap.keySet().toArray(new BytesRef[0])); + Collections.sort(bytesRefs); + int numDocs = 1 + random().nextInt(200); + int termOrd = 0; + for (int i = 0; i < numDocs; i++) { + int numTerms = 1 + random().nextInt(200); + int doc = i; + for (int j = 0; j < numTerms; j++) { + BytesRef ref = RandomPicks.randomFrom(random(), bytesRefs); + Posting posting = postingMap.get(ref); + if (posting.termId == -1) { + posting.termId = termOrd++; + } + posting.docAndFreq.putIfAbsent(doc, 0); + posting.docAndFreq.compute(doc, (key, oldVal) -> oldVal+1); + hash.add(ref, doc); + } + hash.finish(); + } + List<Posting> values = postingMap.values().stream().filter( x -> x.termId != -1) + .collect(Collectors.toList()); + Collections.shuffle(values, random()); // term order doesn't matter + final ByteSliceReader reader = new ByteSliceReader(); + for (Posting p : values) { + hash.initReader(reader, p.termId, 0); + boolean eof = false; + int prefDoc = 0; + for (Map.Entry<Integer, Integer> entry : p.docAndFreq.entrySet()) { + assertFalse("the reader must not be EOF here", eof); + eof = assertDocAndFreq(reader, (FreqProxTermsWriterPerField.FreqProxPostingsArray) hash.postingsArray, + prefDoc, p.termId, entry.getKey(), entry.getValue()); + prefDoc = entry.getKey(); + } + assertTrue("the last posting must be EOF on the reader", eof); + } + } +} diff --git a/lucene/core/src/test/org/apache/lucene/search/MultiCollectorTest.java b/lucene/core/src/test/org/apache/lucene/search/MultiCollectorTest.java index c3b4f42650ac..80a5a9a99583 100644 --- a/lucene/core/src/test/org/apache/lucene/search/MultiCollectorTest.java +++ b/lucene/core/src/test/org/apache/lucene/search/MultiCollectorTest.java @@ -163,4 +163,176 @@ public void testCacheScoresIfNecessary() throws IOException { reader.close(); dir.close(); } + + public void testScorerWrappingForTopScores() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter iw = new RandomIndexWriter(random(), dir); + iw.addDocument(new Document()); + DirectoryReader reader = iw.getReader(); + iw.close(); + final LeafReaderContext ctx = reader.leaves().get(0); + Collector c1 = collector(ScoreMode.TOP_SCORES, MultiCollector.MinCompetitiveScoreAwareScorable.class); + Collector c2 = collector(ScoreMode.TOP_SCORES, MultiCollector.MinCompetitiveScoreAwareScorable.class); + MultiCollector.wrap(c1, c2).getLeafCollector(ctx).setScorer(new ScoreAndDoc()); + + c1 = collector(ScoreMode.TOP_SCORES, ScoreCachingWrappingScorer.class); + c2 = collector(ScoreMode.COMPLETE, ScoreCachingWrappingScorer.class); + MultiCollector.wrap(c1, c2).getLeafCollector(ctx).setScorer(new ScoreAndDoc()); + + reader.close(); + dir.close(); + } + + public void testMinCompetitiveScore() throws IOException { + float[] currentMinScores = new float[3]; + float[] minCompetitiveScore = new float[1]; + Scorable scorer = new Scorable() { + + @Override + public float score() throws IOException { + return 0; + } + + @Override + public int
docID() { + return 0; + } + + @Override + public void setMinCompetitiveScore(float minScore) throws IOException { + minCompetitiveScore[0] = minScore; + } + }; + Scorable s0 = new MultiCollector.MinCompetitiveScoreAwareScorable(scorer, 0, currentMinScores); + Scorable s1 = new MultiCollector.MinCompetitiveScoreAwareScorable(scorer, 1, currentMinScores); + Scorable s2 = new MultiCollector.MinCompetitiveScoreAwareScorable(scorer, 2, currentMinScores); + assertEquals(0f, minCompetitiveScore[0], 0); + s0.setMinCompetitiveScore(0.5f); + assertEquals(0f, minCompetitiveScore[0], 0); + s1.setMinCompetitiveScore(0.8f); + assertEquals(0f, minCompetitiveScore[0], 0); + s2.setMinCompetitiveScore(0.3f); + assertEquals(0.3f, minCompetitiveScore[0], 0); + s2.setMinCompetitiveScore(0.1f); + assertEquals(0.3f, minCompetitiveScore[0], 0); + s1.setMinCompetitiveScore(Float.MAX_VALUE); + assertEquals(0.3f, minCompetitiveScore[0], 0); + s2.setMinCompetitiveScore(Float.MAX_VALUE); + assertEquals(0.5f, minCompetitiveScore[0], 0); + s0.setMinCompetitiveScore(Float.MAX_VALUE); + assertEquals(Float.MAX_VALUE, minCompetitiveScore[0], 0); + } + + public void testCollectionTermination() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter iw = new RandomIndexWriter(random(), dir); + iw.addDocument(new Document()); + DirectoryReader reader = iw.getReader(); + iw.close(); + final LeafReaderContext ctx = reader.leaves().get(0); + DummyCollector c1 = new TerminatingDummyCollector(1, ScoreMode.COMPLETE); + DummyCollector c2 = new TerminatingDummyCollector(2, ScoreMode.COMPLETE); + + Collector mc = MultiCollector.wrap(c1, c2); + LeafCollector lc = mc.getLeafCollector(ctx); + lc.setScorer(new ScoreAndDoc()); + lc.collect(0); // OK + assertTrue("c1's collect should be called", c1.collectCalled); + assertTrue("c2's collect should be called", c2.collectCalled); + c1.collectCalled = false; + c2.collectCalled = false; + lc.collect(1); // OK, but c1 should terminate + assertFalse("c1 should be removed already", c1.collectCalled); + assertTrue("c2's collect should be called", c2.collectCalled); + c2.collectCalled = false; + + expectThrows(CollectionTerminatedException.class, () -> { + lc.collect(2); + }); + assertFalse("c1 should be removed already", c1.collectCalled); + assertFalse("c2 should be removed already", c2.collectCalled); + + reader.close(); + dir.close(); + } + + public void testSetScorerOnCollectionTerminationSkipNonCompetitive() throws IOException { + doTestSetScorerOnCollectionTermination(true); + } + + public void testSetScorerOnCollectionTerminationSkipNoSkips() throws IOException { + doTestSetScorerOnCollectionTermination(false); + } + + private void doTestSetScorerOnCollectionTermination(boolean allowSkipNonCompetitive) throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter iw = new RandomIndexWriter(random(), dir); + iw.addDocument(new Document()); + DirectoryReader reader = iw.getReader(); + iw.close(); + final LeafReaderContext ctx = reader.leaves().get(0); + + DummyCollector c1 = new TerminatingDummyCollector(1, allowSkipNonCompetitive? ScoreMode.TOP_SCORES : ScoreMode.COMPLETE); + DummyCollector c2 = new TerminatingDummyCollector(2, allowSkipNonCompetitive? 
ScoreMode.TOP_SCORES : ScoreMode.COMPLETE); + + Collector mc = MultiCollector.wrap(c1, c2); + LeafCollector lc = mc.getLeafCollector(ctx); + assertFalse(c1.setScorerCalled); + assertFalse(c2.setScorerCalled); + lc.setScorer(new ScoreAndDoc()); + assertTrue(c1.setScorerCalled); + assertTrue(c2.setScorerCalled); + c1.setScorerCalled = false; + c2.setScorerCalled = false; + lc.collect(0); // OK + + lc.setScorer(new ScoreAndDoc()); + assertTrue(c1.setScorerCalled); + assertTrue(c2.setScorerCalled); + c1.setScorerCalled = false; + c2.setScorerCalled = false; + + lc.collect(1); // OK, but c1 should terminate + lc.setScorer(new ScoreAndDoc()); + assertFalse(c1.setScorerCalled); + assertTrue(c2.setScorerCalled); + c2.setScorerCalled = false; + + expectThrows(CollectionTerminatedException.class, () -> { + lc.collect(2); + }); + lc.setScorer(new ScoreAndDoc()); + assertFalse(c1.setScorerCalled); + assertFalse(c2.setScorerCalled); + + reader.close(); + dir.close(); + } + + private static class TerminatingDummyCollector extends DummyCollector { + + private final int terminateOnDoc; + private final ScoreMode scoreMode; + + public TerminatingDummyCollector(int terminateOnDoc, ScoreMode scoreMode) { + super(); + this.terminateOnDoc = terminateOnDoc; + this.scoreMode = scoreMode; + } + + @Override + public void collect(int doc) throws IOException { + if (doc == terminateOnDoc) { + throw new CollectionTerminatedException(); + } + super.collect(doc); + } + + @Override + public ScoreMode scoreMode() { + return scoreMode; + } + + } + } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java b/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java index 2ccfd9aa8cb7..3400f0e6dd67 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java @@ -96,7 +96,7 @@ public static void beforeClass() throws Exception { IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); // randomized codecs are sometimes too costly for this test: - iwc.setCodec(Codec.forName("Lucene84")); + iwc.setCodec(Codec.forName("Lucene86")); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter writer= new RandomIndexWriter(random(), directory, iwc); // we'll make a ton of docs, disable store/norms/vectors @@ -141,7 +141,7 @@ public static void beforeClass() throws Exception { iwc = newIndexWriterConfig(new MockAnalyzer(random())); // we need docID order to be preserved: // randomized codecs are sometimes too costly for this test: - iwc.setCodec(Codec.forName("Lucene84")); + iwc.setCodec(Codec.forName("Lucene86")); iwc.setMergePolicy(newLogMergePolicy()); try (IndexWriter w = new IndexWriter(singleSegmentDirectory, iwc)) { w.forceMerge(1, true); @@ -167,7 +167,7 @@ public static void beforeClass() throws Exception { iwc = newIndexWriterConfig(new MockAnalyzer(random())); // randomized codecs are sometimes too costly for this test: - iwc.setCodec(Codec.forName("Lucene84")); + iwc.setCodec(Codec.forName("Lucene86")); RandomIndexWriter w = new RandomIndexWriter(random(), dir2, iwc); w.addIndexes(copy); copy.close(); @@ -179,7 +179,7 @@ public static void beforeClass() throws Exception { iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMaxBufferedDocs(TestUtil.nextInt(random(), 50, 1000)); // randomized codecs are sometimes too costly for this test: - iwc.setCodec(Codec.forName("Lucene84")); + iwc.setCodec(Codec.forName("Lucene86")); RandomIndexWriter w = new RandomIndexWriter(random(), dir2, iwc); 
doc = new Document(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestFuzzyQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestFuzzyQuery.java index a95ad39cf59a..4e9bf9dcfdb1 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestFuzzyQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestFuzzyQuery.java @@ -24,6 +24,8 @@ import java.util.HashSet; import java.util.List; import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Supplier; import com.carrotsearch.randomizedtesting.RandomizedTest; import org.apache.lucene.analysis.MockAnalyzer; @@ -36,15 +38,15 @@ import org.apache.lucene.index.MultiReader; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; -import org.apache.lucene.index.Terms; -import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.store.Directory; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IntsRef; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; +import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.apache.lucene.util.automaton.LevenshteinAutomata; import org.apache.lucene.util.automaton.Operations; @@ -222,7 +224,46 @@ public void testFuzziness() throws Exception { reader.close(); directory.close(); } - + + public void testPrefixLengthEqualStringLength() throws Exception { + Directory directory = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), directory); + addDoc("b*a", writer); + addDoc("b*ab", writer); + addDoc("b*abc", writer); + addDoc("b*abcd", writer); + String multibyte = "아프리카코끼리속"; + addDoc(multibyte, writer); + IndexReader reader = writer.getReader(); + IndexSearcher searcher = newSearcher(reader); + writer.close(); + + int maxEdits = 0; + int prefixLength = 3; + FuzzyQuery query = new FuzzyQuery(new Term("field", "b*a"), maxEdits, prefixLength); + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; + assertEquals(1, hits.length); + + maxEdits = 1; + query = new FuzzyQuery(new Term("field", "b*a"), maxEdits, prefixLength); + hits = searcher.search(query, 1000).scoreDocs; + assertEquals(2, hits.length); + + maxEdits = 2; + query = new FuzzyQuery(new Term("field", "b*a"), maxEdits, prefixLength); + hits = searcher.search(query, 1000).scoreDocs; + assertEquals(3, hits.length); + + maxEdits = 1; + prefixLength = multibyte.length() - 1; + query = new FuzzyQuery(new Term("field", multibyte.substring(0, prefixLength)), maxEdits, prefixLength); + hits = searcher.search(query, 1000).scoreDocs; + assertEquals(1, hits.length); + + reader.close(); + directory.close(); + } + public void test2() throws Exception { Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, new MockAnalyzer(random(), MockTokenizer.KEYWORD, false)); @@ -409,7 +450,6 @@ public void testBoostOnlyRewrite() throws Exception { public void testGiga() throws Exception { - MockAnalyzer analyzer = new MockAnalyzer(random()); Directory index = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), index); @@ -441,6 +481,7 @@ public void testGiga() throws Exception { assertEquals(1, hits.length); assertEquals("Giga byte", searcher.doc(hits[0].doc).get("field")); r.close(); + w.close(); index.close(); } @@ -515,54 +556,13 @@ public void 
testErrorMessage() { final String value = randomRealisticMultiByteUnicode(length); FuzzyTermsEnum.FuzzyTermsException expected = expectThrows(FuzzyTermsEnum.FuzzyTermsException.class, () -> { - new FuzzyQuery(new Term("field", value)).getTermsEnum(new Terms() { - @Override - public TermsEnum iterator() { - return TermsEnum.EMPTY; - } - - @Override - public long size() { - return 0; - } - - @Override - public long getSumTotalTermFreq() { - throw new UnsupportedOperationException(); - } - - @Override - public long getSumDocFreq() { - throw new UnsupportedOperationException(); - } - - @Override - public int getDocCount() { - throw new UnsupportedOperationException(); - } - - @Override - public boolean hasFreqs() { - throw new UnsupportedOperationException(); - } - - @Override - public boolean hasOffsets() { - throw new UnsupportedOperationException(); - } - - @Override - public boolean hasPositions() { - throw new UnsupportedOperationException(); - } - - @Override - public boolean hasPayloads() { - throw new UnsupportedOperationException(); - } - }); + new FuzzyAutomatonBuilder(value, 2, 0, true).buildMaxEditAutomaton(); }); assertThat(expected.getMessage(), containsString(value)); + + expected = expectThrows(FuzzyTermsEnum.FuzzyTermsException.class, + () -> new FuzzyAutomatonBuilder(value, 2, 0, true).buildAutomatonSet()); + assertThat(expected.getMessage(), containsString(value)); } private void addDoc(String text, RandomIndexWriter writer) throws IOException { @@ -600,6 +600,7 @@ public void testRandom() throws Exception { w.addDocument(doc); } DirectoryReader r = w.getReader(); + w.close(); //System.out.println("TEST: reader=" + r); IndexSearcher s = newSearcher(r); int iters = atLeast(200); @@ -677,7 +678,7 @@ public void testRandom() throws Exception { } } - IOUtils.close(r, w, dir); + IOUtils.close(r, dir); } private static class TermAndScore implements Comparable { @@ -777,4 +778,31 @@ private static IntsRef toIntsRef(String s) { } return ref; } + + public void testVisitor() { + FuzzyQuery q = new FuzzyQuery(new Term("field", "blob"), 2); + AtomicBoolean visited = new AtomicBoolean(false); + q.visit(new QueryVisitor() { + @Override + public void consumeTermsMatching(Query query, String field, Supplier automaton) { + visited.set(true); + ByteRunAutomaton a = automaton.get(); + assertMatches(a, "blob"); + assertMatches(a, "bolb"); + assertMatches(a, "blobby"); + assertNoMatches(a, "bolbby"); + } + }); + assertTrue(visited.get()); + } + + private static void assertMatches(ByteRunAutomaton automaton, String text) { + BytesRef b = new BytesRef(text); + assertTrue(automaton.run(b.bytes, b.offset, b.length)); + } + + private static void assertNoMatches(ByteRunAutomaton automaton, String text) { + BytesRef b = new BytesRef(text); + assertFalse(automaton.run(b.bytes, b.offset, b.length)); + } } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java b/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java index 7993beb7427e..ef023752a66b 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java @@ -181,14 +181,17 @@ public ScoreMode scoreMode() { thread.join(); } - if (error.get() != null) { - throw error.get(); + try { + if (error.get() != null) { + throw error.get(); + } + queryCache.assertConsistent(); + } finally { + mgr.close(); + w.close(); + dir.close(); + queryCache.assertConsistent(); } - queryCache.assertConsistent(); - mgr.close(); - w.close(); - 
dir.close(); - queryCache.assertConsistent(); } public void testLRUEviction() throws Exception { diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java b/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java index 69e1e1039b22..f4a2543925f9 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java @@ -28,13 +28,12 @@ import org.apache.lucene.index.Term; import org.apache.lucene.store.Directory; import org.junit.AfterClass; +import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import java.io.IOException; -import junit.framework.Assert; - public class TestMultiTermConstantScore extends BaseTestRangeFilter { /** threshold for comparing floats */ diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java b/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java index 3c6e29403e52..7b0c41896e4e 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java @@ -38,8 +38,8 @@ import org.apache.lucene.codecs.PointsFormat; import org.apache.lucene.codecs.PointsReader; import org.apache.lucene.codecs.PointsWriter; -import org.apache.lucene.codecs.lucene60.Lucene60PointsReader; -import org.apache.lucene.codecs.lucene60.Lucene60PointsWriter; +import org.apache.lucene.codecs.lucene86.Lucene86PointsReader; +import org.apache.lucene.codecs.lucene86.Lucene86PointsWriter; import org.apache.lucene.document.BinaryPoint; import org.apache.lucene.document.Document; import org.apache.lucene.document.DoublePoint; @@ -1173,12 +1173,12 @@ public PointsFormat pointsFormat() { return new PointsFormat() { @Override public PointsWriter fieldsWriter(SegmentWriteState writeState) throws IOException { - return new Lucene60PointsWriter(writeState, maxPointsInLeafNode, maxMBSortInHeap); + return new Lucene86PointsWriter(writeState, maxPointsInLeafNode, maxMBSortInHeap); } @Override public PointsReader fieldsReader(SegmentReadState readState) throws IOException { - return new Lucene60PointsReader(readState); + return new Lucene86PointsReader(readState); } }; } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestRegexpQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestRegexpQuery.java index 6a8e183e0d45..767ee20a7d45 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestRegexpQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestRegexpQuery.java @@ -50,7 +50,7 @@ public void setUp() throws Exception { directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory); Document doc = new Document(); - doc.add(newTextField(FN, "the quick brown fox jumps over the lazy ??? dog 493432 49344", Field.Store.NO)); + doc.add(newTextField(FN, "the quick brown fox jumps over the lazy ??? 
dog 493432 49344 [foo] 12.3 \\", Field.Store.NO)); writer.addDocument(doc); reader = writer.getReader(); writer.close(); @@ -90,6 +90,41 @@ public void testNumericRange() throws IOException { assertEquals(0, regexQueryNrHits("<493433-600000>")); } + public void testCharacterClasses() throws IOException { + assertEquals(0, regexQueryNrHits("\\d")); + assertEquals(1, regexQueryNrHits("\\d*")); + assertEquals(1, regexQueryNrHits("\\d{6}")); + assertEquals(1, regexQueryNrHits("[a\\d]{6}")); + assertEquals(1, regexQueryNrHits("\\d{2,7}")); + assertEquals(0, regexQueryNrHits("\\d{4}")); + assertEquals(0, regexQueryNrHits("\\dog")); + assertEquals(1, regexQueryNrHits("493\\d32")); + + assertEquals(1, regexQueryNrHits("\\wox")); + assertEquals(1, regexQueryNrHits("493\\w32")); + assertEquals(1, regexQueryNrHits("\\?\\?\\?")); + assertEquals(1, regexQueryNrHits("\\?\\W\\?")); + assertEquals(1, regexQueryNrHits("\\?\\S\\?")); + + assertEquals(1, regexQueryNrHits("\\[foo\\]")); + assertEquals(1, regexQueryNrHits("\\[\\w{3}\\]")); + + assertEquals(0, regexQueryNrHits("\\s.*")); // no matches because all whitespace stripped + assertEquals(1, regexQueryNrHits("\\S*ck")); //matches quick + assertEquals(1, regexQueryNrHits("[\\d\\.]{3,10}")); // matches 12.3 + assertEquals(1, regexQueryNrHits("\\d{1,3}(\\.(\\d{1,2}))+")); // matches 12.3 + + assertEquals(1, regexQueryNrHits("\\\\")); + assertEquals(1, regexQueryNrHits("\\\\.*")); + + IllegalArgumentException expected = expectThrows( + IllegalArgumentException.class, () -> { + regexQueryNrHits("\\p"); + } + ); + assertTrue(expected.getMessage().contains("invalid character class")); + } + public void testRegexComplement() throws IOException { assertEquals(1, regexQueryNrHits("4934~[3]")); // not the empty lang, i.e. 
match all docs diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSameScoresWithThreads.java b/lucene/core/src/test/org/apache/lucene/search/TestSameScoresWithThreads.java index a615a6a7fbb6..4b284dfd31b0 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestSameScoresWithThreads.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestSameScoresWithThreads.java @@ -119,6 +119,7 @@ public void run() { thread.join(); } } + docs.close(); r.close(); dir.close(); } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSearchAfter.java b/lucene/core/src/test/org/apache/lucene/search/TestSearchAfter.java index 9dc9843d5106..59a8de9027fb 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestSearchAfter.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestSearchAfter.java @@ -295,7 +295,7 @@ void assertPage(int pageStart, TopDocs all, TopDocs paged) throws IOException { assertEquals(sd1.score, sd2.score, 0f); if (sd1 instanceof FieldDoc) { assertTrue(sd2 instanceof FieldDoc); - assertEquals(((FieldDoc) sd1).fields, ((FieldDoc) sd2).fields); + assertArrayEquals(((FieldDoc) sd1).fields, ((FieldDoc) sd2).fields); } } } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSearcherManager.java b/lucene/core/src/test/org/apache/lucene/search/TestSearcherManager.java index b92386669163..1d8edccf092d 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestSearcherManager.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestSearcherManager.java @@ -310,6 +310,7 @@ public void testReferenceDecrementIllegally() throws Exception { Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( new MockAnalyzer(random())).setMergeScheduler(new ConcurrentMergeScheduler())); + @SuppressWarnings("resource") SearcherManager sm = new SearcherManager(writer, false, false, new SearcherFactory()); writer.addDocument(new Document()); writer.commit(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java index e460e26ddba3..65986d968485 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java @@ -21,6 +21,7 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field.Store; import org.apache.lucene.document.StringField; +import org.apache.lucene.index.CompositeReaderContext; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.FilterLeafReader; @@ -47,9 +48,13 @@ public void testEquals() throws IOException { QueryUtils.checkUnequal( new TermQuery(new Term("foo", "bar")), new TermQuery(new Term("foo", "baz"))); + final CompositeReaderContext context; + try (MultiReader multiReader = new MultiReader()) { + context = multiReader.getContext(); + } QueryUtils.checkEqual( new TermQuery(new Term("foo", "bar")), - new TermQuery(new Term("foo", "bar"), TermStates.build(new MultiReader().getContext(), new Term("foo", "bar"), true))); + new TermQuery(new Term("foo", "bar"), TermStates.build(context, new Term("foo", "bar"), true))); } public void testCreateWeightDoesNotSeekIfScoresAreNotNeeded() throws IOException { diff --git a/lucene/core/src/test/org/apache/lucene/util/TestCharsRef.java b/lucene/core/src/test/org/apache/lucene/util/TestCharsRef.java index 079b3b774b53..fd9d639ea888 100644 --- 
a/lucene/core/src/test/org/apache/lucene/util/TestCharsRef.java +++ b/lucene/core/src/test/org/apache/lucene/util/TestCharsRef.java @@ -20,6 +20,8 @@ public class TestCharsRef extends LuceneTestCase { + + @SuppressWarnings("deprecation") public void testUTF16InUTF8Order() { final int numStrings = atLeast(1000); BytesRef utf8[] = new BytesRef[numStrings]; diff --git a/lucene/core/src/test/org/apache/lucene/util/TestOfflineSorter.java b/lucene/core/src/test/org/apache/lucene/util/TestOfflineSorter.java index 927af148afc7..f902cd58cc05 100644 --- a/lucene/core/src/test/org/apache/lucene/util/TestOfflineSorter.java +++ b/lucene/core/src/test/org/apache/lucene/util/TestOfflineSorter.java @@ -17,7 +17,6 @@ package org.apache.lucene.util; -import java.io.EOFException; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Path; @@ -351,14 +350,12 @@ protected void corruptFile() throws IOException { IndexOutput unsorted = dir.createTempOutput("unsorted", "tmp", IOContext.DEFAULT); writeAll(unsorted, generateFixed(5*1024)); - // This corruption made OfflineSorter fail with its own exception, but we verify it also went and added (as suppressed) that the - // checksum was wrong: - EOFException e = expectThrows(EOFException.class, () -> { + // This corruption made OfflineSorter fail with its own exception, but we verify and throw a CorruptIndexException + // instead when checksums don't match. + CorruptIndexException e = expectThrows(CorruptIndexException.class, () -> { new OfflineSorter(dir, "foo").sort(unsorted.getName()); }); - assertEquals(1, e.getSuppressed().length); - assertTrue(e.getSuppressed()[0] instanceof CorruptIndexException); - assertTrue(e.getSuppressed()[0].getMessage().contains("checksum failed (hardware problem?)")); + assertTrue(e.getMessage().contains("checksum failed (hardware problem?)")); } } @@ -436,12 +433,10 @@ protected void corruptFile() throws IOException { IndexOutput unsorted = dir.createTempOutput("unsorted", "tmp", IOContext.DEFAULT); writeAll(unsorted, generateFixed((int) (OfflineSorter.MB * 3))); - EOFException e = expectThrows(EOFException.class, () -> { + CorruptIndexException e = expectThrows(CorruptIndexException.class, () -> { new OfflineSorter(dir, "foo", OfflineSorter.DEFAULT_COMPARATOR, BufferSize.megabytes(1), 10, -1, null, 0).sort(unsorted.getName()); }); - assertEquals(1, e.getSuppressed().length); - assertTrue(e.getSuppressed()[0] instanceof CorruptIndexException); - assertTrue(e.getSuppressed()[0].getMessage().contains("checksum failed (hardware problem?)")); + assertTrue(e.getMessage().contains("checksum failed (hardware problem?)")); } } diff --git a/lucene/core/src/test/org/apache/lucene/util/TestVersion.java b/lucene/core/src/test/org/apache/lucene/util/TestVersion.java index e19c61508128..02d566ea10ae 100644 --- a/lucene/core/src/test/org/apache/lucene/util/TestVersion.java +++ b/lucene/core/src/test/org/apache/lucene/util/TestVersion.java @@ -23,6 +23,7 @@ import java.util.Locale; import java.util.Random; +@SuppressWarnings("deprecation") public class TestVersion extends LuceneTestCase { public void testOnOrAfter() throws Exception { diff --git a/lucene/core/src/test/org/apache/lucene/util/automaton/TestRegExp.java b/lucene/core/src/test/org/apache/lucene/util/automaton/TestRegExp.java index 7d24939c3478..ce36eacfb10d 100644 --- a/lucene/core/src/test/org/apache/lucene/util/automaton/TestRegExp.java +++ b/lucene/core/src/test/org/apache/lucene/util/automaton/TestRegExp.java @@ -17,8 +17,12 @@ package 
org.apache.lucene.util.automaton; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LuceneTestCase; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + public class TestRegExp extends LuceneTestCase { /** @@ -83,4 +87,148 @@ public void testRepeatWithEmptyLanguage() throws Exception { a = new RegExp("#?").toAutomaton(1000); assertTrue(a.toString().length() > 0); } + + public void testCoreJavaParity() { + // Generate random doc values and random regular expressions + // and check for same matching behaviour as Java's Pattern class. + for (int i = 0; i < 1000; i++) { + checkRandomExpression(randomDocValue(1 + random().nextInt(30))); + } + } + + public void testIllegalBackslashChars() { + String illegalChars = "abcefghijklmnopqrtuvxyzABCEFGHIJKLMNOPQRTUVXYZ"; + for (int i = 0; i < illegalChars.length(); i++) { + String illegalExpression = "\\" + illegalChars.charAt(i); + IllegalArgumentException expected = expectThrows( + IllegalArgumentException.class, () -> { + new RegExp(illegalExpression); + } + ); + assertTrue(expected.getMessage().contains("invalid character class")); + } + } + + public void testLegalBackslashChars() { + String legalChars = "dDsSWw0123456789[]*&^$@!{}\\/"; + for (int i = 0; i < legalChars.length(); i++) { + String legalExpression = "\\" + legalChars.charAt(i); + new RegExp(legalExpression); + } + } + + static String randomDocValue(int minLength) { + String charPalette = "AAAaaaBbbCccc123456 \t"; + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < minLength; i++) { + sb.append(charPalette.charAt(randomInt(charPalette.length() - 1))); + } + return sb.toString(); + } + + private static int randomInt(int bound) { + return bound == 0 ? 0 : random().nextInt(bound); + } + + protected String checkRandomExpression(String docValue) { + // Generate and test a random regular expression which should match the given docValue + StringBuilder result = new StringBuilder(); + // Pick a part of the string to change + int substitutionPoint = randomInt(docValue.length() - 1); + int substitutionLength = 1 + randomInt(Math.min(10, docValue.length() - substitutionPoint)); + + // Add any head to the result, unchanged + if (substitutionPoint > 0) { + result.append(docValue.substring(0, substitutionPoint)); + } + + // Modify the middle... + String replacementPart = docValue.substring(substitutionPoint, substitutionPoint + substitutionLength); + int mutation = random().nextInt(14); + switch (mutation) { + case 0: + // OR with random alpha of same length + result.append("(" + replacementPart + "|d" + randomDocValue(replacementPart.length()) + ")"); + break; + case 1: + // OR with non-existent value + result.append("(" + replacementPart + "|doesnotexist)"); + break; + case 2: + // OR with another randomised regex (used to create nested levels of expression). + result.append("(" + checkRandomExpression(replacementPart) + "|doesnotexist)"); + break; + case 3: + // Star-replace all ab sequences. + result.append(replacementPart.replaceAll("ab", ".*")); + break; + case 4: + // .-replace all b chars + result.append(replacementPart.replaceAll("b", ".")); + break; + case 5: + // length-limited stars {1,2} + result.append(".{1," + replacementPart.length() + "}"); + break; + case 6: + // replace all chars with . + result.append(replacementPart.replaceAll(".", ".")); + break; + case 7: + // OR with uppercase chars eg [aA] (many of these sorts of expressions appear in the wild)
+ char[] chars = replacementPart.toCharArray(); + for (char c : chars) { + result.append("[" + c + Character.toUpperCase(c) + "]"); + } + break; + case 8: + // NOT a character - replace all b's with "not a" + result.append(replacementPart.replaceAll("b", "[^a]")); + break; + case 9: + // Make whole part repeatable 1 or more times + result.append("(" + replacementPart + ")+"); + break; + case 10: + // Make whole part optional (matches 0 or 1 times) + result.append("(" + replacementPart + ")?"); + break; + case 11: + // Make any digits replaced by the digit character class + result.append(replacementPart.replaceAll("\\d", "\\\\d")); + break; + case 12: + // Make any whitespace chars replaced by the non-word class + result.append(replacementPart.replaceAll("\\s", "\\\\W")); + break; + case 13: + // Make any whitespace chars replaced by the whitespace class + result.append(replacementPart.replaceAll("\\s", "\\\\s")); + break; + default: + break; + } + // add any remaining tail, unchanged + if (substitutionPoint + substitutionLength <= docValue.length() - 1) { + result.append(docValue.substring(substitutionPoint + substitutionLength)); + } + + String regexPattern = result.toString(); + // Assert our randomly generated regex actually matches the provided raw input using Java's expression matcher + Pattern pattern = Pattern.compile(regexPattern); + Matcher matcher = pattern.matcher(docValue); + assertTrue("Java regex " + regexPattern + " did not match doc value " + docValue, matcher.matches()); + + RegExp regex = new RegExp(regexPattern); + Automaton automaton = regex.toAutomaton(); + ByteRunAutomaton bytesMatcher = new ByteRunAutomaton(automaton); + BytesRef br = new BytesRef(docValue); + assertTrue( + "[" + regexPattern + "] should match [" + docValue + "]" + substitutionPoint + "-" + substitutionLength + "/" + + docValue.length(), + bytesMatcher.run(br.bytes, br.offset, br.length) + ); + return regexPattern; + } + } diff --git a/lucene/core/src/test/org/apache/lucene/util/bkd/Test2BBKDPoints.java b/lucene/core/src/test/org/apache/lucene/util/bkd/Test2BBKDPoints.java index deccdf0498a6..27b5511a9822 100644 --- a/lucene/core/src/test/org/apache/lucene/util/bkd/Test2BBKDPoints.java +++ b/lucene/core/src/test/org/apache/lucene/util/bkd/Test2BBKDPoints.java @@ -58,12 +58,14 @@ public void test1D() throws Exception { } } IndexOutput out = dir.createOutput("1d.bkd", IOContext.DEFAULT); - long indexFP = w.finish(out); + Runnable finalizer = w.finish(out, out, out); + long indexFP = out.getFilePointer(); + finalizer.run(); out.close(); IndexInput in = dir.openInput("1d.bkd", IOContext.DEFAULT); in.seek(indexFP); - BKDReader r = new BKDReader(in); + BKDReader r = new BKDReader(in, in, in); CheckIndex.VerifyPointsVisitor visitor = new CheckIndex.VerifyPointsVisitor("1d", numDocs, r); r.intersect(visitor); assertEquals(r.size(), visitor.getPointCountSeen()); @@ -98,12 +100,14 @@ public void test2D() throws Exception { } } IndexOutput out = dir.createOutput("2d.bkd", IOContext.DEFAULT); - long indexFP = w.finish(out); + Runnable finalizer = w.finish(out, out, out); + long indexFP = out.getFilePointer(); + finalizer.run(); out.close(); IndexInput in = dir.openInput("2d.bkd", IOContext.DEFAULT); in.seek(indexFP); - BKDReader r = new BKDReader(in); + BKDReader r = new BKDReader(in, in, in); CheckIndex.VerifyPointsVisitor visitor = new CheckIndex.VerifyPointsVisitor("2d", numDocs, r); r.intersect(visitor); assertEquals(r.size(), visitor.getPointCountSeen()); diff --git a/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java 
b/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java index 38d91f20f9b0..f9bb9ea56b59 100644 --- a/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java +++ b/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java @@ -46,8 +46,6 @@ import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.TestUtil; -import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; - public class TestBKD extends LuceneTestCase { public void testBasicInts1D() throws Exception { @@ -61,12 +59,14 @@ public void testBasicInts1D() throws Exception { long indexFP; try (IndexOutput out = dir.createOutput("bkd", IOContext.DEFAULT)) { - indexFP = w.finish(out); + Runnable finalizer = w.finish(out, out, out); + indexFP = out.getFilePointer(); + finalizer.run(); } try (IndexInput in = dir.openInput("bkd", IOContext.DEFAULT)) { in.seek(indexFP); - BKDReader r = new BKDReader(in, randomBoolean()); + BKDReader r = new BKDReader(in, in, in); // Simple 1D range query: final int queryMin = 42; @@ -163,12 +163,14 @@ public void testRandomIntsNDims() throws Exception { long indexFP; try (IndexOutput out = dir.createOutput("bkd", IOContext.DEFAULT)) { - indexFP = w.finish(out); + Runnable finalizer = w.finish(out, out, out); + indexFP = out.getFilePointer(); + finalizer.run(); } try (IndexInput in = dir.openInput("bkd", IOContext.DEFAULT)) { in.seek(indexFP); - BKDReader r = new BKDReader(in, randomBoolean()); + BKDReader r = new BKDReader(in, in, in); byte[] minPackedValue = r.getMinPackedValue(); byte[] maxPackedValue = r.getMaxPackedValue(); @@ -290,13 +292,15 @@ public void testBigIntNDims() throws Exception { } long indexFP; - try (IndexOutput out = dir.createOutput("bkd", IOContext.DEFAULT)) { - indexFP = w.finish(out); + try (IndexOutput out = dir.createOutput("bkd", IOContext.DEFAULT)) { + Runnable finalizer = w.finish(out, out, out); + indexFP = out.getFilePointer(); + finalizer.run(); } try (IndexInput in = dir.openInput("bkd", IOContext.DEFAULT)) { in.seek(indexFP); - BKDReader r = new BKDReader(in, randomBoolean()); + BKDReader r = new BKDReader(in, in, in); int iters = atLeast(100); for(int iter=0;iter 0) { - toMerge.add(w.finish(out)); + Runnable finalizer = w.finish(out, out, out); + toMerge.add(out.getFilePointer()); + finalizer.run(); final int curDocIDBase = lastDocIDBase; docMaps.add(new MergeState.DocMap() { @Override @@ -788,21 +796,25 @@ public int get(int docID) { List readers = new ArrayList<>(); for(long fp : toMerge) { in.seek(fp); - readers.add(new BKDReader(in, randomBoolean())); + readers.add(new BKDReader(in, in, in)); } out = dir.createOutput("bkd2", IOContext.DEFAULT); - indexFP = w.merge(out, docMaps, readers); + Runnable finalizer = w.merge(out, out, out, docMaps, readers); + indexFP = out.getFilePointer(); + finalizer.run(); out.close(); in.close(); in = dir.openInput("bkd2", IOContext.DEFAULT); } else { - indexFP = w.finish(out); + Runnable finalizer = w.finish(out, out, out); + indexFP = out.getFilePointer(); + finalizer.run(); out.close(); in = dir.openInput("bkd", IOContext.DEFAULT); } in.seek(indexFP); - BKDReader r = new BKDReader(in, randomBoolean()); + BKDReader r = new BKDReader(in, in, in); int iters = atLeast(100); for(int iter=0;iter maxPointsInLeafNode) { - actualMaxPointsInLeafNode = (actualMaxPointsInLeafNode + 1) / 2; - } + BKDReader points = new BKDReader(pointsIn, pointsIn, pointsIn); // If all points match, then the point count is numLeaves * maxPointsInLeafNode - final int numLeaves = Integer.highestOneBit((numValues - 1) 
/ actualMaxPointsInLeafNode) << 1; - assertEquals(numLeaves * actualMaxPointsInLeafNode, + int numLeaves = numValues / maxPointsInLeafNode; + if (numValues % maxPointsInLeafNode != 0) { + numLeaves++; + } + assertEquals(numLeaves * maxPointsInLeafNode, points.estimatePointCount(new IntersectVisitor() { @Override public void visit(int docID, byte[] packedValue) throws IOException {} @@ -1363,8 +1383,8 @@ public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { } }); assertTrue(""+pointCount, - pointCount == (actualMaxPointsInLeafNode + 1) / 2 || // common case - pointCount == 2*((actualMaxPointsInLeafNode + 1) / 2)); // if the point is a split value + pointCount == (maxPointsInLeafNode + 1) / 2 || // common case + pointCount == 2*((maxPointsInLeafNode + 1) / 2)); // if the point is a split value pointsIn.close(); dir.close(); @@ -1452,7 +1472,7 @@ public byte getByteAt(int i, int k) { BKDWriter.DEFAULT_MAX_MB_SORT_IN_HEAP, numValues); expectThrows(IllegalStateException.class, () -> { try (IndexOutput out = dir.createOutput("bkd", IOContext.DEFAULT)) { - w.writeField(out, "test_field_name", reader); + w.writeField(out, out, out, "test_field_name", reader); } finally { w.close(); dir.close(); @@ -1563,7 +1583,7 @@ public int getDocCount() { } }; try (IndexOutput out = dir.createOutput("bkd", IOContext.DEFAULT)) { - IllegalStateException ex = expectThrows(IllegalStateException.class, () -> { w.writeField(out, "", val);}); + IllegalStateException ex = expectThrows(IllegalStateException.class, () -> { w.writeField(out, out, out, "", val);}); assertEquals("totalPointCount=10 was passed when we were created, but we just hit 11 values", ex.getMessage()); w.close(); } diff --git a/lucene/core/src/test/org/apache/lucene/util/fst/Test2BFST.java b/lucene/core/src/test/org/apache/lucene/util/fst/Test2BFST.java index 9ee6947b2e3f..8725b032af1f 100644 --- a/lucene/core/src/test/org/apache/lucene/util/fst/Test2BFST.java +++ b/lucene/core/src/test/org/apache/lucene/util/fst/Test2BFST.java @@ -119,10 +119,10 @@ public void test() throws Exception { if (verify == 0) { System.out.println("\nTEST: save/load FST and re-verify"); IndexOutput out = dir.createOutput("fst", IOContext.DEFAULT); - fst.save(out); + fst.save(out, out); out.close(); IndexInput in = dir.openInput("fst", IOContext.DEFAULT); - fst = new FST<>(in, outputs); + fst = new FST<>(in, in, outputs); in.close(); } else { dir.deleteFile("fst"); @@ -198,10 +198,10 @@ public void test() throws Exception { if (verify == 0) { System.out.println("\nTEST: save/load FST and re-verify"); IndexOutput out = dir.createOutput("fst", IOContext.DEFAULT); - fst.save(out); + fst.save(out, out); out.close(); IndexInput in = dir.openInput("fst", IOContext.DEFAULT); - fst = new FST<>(in, outputs); + fst = new FST<>(in, in, outputs); in.close(); } else { dir.deleteFile("fst"); @@ -256,7 +256,9 @@ public void test() throws Exception { // forward lookup: assertEquals(output, Util.get(fst, input).longValue()); // reverse lookup: - assertEquals(input, Util.getByOutput(fst, output)); + @SuppressWarnings("deprecation") + IntsRef inputResult = Util.getByOutput(fst, output); + assertEquals(input, inputResult); output += 1 + r.nextInt(10); nextInput(r, ints); } @@ -284,10 +286,10 @@ public void test() throws Exception { if (verify == 0) { System.out.println("\nTEST: save/load FST and re-verify"); IndexOutput out = dir.createOutput("fst", IOContext.DEFAULT); - fst.save(out); + fst.save(out, out); out.close(); IndexInput in = dir.openInput("fst", IOContext.DEFAULT); 
- fst = new FST<>(in, outputs); + fst = new FST<>(in, in, outputs); in.close(); } else { dir.deleteFile("fst"); diff --git a/lucene/core/src/test/org/apache/lucene/util/fst/TestFSTDirectAddressing.java b/lucene/core/src/test/org/apache/lucene/util/fst/TestFSTDirectAddressing.java index 25ea6f6fe0e4..6cf2d76f3f14 100644 --- a/lucene/core/src/test/org/apache/lucene/util/fst/TestFSTDirectAddressing.java +++ b/lucene/core/src/test/org/apache/lucene/util/fst/TestFSTDirectAddressing.java @@ -174,7 +174,7 @@ public static void main(String... args) throws Exception { private static void countFSTArcs(String fstFilePath) throws IOException { byte[] buf = Files.readAllBytes(Paths.get(fstFilePath)); DataInput in = new ByteArrayDataInput(buf); - FST fst = new FST<>(in, ByteSequenceOutputs.getSingleton()); + FST fst = new FST<>(in, in, ByteSequenceOutputs.getSingleton()); BytesRefFSTEnum fstEnum = new BytesRefFSTEnum<>(fst); int binarySearchArcCount = 0, directAddressingArcCount = 0, listArcCount = 0; while(fstEnum.next() != null) { @@ -228,7 +228,7 @@ private static void recompileAndWalk(String fstFilePath) throws IOException { System.out.println("Reading FST"); long startTimeMs = System.currentTimeMillis(); - FST originalFst = new FST<>(in, CharSequenceOutputs.getSingleton()); + FST originalFst = new FST<>(in, in, CharSequenceOutputs.getSingleton()); long endTimeMs = System.currentTimeMillis(); System.out.println("time = " + (endTimeMs - startTimeMs) + " ms"); diff --git a/lucene/core/src/test/org/apache/lucene/util/fst/TestFSTs.java b/lucene/core/src/test/org/apache/lucene/util/fst/TestFSTs.java index e081f6c2f5e6..10319f9eb54e 100644 --- a/lucene/core/src/test/org/apache/lucene/util/fst/TestFSTs.java +++ b/lucene/core/src/test/org/apache/lucene/util/fst/TestFSTs.java @@ -529,7 +529,7 @@ public void run(int limit, boolean verify, boolean verifyByOutput) throws IOExce Directory dir = FSDirectory.open(dirOut); IndexOutput out = dir.createOutput("fst.bin", IOContext.DEFAULT); - fst.save(out); + fst.save(out, out); out.close(); System.out.println("Saved FST to fst.bin."); @@ -570,7 +570,7 @@ public void run(int limit, boolean verify, boolean verifyByOutput) throws IOExce } else { // Get by output final Long output = (Long) getOutput(intsRef.get(), ord); - @SuppressWarnings("unchecked") final IntsRef actual = Util.getByOutput((FST) fst, output.longValue()); + @SuppressWarnings({"unchecked", "deprecation"}) final IntsRef actual = Util.getByOutput((FST) fst, output.longValue()); if (actual == null) { throw new RuntimeException("unexpected null input from output=" + output); } @@ -833,13 +833,17 @@ public void testSimple() throws Exception { assertEquals(b, seekResult.input); assertEquals(42, (long) seekResult.output); - assertEquals(Util.toIntsRef(new BytesRef("c"), new IntsRefBuilder()), - Util.getByOutput(fst, 13824324872317238L)); - assertNull(Util.getByOutput(fst, 47)); - assertEquals(Util.toIntsRef(new BytesRef("b"), new IntsRefBuilder()), - Util.getByOutput(fst, 42)); - assertEquals(Util.toIntsRef(new BytesRef("a"), new IntsRefBuilder()), - Util.getByOutput(fst, 17)); + @SuppressWarnings("deprecation") IntsRef byOutput = Util.getByOutput(fst, 13824324872317238L); + assertEquals(Util.toIntsRef(new BytesRef("c"), new IntsRefBuilder()), byOutput); + + @SuppressWarnings("deprecation") IntsRef byOutput47 = Util.getByOutput(fst, 47); + assertNull(byOutput47); + + @SuppressWarnings("deprecation") IntsRef byOutput42 = Util.getByOutput(fst, 42); + assertEquals(Util.toIntsRef(new BytesRef("b"), new 
IntsRefBuilder()), byOutput42); + + @SuppressWarnings("deprecation") IntsRef byOutput17 = Util.getByOutput(fst, 17); + assertEquals(Util.toIntsRef(new BytesRef("a"), new IntsRefBuilder()), byOutput17); } public void testPrimaryKeys() throws Exception { @@ -1191,11 +1195,11 @@ public void testNonFinalStopNode() throws Exception { // Make sure it still works after save/load: Directory dir = newDirectory(); IndexOutput out = dir.createOutput("fst", IOContext.DEFAULT); - fst.save(out); + fst.save(out, out); out.close(); IndexInput in = dir.openInput("fst", IOContext.DEFAULT); - final FST fst2 = new FST<>(in, outputs); + final FST fst2 = new FST<>(in, in, outputs); checkStopNodes(fst2, outputs); in.close(); dir.close(); diff --git a/lucene/core/src/test/org/apache/lucene/util/packed/TestPackedInts.java b/lucene/core/src/test/org/apache/lucene/util/packed/TestPackedInts.java index 27ac460e9c4e..497052baed99 100644 --- a/lucene/core/src/test/org/apache/lucene/util/packed/TestPackedInts.java +++ b/lucene/core/src/test/org/apache/lucene/util/packed/TestPackedInts.java @@ -836,7 +836,9 @@ public void testEncodeDecode() { final long[] blocks = new long[blocksOffset + blocksLen]; for (int i = 0; i < blocks.length; ++i) { blocks[i] = random().nextLong(); - if (format == PackedInts.Format.PACKED_SINGLE_BLOCK && 64 % bpv != 0) { + @SuppressWarnings("deprecation") + PackedInts.Format PACKED_SINGLE_BLOCK = PackedInts.Format.PACKED_SINGLE_BLOCK; + if (format == PACKED_SINGLE_BLOCK && 64 % bpv != 0) { // clear highest bits for packed final int toClear = 64 % bpv; blocks[i] = (blocks[i] << toClear) >>> toClear; diff --git a/lucene/demo/build.gradle b/lucene/demo/build.gradle index 2871774bbf88..8407fa6b5d63 100644 --- a/lucene/demo/build.gradle +++ b/lucene/demo/build.gradle @@ -17,6 +17,8 @@ apply plugin: 'java-library' +description = 'Simple example code for Apache Lucene' + dependencies { implementation project(':lucene:core') implementation project(':lucene:facet') diff --git a/lucene/expressions/build.gradle b/lucene/expressions/build.gradle index 62d197830e76..7ba76a7f3a97 100644 --- a/lucene/expressions/build.gradle +++ b/lucene/expressions/build.gradle @@ -17,6 +17,8 @@ apply plugin: 'java-library' +description = 'Dynamically computed values to sort/facet/search on based on a pluggable grammar' + dependencies { api project(':lucene:core') diff --git a/lucene/expressions/src/java/org/apache/lucene/expressions/ExpressionValueSource.java b/lucene/expressions/src/java/org/apache/lucene/expressions/ExpressionValueSource.java index f4fa894af8a6..1f8713dd38f3 100644 --- a/lucene/expressions/src/java/org/apache/lucene/expressions/ExpressionValueSource.java +++ b/lucene/expressions/src/java/org/apache/lucene/expressions/ExpressionValueSource.java @@ -174,7 +174,7 @@ public DoubleValuesSource rewrite(IndexSearcher searcher) throws IOException { DoubleValuesSource[] rewritten = new DoubleValuesSource[variables.length]; for (int i = 0; i < variables.length; i++) { rewritten[i] = variables[i].rewrite(searcher); - changed |= (rewritten[i] == variables[i]); + changed |= (rewritten[i] != variables[i]); } if (changed) { return new ExpressionValueSource(rewritten, expression, needsScores); diff --git a/lucene/expressions/src/test/org/apache/lucene/expressions/TestExpressionValueSource.java b/lucene/expressions/src/test/org/apache/lucene/expressions/TestExpressionValueSource.java index d5dbabee5478..3b5589b2d7e3 100644 --- a/lucene/expressions/src/test/org/apache/lucene/expressions/TestExpressionValueSource.java +++ 
b/lucene/expressions/src/test/org/apache/lucene/expressions/TestExpressionValueSource.java @@ -28,9 +28,12 @@ import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.search.DoubleValues; import org.apache.lucene.search.DoubleValuesSource; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.util.LuceneTestCase; +import java.io.IOException; + public class TestExpressionValueSource extends LuceneTestCase { DirectoryReader reader; Directory dir; @@ -125,4 +128,59 @@ public void testDoubleValuesSourceEquals() throws Exception { assertFalse(vs1.equals(vs4)); } + public void testRewrite() throws Exception { + Expression expr = JavascriptCompiler.compile("a"); + + ExpressionValueSource rewritingExpressionSource = new ExpressionValueSource( + new DoubleValuesSource[]{createDoubleValuesSourceMock(true)}, + expr, + false); + ExpressionValueSource notRewritingExpressionSource = new ExpressionValueSource( + new DoubleValuesSource[]{createDoubleValuesSourceMock(false)}, + expr, + false); + + assertNotSame(rewritingExpressionSource, rewritingExpressionSource.rewrite(null)); + assertSame(notRewritingExpressionSource, notRewritingExpressionSource.rewrite(null)); + } + + private static DoubleValuesSource createDoubleValuesSourceMock(boolean rewriting) { + return new DoubleValuesSource() { + @Override + public DoubleValues getValues(LeafReaderContext ctx, DoubleValues scores) throws IOException { + return null; + } + + @Override + public boolean needsScores() { + return false; + } + + @Override + public DoubleValuesSource rewrite(IndexSearcher reader) throws IOException { + return rewriting ? createDoubleValuesSourceMock(true) : this; + } + + @Override + public int hashCode() { + return 0; + } + + @Override + public boolean equals(Object obj) { + return false; + } + + @Override + public String toString() { + return null; + } + + @Override + public boolean isCacheable(LeafReaderContext ctx) { + return false; + } + }; + } + } diff --git a/lucene/facet/build.gradle b/lucene/facet/build.gradle index c0ffc91e9298..6b6a6ef137e8 100644 --- a/lucene/facet/build.gradle +++ b/lucene/facet/build.gradle @@ -18,6 +18,7 @@ apply plugin: 'java-library' +description = 'Faceted indexing and search capabilities' dependencies { api project(':lucene:core') diff --git a/lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java b/lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java index 15df7e3d957b..6dbacf19578f 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java @@ -299,7 +299,7 @@ public DrillSidewaysResult search(ScoreDoc after, DrillDownQuery query, int topN if (executor != null) { // We have an executor, let use the multi-threaded version final CollectorManager collectorManager = - new CollectorManager() { + new CollectorManager<>() { @Override public TopScoreDocCollector newCollector() throws IOException { @@ -404,6 +404,7 @@ private DrillDownQuery getDrillDownQuery(final DrillDownQuery query, Query[] que } /** Runs a search, using a {@link CollectorManager} to gather and merge search results */ + @SuppressWarnings("unchecked") public ConcurrentDrillSidewaysResult search(final DrillDownQuery query, final CollectorManager hitCollectorManager) throws IOException { diff --git a/lucene/facet/src/java/org/apache/lucene/facet/FacetsConfig.java b/lucene/facet/src/java/org/apache/lucene/facet/FacetsConfig.java index 
f958af187805..84f69d7bc0a4 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/FacetsConfig.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/FacetsConfig.java @@ -496,8 +496,15 @@ private void checkTaxoWriter(TaxonomyWriter taxoWriter) { } } - // Joins the path components together: - private static final char DELIM_CHAR = '\u001F'; + /** + * Character used to join the category path components together into a single + * drill down term for indexing. Applications and unit-tests can reference this for + * creating their own drill-down terms, or use existing APIs (for example, + * {@link #pathToString}). + * + * @lucene.internal + */ + public static final char DELIM_CHAR = '\u001F'; // Escapes any occurrence of the path component inside the label: private static final char ESCAPE_CHAR = '\u001E'; diff --git a/lucene/grouping/build.gradle b/lucene/grouping/build.gradle index b18d3d9cf4dd..b0838e3dfd23 100644 --- a/lucene/grouping/build.gradle +++ b/lucene/grouping/build.gradle @@ -18,6 +18,8 @@ apply plugin: 'java-library' +description = 'Collectors for grouping search results' + dependencies { api project(':lucene:core') diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/BlockGroupingCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/BlockGroupingCollector.java index 23601ca994b0..bbeb2ee204b4 100644 --- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/BlockGroupingCollector.java +++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/BlockGroupingCollector.java @@ -67,6 +67,10 @@ * @lucene.experimental */ +// TODO: TopGroups.merge() won't work with TopGroups returned by this collector, because +// each block will be on a different shard. Add a specialized merge() static method +// to this collector? + public class BlockGroupingCollector extends SimpleCollector { private int[] pendingSubDocs; diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/DoubleRange.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/DoubleRange.java new file mode 100644 index 000000000000..df34f6b1e0df --- /dev/null +++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/DoubleRange.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.lucene.search.grouping; + +import java.util.Objects; + +/** + * Represents a contiguous range of double values, with an inclusive minimum and + * exclusive maximum + */ +public class DoubleRange { + + /** The inclusive minimum value of this range */ + public double min; + /** The exclusive maximum value of this range */ + public double max; + + /** + * Creates a new double range, running from {@code min} inclusive to {@code max} exclusive + */ + public DoubleRange(double min, double max) { + this.min = min; + this.max = max; + } + + @Override + public String toString() { + return "DoubleRange(" + min + ", " + max + ")"; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DoubleRange that = (DoubleRange) o; + return Double.compare(that.min, min) == 0 && + Double.compare(that.max, max) == 0; + } + + @Override + public int hashCode() { + return Objects.hash(min, max); + } +} diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/DoubleRangeFactory.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/DoubleRangeFactory.java new file mode 100644 index 000000000000..3ea4606a6431 --- /dev/null +++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/DoubleRangeFactory.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.search.grouping; + +/** + * Groups double values into ranges + */ +public class DoubleRangeFactory { + + private final double min; + private final double width; + private final double max; + + /** + * Creates a new DoubleRangeFactory + * @param min a minimum value; all doubles below this value are grouped into a single range + * @param width a standard width; all ranges between {@code min} and {@code max} are this wide, + * with the exception of the final range which may be up to this width. Ranges + * are inclusive at the lower end, and exclusive at the upper end. 
+ * @param max a maximum value; all doubles above this value are grouped into a single range + */ + public DoubleRangeFactory(double min, double width, double max) { + this.min = min; + this.width = width; + this.max = max; + } + + /** + * Finds the DoubleRange that a value should be grouped into + * @param value the value to group + * @param reuse an existing DoubleRange object to reuse + */ + public DoubleRange getRange(double value, DoubleRange reuse) { + if (reuse == null) + reuse = new DoubleRange(Double.NEGATIVE_INFINITY, Double.MAX_VALUE); + if (value < min) { + reuse.max = min; + reuse.min = Double.NEGATIVE_INFINITY; + return reuse; + } + if (value >= max) { + reuse.min = max; + reuse.max = Double.MAX_VALUE; + return reuse; + } + double bucket = Math.floor((value - min) / width); + reuse.min = min + (bucket * width); + reuse.max = reuse.min + width; + return reuse; + } + +} diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/DoubleRangeGroupSelector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/DoubleRangeGroupSelector.java new file mode 100644 index 000000000000..4a6a65a72366 --- /dev/null +++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/DoubleRangeGroupSelector.java @@ -0,0 +1,100 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.lucene.search.grouping; + +import java.io.IOException; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.DoubleValues; +import org.apache.lucene.search.DoubleValuesSource; +import org.apache.lucene.search.Scorable; + +/** + * A GroupSelector implementation that groups documents by double values + */ +public class DoubleRangeGroupSelector extends GroupSelector<DoubleRange> { + + private final DoubleValuesSource source; + private final DoubleRangeFactory rangeFactory; + + private Set<DoubleRange> inSecondPass; + private boolean includeEmpty = true; + private boolean positioned; + private DoubleRange current; + + private LeafReaderContext context; + private DoubleValues values; + + /** + * Creates a new DoubleRangeGroupSelector + * @param source a DoubleValuesSource to retrieve double values per document + * @param rangeFactory a DoubleRangeFactory that defines how to group the double values into range buckets + */ + public DoubleRangeGroupSelector(DoubleValuesSource source, DoubleRangeFactory rangeFactory) { + this.source = source; + this.rangeFactory = rangeFactory; + } + + @Override + public void setNextReader(LeafReaderContext readerContext) throws IOException { + this.context = readerContext; + } + + @Override + public void setScorer(Scorable scorer) throws IOException { + this.values = source.getValues(context, DoubleValuesSource.fromScorer(scorer)); + } + + @Override + public State advanceTo(int doc) throws IOException { + positioned = values.advanceExact(doc); + if (positioned == false) { + return includeEmpty ? State.ACCEPT : State.SKIP; + } + this.current = rangeFactory.getRange(values.doubleValue(), this.current); + if (inSecondPass == null) { + return State.ACCEPT; + } + return inSecondPass.contains(this.current) ? State.ACCEPT : State.SKIP; + } + + @Override + public DoubleRange currentValue() throws IOException { + return positioned ? 
new DoubleRange(this.current.min, this.current.max) : null; + } + + @Override + public void setGroups(Collection<SearchGroup<DoubleRange>> searchGroups) { + inSecondPass = new HashSet<>(); + includeEmpty = false; + for (SearchGroup<DoubleRange> group : searchGroups) { + if (group.groupValue == null) + includeEmpty = true; + else + inSecondPass.add(group.groupValue); + } + } +} diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/FirstPassGroupingCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/FirstPassGroupingCollector.java index 6a745b8e7352..f5b05974e416 100644 --- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/FirstPassGroupingCollector.java +++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/FirstPassGroupingCollector.java @@ -151,6 +151,7 @@ public Collection<SearchGroup<T>> getTopGroups(int groupOffset) throws IOException @Override public void setScorer(Scorable scorer) throws IOException { + groupSelector.setScorer(scorer); for (LeafFieldComparator comparator : leafComparators) { comparator.setScorer(scorer); } diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupSelector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupSelector.java index dbb09329f8ae..92962a4d4570 100644 --- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupSelector.java +++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupSelector.java @@ -21,6 +21,7 @@ import java.util.Collection; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Scorable; /** * Defines a group, for use by grouping collectors @@ -43,6 +44,11 @@ public enum State { SKIP, ACCEPT } */ public abstract void setNextReader(LeafReaderContext readerContext) throws IOException; + /** + * Set the current Scorer + */ + public abstract void setScorer(Scorable scorer) throws IOException; + /** * Advance the GroupSelector's iterator to the given document */ @@ -53,12 +59,12 @@ public enum State { SKIP, ACCEPT } * * N.B. this object may be reused, for a persistent version use {@link #copyValue()} */ - public abstract T currentValue(); + public abstract T currentValue() throws IOException; /** * @return a copy of the group value of the current document */ - public abstract T copyValue(); + public abstract T copyValue() throws IOException; /** * Set a restriction on the group values returned by this selector diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupingSearch.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupingSearch.java index b88fb743f087..25ed3770addc 100644 --- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupingSearch.java +++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupingSearch.java @@ -71,6 +71,14 @@ public GroupingSearch(String groupField) { this(new TermGroupSelector(groupField), null); } + /** + * Constructs a GroupingSearch instance that groups documents using a {@link GroupSelector} + * @param groupSelector a {@link GroupSelector} that defines groups for this GroupingSearch + */ + public GroupingSearch(GroupSelector<?> groupSelector) { + this(groupSelector, null); + } + /** * Constructs a GroupingSearch instance that groups documents by function using a {@link ValueSource} * instance. 
@@ -78,7 +86,7 @@ public GroupingSearch(String groupField) { * @param groupFunction The function to group by specified as {@link ValueSource} * @param valueSourceContext The context of the specified groupFunction */ - public GroupingSearch(ValueSource groupFunction, Map valueSourceContext) { + public GroupingSearch(ValueSource groupFunction, Map<?, ?> valueSourceContext) { this(new ValueSourceGroupSelector(groupFunction, valueSourceContext), null); } diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/LongRange.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/LongRange.java new file mode 100644 index 000000000000..7b6c845aa4a9 --- /dev/null +++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/LongRange.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.search.grouping; + +import java.util.Objects; + +/** + * Represents a contiguous range of long values, with an inclusive minimum and + * exclusive maximum + */ +public class LongRange { + + /** The inclusive minimum value of this range */ + public long min; + /** The exclusive maximum value of this range */ + public long max; + + /** + * Creates a new long range, running from {@code min} inclusive to {@code max} exclusive + */ + public LongRange(long min, long max) { + this.min = min; + this.max = max; + } + + @Override + public String toString() { + return "LongRange(" + min + ", " + max + ")"; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LongRange that = (LongRange) o; + return that.min == min && that.max == max; + } + + @Override + public int hashCode() { + return Objects.hash(min, max); + } +} diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/LongRangeFactory.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/LongRangeFactory.java new file mode 100644 index 000000000000..be66647a5e99 --- /dev/null +++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/LongRangeFactory.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.search.grouping; + +/** + * Groups long values into ranges + */ +public class LongRangeFactory { + + private final long min; + private final long width; + private final long max; + + /** + * Creates a new LongRangeFactory + * @param min a minimum value; all longs below this value are grouped into a single range + * @param width a standard width; all ranges between {@code min} and {@code max} are this wide, + * with the exception of the final range which may be up to this width. Ranges + * are inclusive at the lower end, and exclusive at the upper end. + * @param max a maximum value; all longs above this value are grouped into a single range + */ + public LongRangeFactory(long min, long width, long max) { + this.min = min; + this.width = width; + this.max = max; + } + + /** + * Finds the LongRange that a value should be grouped into + * @param value the value to group + * @param reuse an existing LongRange object to reuse + */ + public LongRange getRange(long value, LongRange reuse) { + if (reuse == null) + reuse = new LongRange(Long.MIN_VALUE, Long.MAX_VALUE); + if (value < min) { + reuse.max = min; + reuse.min = Long.MIN_VALUE; + return reuse; + } + if (value >= max) { + reuse.min = max; + reuse.max = Long.MAX_VALUE; + return reuse; + } + long bucket = (value - min) / width; + reuse.min = min + (bucket * width); + reuse.max = reuse.min + width; + return reuse; + } + +} diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/LongRangeGroupSelector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/LongRangeGroupSelector.java new file mode 100644 index 000000000000..7dd0c238d925 --- /dev/null +++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/LongRangeGroupSelector.java @@ -0,0 +1,101 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.lucene.search.grouping; + +import java.io.IOException; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.DoubleValuesSource; +import org.apache.lucene.search.LongValues; +import org.apache.lucene.search.LongValuesSource; +import org.apache.lucene.search.Scorable; + +/** + * A GroupSelector implementation that groups documents by long values + */ +public class LongRangeGroupSelector extends GroupSelector<LongRange> { + + private final LongValuesSource source; + private final LongRangeFactory rangeFactory; + + private Set<LongRange> inSecondPass; + private boolean includeEmpty = true; + private boolean positioned; + private LongRange current; + + private LeafReaderContext context; + private LongValues values; + + /** + * Creates a new LongRangeGroupSelector + * @param source a LongValuesSource to retrieve long values per document + * @param rangeFactory a LongRangeFactory that defines how to group the long values into range buckets + */ + public LongRangeGroupSelector(LongValuesSource source, LongRangeFactory rangeFactory) { + this.source = source; + this.rangeFactory = rangeFactory; + } + + @Override + public void setNextReader(LeafReaderContext readerContext) throws IOException { + this.context = readerContext; + } + + @Override + public void setScorer(Scorable scorer) throws IOException { + this.values = source.getValues(context, DoubleValuesSource.fromScorer(scorer)); + } + + @Override + public State advanceTo(int doc) throws IOException { + positioned = values.advanceExact(doc); + if (positioned == false) { + return includeEmpty ? State.ACCEPT : State.SKIP; + } + this.current = rangeFactory.getRange(values.longValue(), this.current); + if (inSecondPass == null) { + return State.ACCEPT; + } + return inSecondPass.contains(this.current) ? State.ACCEPT : State.SKIP; + } + + @Override + public LongRange currentValue() throws IOException { + return positioned ? this.current : null; + } + + @Override + public LongRange copyValue() throws IOException { + return positioned ? 
new LongRange(this.current.min, this.current.max) : null; + } + + @Override + public void setGroups(Collection<SearchGroup<LongRange>> searchGroups) { + inSecondPass = new HashSet<>(); + includeEmpty = false; + for (SearchGroup<LongRange> group : searchGroups) { + if (group.groupValue == null) + includeEmpty = true; + else + inSecondPass.add(group.groupValue); + } + } +} diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/SecondPassGroupingCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/SecondPassGroupingCollector.java index 0d5fc9daa26c..dc7d0aabde76 100644 --- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/SecondPassGroupingCollector.java +++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/SecondPassGroupingCollector.java @@ -78,6 +78,7 @@ public ScoreMode scoreMode() { @Override public void setScorer(Scorable scorer) throws IOException { + groupSelector.setScorer(scorer); groupReducer.setScorer(scorer); } diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/TermGroupSelector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/TermGroupSelector.java index 5b8f77c2ce83..65213b2dc9a4 100644 --- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/TermGroupSelector.java +++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/TermGroupSelector.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.search.Scorable; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefHash; @@ -64,6 +65,9 @@ public void setNextReader(LeafReaderContext readerContext) throws IOException { } } + @Override + public void setScorer(Scorable scorer) throws IOException { } + @Override public State advanceTo(int doc) throws IOException { if (this.docValues.advanceExact(doc) == false) { diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/ValueSourceGroupSelector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/ValueSourceGroupSelector.java index 249016018855..54373de8be25 100644 --- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/ValueSourceGroupSelector.java +++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/ValueSourceGroupSelector.java @@ -26,6 +26,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.queries.function.FunctionValues; import org.apache.lucene.queries.function.ValueSource; +import org.apache.lucene.search.Scorable; import org.apache.lucene.util.mutable.MutableValue; /** @@ -34,7 +35,7 @@ public class ValueSourceGroupSelector extends GroupSelector<MutableValue> { private final ValueSource valueSource; - private final Map context; + private final Map<?, ?> context; private Set<MutableValue> secondPassGroups; @@ -43,7 +44,7 @@ public class ValueSourceGroupSelector extends GroupSelector<MutableValue> { * @param valueSource the ValueSource to group by * @param context a context map for the ValueSource */ - public ValueSourceGroupSelector(ValueSource valueSource, Map context) { + public ValueSourceGroupSelector(ValueSource valueSource, Map<?, ?> context) { this.valueSource = valueSource; this.context = context; } @@ -56,6 +57,9 @@ public void setNextReader(LeafReaderContext readerContext) throws IOException { this.filler = values.getValueFiller(); } + @Override + public void setScorer(Scorable scorer) throws IOException { } + @Override public State advanceTo(int doc) throws IOException { this.filler.fillValue(doc); @@ -67,7 
+71,7 @@ public State advanceTo(int doc) throws IOException { } @Override - public MutableValue currentValue() { + public MutableValue currentValue() throws IOException { return filler.getValue(); } diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/package-info.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/package-info.java index 7e3745e61f28..36d94a5a92e3 100644 --- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/package-info.java +++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/package-info.java @@ -28,11 +28,9 @@ *

Grouping requires a number of inputs:

* *
    - *
  • groupField: this is the field used for grouping. - * For example, if you use the author field then each - * group has all books by the same author. Documents that don't - * have this field are grouped under a single group with - * a null group value. + *
  • groupSelector: this defines how groups are created + * from values per-document. The grouping module ships with + * selectors for grouping by term, and by long and double ranges. * *
  • groupSort: how the groups are sorted. For sorting * purposes, each group is "represented" by the highest-sorted @@ -80,6 +78,10 @@ * the value of a {@link org.apache.lucene.index.SortedDocValues} field
  • *
  • {@link org.apache.lucene.search.grouping.ValueSourceGroupSelector} groups based on * the value of a {@link org.apache.lucene.queries.function.ValueSource}
  • + *
  • {@link org.apache.lucene.search.grouping.DoubleRangeGroupSelector} groups based on + * the value of a {@link org.apache.lucene.search.DoubleValuesSource}
  • + *
  • {@link org.apache.lucene.search.grouping.LongRangeGroupSelector} groups based on + * the value of a {@link org.apache.lucene.search.LongValuesSource}
  • *
* *

Known limitations:

@@ -137,17 +139,10 @@ * writer.addDocuments(oneGroup); * * - * Then, at search time, do this up front: + * Then, at search time: * *
- *   // Set this once in your app & save away for reusing across all queries:
- *   Filter groupEndDocs = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("groupEnd", "x"))));
- * 
- * - * Finally, do this per search: - * - *
- *   // Per search:
+ *   Query groupEndDocs = new TermQuery(new Term("groupEnd", "x"));
  *   BlockGroupingCollector c = new BlockGroupingCollector(groupSort, groupOffset+topNGroups, needsScores, groupEndDocs);
  *   s.search(new TermQuery(new Term("content", searchTerm)), c);
  *   TopGroups groupsResult = c.getTopGroups(withinGroupSort, groupOffset, docOffset, docOffset+docsPerGroup, fillFields);
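For orientation, the two range selectors documented above plug into GroupingSearch exactly like the term-based selector; the following is a minimal sketch distilled from the tests added later in this change (the `price` and `text` field names are illustrative, and an already-open `IndexSearcher searcher` is assumed):

```java
import org.apache.lucene.index.Term;
import org.apache.lucene.search.LongValuesSource;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.grouping.GroupingSearch;
import org.apache.lucene.search.grouping.LongRange;
import org.apache.lucene.search.grouping.LongRangeFactory;
import org.apache.lucene.search.grouping.LongRangeGroupSelector;
import org.apache.lucene.search.grouping.TopGroups;

// Bucket matching documents by "price": buckets 100 wide from 100 to 900,
// plus underflow/overflow buckets, and a null group for docs with no value.
GroupingSearch grouper = new GroupingSearch(
    new LongRangeGroupSelector(LongValuesSource.fromLongField("price"),
        new LongRangeFactory(100, 100, 900)));
grouper.setGroupDocsLimit(10);  // top documents kept per group

Query query = new TermQuery(new Term("text", "lucene"));
TopGroups<LongRange> groups = grouper.search(searcher, query, 0, 5);
```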
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/AbstractGroupingTestCase.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/AbstractGroupingTestCase.java
index 3c3b9f900d60..e25666734db3 100644
--- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/AbstractGroupingTestCase.java
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/AbstractGroupingTestCase.java
@@ -16,6 +16,15 @@
  */
 package org.apache.lucene.search.grouping;
 
+import java.io.Closeable;
+import java.io.IOException;
+
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
 
@@ -36,4 +45,40 @@ protected String generateRandomNonEmptyString() {
     } while ("".equals(randomValue));
     return randomValue;
   }
+
+  protected static void assertScoreDocsEquals(ScoreDoc[] expected, ScoreDoc[] actual) {
+    assertEquals(expected.length, actual.length);
+    for (int i = 0; i < expected.length; i++) {
+      assertEquals(expected[i].doc, actual[i].doc);
+      assertEquals(expected[i].score, actual[i].score, 0);
+    }
+  }
+
+  protected static class Shard implements Closeable {
+
+    final Directory directory;
+    final RandomIndexWriter writer;
+    IndexSearcher searcher;
+
+    Shard() throws IOException {
+      this.directory = newDirectory();
+      this.writer = new RandomIndexWriter(random(), directory,
+          newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
+    }
+
+    IndexSearcher getIndexSearcher() throws IOException {
+      if (searcher == null) {
+        searcher = new IndexSearcher(this.writer.getReader());
+      }
+      return searcher;
+    }
+
+    @Override
+    public void close() throws IOException {
+      if (searcher != null) {
+        searcher.getIndexReader().close();
+      }
+      IOUtils.close(writer, directory);
+    }
+  }
 }
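The Shard helper above gives each test a self-contained directory, writer, and lazily-opened searcher; condensed, the pattern the new tests below follow is roughly:

```java
// Sketch of the intended usage (mirrors the test cases that follow):
Shard shard = new Shard();                          // fresh directory + RandomIndexWriter
// ... add documents via shard.writer ...
IndexSearcher searcher = shard.getIndexSearcher();  // opens a reader on first call
// ... run searches and assert on the results ...
shard.close();  // closes the reader (if opened), then the writer and directory
```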
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/BaseGroupSelectorTestCase.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/BaseGroupSelectorTestCase.java
new file mode 100644
index 000000000000..bb2a946f2a47
--- /dev/null
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/BaseGroupSelectorTestCase.java
@@ -0,0 +1,365 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.search.grouping;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedDocValuesField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.FieldDoc;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.BytesRef;
+
+public abstract class BaseGroupSelectorTestCase<T> extends AbstractGroupingTestCase {
+
+  protected abstract void addGroupField(Document document, int id);
+
+  protected abstract GroupSelector<T> getGroupSelector();
+
+  protected abstract Query filterQuery(T groupValue);
+
+  public void testSortByRelevance() throws IOException {
+
+    Shard shard = new Shard();
+    indexRandomDocs(shard.writer);
+
+    String[] query = new String[]{ "foo", "bar", "baz" };
+    Query topLevel = new TermQuery(new Term("text", query[random().nextInt(query.length)]));
+
+    IndexSearcher searcher = shard.getIndexSearcher();
+    GroupingSearch grouper = new GroupingSearch(getGroupSelector());
+    grouper.setGroupDocsLimit(10);
+    TopGroups<T> topGroups = grouper.search(searcher, topLevel, 0, 5);
+    TopDocs topDoc = searcher.search(topLevel, 1);
+    for (int i = 0; i < topGroups.groups.length; i++) {
+      // Each group should have a result set equal to that returned by the top-level query,
+      // filtered by the group value.
+      Query filtered = new BooleanQuery.Builder()
+          .add(topLevel, BooleanClause.Occur.MUST)
+          .add(filterQuery(topGroups.groups[i].groupValue), BooleanClause.Occur.FILTER)
+          .build();
+      TopDocs td = searcher.search(filtered, 10);
+      assertScoreDocsEquals(topGroups.groups[i].scoreDocs, td.scoreDocs);
+      if (i == 0) {
+        assertEquals(td.scoreDocs[0].doc, topDoc.scoreDocs[0].doc);
+        assertEquals(td.scoreDocs[0].score, topDoc.scoreDocs[0].score, 0);
+      }
+    }
+
+    shard.close();
+  }
+
+  public void testSortGroups() throws IOException {
+
+    Shard shard = new Shard();
+    indexRandomDocs(shard.writer);
+    IndexSearcher searcher = shard.getIndexSearcher();
+
+    String[] query = new String[]{ "foo", "bar", "baz" };
+    Query topLevel = new TermQuery(new Term("text", query[random().nextInt(query.length)]));
+
+    GroupingSearch grouper = new GroupingSearch(getGroupSelector());
+    grouper.setGroupDocsLimit(10);
+    Sort sort = new Sort(new SortField("sort1", SortField.Type.STRING), new SortField("sort2", SortField.Type.LONG));
+    grouper.setGroupSort(sort);
+    TopGroups<T> topGroups = grouper.search(searcher, topLevel, 0, 5);
+    TopDocs topDoc = searcher.search(topLevel, 1, sort);
+    for (int i = 0; i < topGroups.groups.length; i++) {
+      // We're sorting the groups by a defined Sort, but each group itself should be ordered
+      // by doc relevance, and should be equal to the results of a top-level query filtered
+      // by the group value
+      Query filtered = new BooleanQuery.Builder()
+          .add(topLevel, BooleanClause.Occur.MUST)
+          .add(filterQuery(topGroups.groups[i].groupValue), BooleanClause.Occur.FILTER)
+          .build();
+      TopDocs td = searcher.search(filtered, 10);
+      assertScoreDocsEquals(topGroups.groups[i].scoreDocs, td.scoreDocs);
+      // The top group should have sort values equal to the sort values of the top doc of
+      // a top-level search sorted by the same Sort; subsequent groups should have sort values
+      // that compare lower than their predecessor.
+      if (i > 0) {
+        assertSortsBefore(topGroups.groups[i - 1], topGroups.groups[i]);
+      } else {
+        assertArrayEquals(((FieldDoc)topDoc.scoreDocs[0]).fields, topGroups.groups[0].groupSortValues);
+      }
+    }
+
+    shard.close();
+  }
+
+  public void testSortWithinGroups() throws IOException {
+
+    Shard shard = new Shard();
+    indexRandomDocs(shard.writer);
+    IndexSearcher searcher = shard.getIndexSearcher();
+
+    String[] query = new String[]{ "foo", "bar", "baz" };
+    Query topLevel = new TermQuery(new Term("text", query[random().nextInt(query.length)]));
+
+    GroupingSearch grouper = new GroupingSearch(getGroupSelector());
+    grouper.setGroupDocsLimit(10);
+    Sort sort = new Sort(new SortField("sort1", SortField.Type.STRING), new SortField("sort2", SortField.Type.LONG));
+    grouper.setSortWithinGroup(sort);
+
+    TopGroups<T> topGroups = grouper.search(searcher, topLevel, 0, 5);
+    TopDocs topDoc = searcher.search(topLevel, 1);
+
+    for (int i = 0; i < topGroups.groups.length; i++) {
+      // Check top-level ordering by score: first group's maxScore should be equal to the
+      // top score returned by a simple search with no grouping; subsequent groups should
+      // all have equal or lower maxScores
+      if (i == 0) {
+        assertEquals(topDoc.scoreDocs[0].score, topGroups.groups[0].maxScore, 0);
+      } else {
+        assertTrue(topGroups.groups[i].maxScore <= topGroups.groups[i - 1].maxScore);
+      }
+      // Groups themselves are ordered by a defined Sort, and each should give the same result as
+      // the top-level query, filtered by the group value, with the same Sort
+      Query filtered = new BooleanQuery.Builder()
+          .add(topLevel, BooleanClause.Occur.MUST)
+          .add(filterQuery(topGroups.groups[i].groupValue), BooleanClause.Occur.FILTER)
+          .build();
+      TopDocs td = searcher.search(filtered, 10, sort);
+      assertScoreDocsEquals(td.scoreDocs, topGroups.groups[i].scoreDocs);
+    }
+
+    shard.close();
+
+  }
+
+  public void testGroupHeads() throws IOException {
+
+    Shard shard = new Shard();
+    indexRandomDocs(shard.writer);
+    IndexSearcher searcher = shard.getIndexSearcher();
+
+    String[] query = new String[]{ "foo", "bar", "baz" };
+    Query topLevel = new TermQuery(new Term("text", query[random().nextInt(query.length)]));
+
+    GroupSelector<T> groupSelector = getGroupSelector();
+    GroupingSearch grouping = new GroupingSearch(groupSelector);
+    grouping.setAllGroups(true);
+    grouping.setAllGroupHeads(true);
+
+    grouping.search(searcher, topLevel, 0, 1);
+    Collection<T> matchingGroups = grouping.getAllMatchingGroups();
+
+    // The number of hits from the top-level query should equal the sum of
+    // the number of hits from the query filtered by each group value in turn
+    int totalHits = searcher.count(topLevel);
+    int groupHits = 0;
+    for (T groupValue : matchingGroups) {
+      Query filtered = new BooleanQuery.Builder()
+          .add(topLevel, BooleanClause.Occur.MUST)
+          .add(filterQuery(groupValue), BooleanClause.Occur.FILTER)
+          .build();
+      groupHits += searcher.count(filtered);
+    }
+    assertEquals(totalHits, groupHits);
+
+    Bits groupHeads = grouping.getAllGroupHeads();
+    int cardinality = 0;
+    for (int i = 0; i < groupHeads.length(); i++) {
+      if (groupHeads.get(i)) {
+        cardinality++;
+      }
+    }
+    assertEquals(matchingGroups.size(), cardinality);   // We should have one set bit per matching group
+
+    // Each group head should correspond to the topdoc of a search filtered by
+    // that group
+    for (T groupValue : matchingGroups) {
+      Query filtered = new BooleanQuery.Builder()
+          .add(topLevel, BooleanClause.Occur.MUST)
+          .add(filterQuery(groupValue), BooleanClause.Occur.FILTER)
+          .build();
+      TopDocs td = searcher.search(filtered, 1);
+      assertTrue(groupHeads.get(td.scoreDocs[0].doc));
+    }
+
+    shard.close();
+  }
+
+  public void testGroupHeadsWithSort() throws IOException {
+
+    Shard shard = new Shard();
+    indexRandomDocs(shard.writer);
+    IndexSearcher searcher = shard.getIndexSearcher();
+
+    String[] query = new String[]{ "foo", "bar", "baz" };
+    Query topLevel = new TermQuery(new Term("text", query[random().nextInt(query.length)]));
+
+    Sort sort = new Sort(new SortField("sort1", SortField.Type.STRING), new SortField("sort2", SortField.Type.LONG));
+    GroupSelector<T> groupSelector = getGroupSelector();
+    GroupingSearch grouping = new GroupingSearch(groupSelector);
+    grouping.setAllGroups(true);
+    grouping.setAllGroupHeads(true);
+    grouping.setSortWithinGroup(sort);
+
+    grouping.search(searcher, topLevel, 0, 1);
+    Collection<T> matchingGroups = grouping.getAllMatchingGroups();
+
+    Bits groupHeads = grouping.getAllGroupHeads();
+    int cardinality = 0;
+    for (int i = 0; i < groupHeads.length(); i++) {
+      if (groupHeads.get(i)) {
+        cardinality++;
+      }
+    }
+    assertEquals(matchingGroups.size(), cardinality);   // We should have one set bit per matching group
+
+    // Each group head should correspond to the topdoc of a search filtered by
+    // that group using the same within-group sort
+    for (T groupValue : matchingGroups) {
+      Query filtered = new BooleanQuery.Builder()
+          .add(topLevel, BooleanClause.Occur.MUST)
+          .add(filterQuery(groupValue), BooleanClause.Occur.FILTER)
+          .build();
+      TopDocs td = searcher.search(filtered, 1, sort);
+      assertTrue(groupHeads.get(td.scoreDocs[0].doc));
+    }
+
+    shard.close();
+  }
+
+  public void testShardedGrouping() throws IOException {
+
+    Shard control = new Shard();
+
+    int shardCount = random().nextInt(3) + 2; // between 2 and 4 shards
+    Shard[] shards = new Shard[shardCount];
+    for (int i = 0; i < shardCount; i++) {
+      shards[i] = new Shard();
+    }
+
+    String[] texts = new String[]{ "foo", "bar", "bar baz", "foo foo bar" };
+
+    // Create a bunch of random documents, and index them - once into the control index,
+    // and once into a randomly picked shard.
+
+    int numDocs = atLeast(200);
+    for (int i = 0; i < numDocs; i++) {
+      Document doc = new Document();
+      doc.add(new NumericDocValuesField("id", i));
+      doc.add(new TextField("name", Integer.toString(i), Field.Store.YES));
+      doc.add(new TextField("text", texts[random().nextInt(texts.length)], Field.Store.NO));
+      doc.add(new SortedDocValuesField("sort1", new BytesRef("sort" + random().nextInt(4))));
+      doc.add(new NumericDocValuesField("sort2", random().nextLong()));
+      addGroupField(doc, i);
+      control.writer.addDocument(doc);
+      int shard = random().nextInt(shardCount);
+      shards[shard].writer.addDocument(doc);
+    }
+
+    String[] query = new String[]{ "foo", "bar", "baz" };
+    Query topLevel = new TermQuery(new Term("text", query[random().nextInt(query.length)]));
+
+    Sort sort = new Sort(new SortField("sort1", SortField.Type.STRING), new SortField("sort2", SortField.Type.LONG));
+
+    // A grouped query run in two phases against the control should give us the same
+    // result as the query run against shards and merged back together after each phase.
+
+    FirstPassGroupingCollector<T> singletonFirstPass = new FirstPassGroupingCollector<>(getGroupSelector(), sort, 5);
+    control.getIndexSearcher().search(topLevel, singletonFirstPass);
+    Collection<SearchGroup<T>> singletonGroups = singletonFirstPass.getTopGroups(0);
+
+    List<Collection<SearchGroup<T>>> shardGroups = new ArrayList<>();
+    for (Shard shard : shards) {
+      FirstPassGroupingCollector<T> fc = new FirstPassGroupingCollector<>(getGroupSelector(), sort, 5);
+      shard.getIndexSearcher().search(topLevel, fc);
+      shardGroups.add(fc.getTopGroups(0));
+    }
+    Collection<SearchGroup<T>> mergedGroups = SearchGroup.merge(shardGroups, 0, 5, sort);
+    assertEquals(singletonGroups, mergedGroups);
+
+    TopGroupsCollector<T> singletonSecondPass = new TopGroupsCollector<>(getGroupSelector(), singletonGroups, sort,
+        Sort.RELEVANCE, 5, true);
+    control.getIndexSearcher().search(topLevel, singletonSecondPass);
+    TopGroups<T> singletonTopGroups = singletonSecondPass.getTopGroups(0);
+
+    // TODO why does SearchGroup.merge() take a list but TopGroups.merge() take an array?
+    @SuppressWarnings("unchecked")
+    TopGroups<T>[] shardTopGroups = (TopGroups<T>[]) new TopGroups[shards.length];
+    int j = 0;
+    for (Shard shard : shards) {
+      TopGroupsCollector<T> sc = new TopGroupsCollector<>(getGroupSelector(), mergedGroups, sort, Sort.RELEVANCE, 5, true);
+      shard.getIndexSearcher().search(topLevel, sc);
+      shardTopGroups[j] = sc.getTopGroups(0);
+      j++;
+    }
+    TopGroups<T> mergedTopGroups = TopGroups.merge(shardTopGroups, sort, Sort.RELEVANCE, 0, 5, TopGroups.ScoreMergeMode.None);
+    assertNotNull(mergedTopGroups);
+
+    assertEquals(singletonTopGroups.totalGroupedHitCount, mergedTopGroups.totalGroupedHitCount);
+    assertEquals(singletonTopGroups.totalHitCount, mergedTopGroups.totalHitCount);
+    assertEquals(singletonTopGroups.totalGroupCount, mergedTopGroups.totalGroupCount);
+    assertEquals(singletonTopGroups.groups.length, mergedTopGroups.groups.length);
+    for (int i = 0; i < singletonTopGroups.groups.length; i++) {
+      assertEquals(singletonTopGroups.groups[i].groupValue, mergedTopGroups.groups[i].groupValue);
+      assertEquals(singletonTopGroups.groups[i].scoreDocs.length, mergedTopGroups.groups[i].scoreDocs.length);
+    }
+
+    control.close();
+    for (Shard shard : shards) {
+      shard.close();
+    }
+
+  }
+
+  private void indexRandomDocs(RandomIndexWriter w) throws IOException {
+    String[] texts = new String[]{ "foo", "bar", "bar baz", "foo foo bar" };
+
+    int numDocs = atLeast(200);
+    for (int i = 0; i < numDocs; i++) {
+      Document doc = new Document();
+      doc.add(new NumericDocValuesField("id", i));
+      doc.add(new TextField("name", Integer.toString(i), Field.Store.YES));
+      doc.add(new TextField("text", texts[random().nextInt(texts.length)], Field.Store.NO));
+      doc.add(new SortedDocValuesField("sort1", new BytesRef("sort" + random().nextInt(4))));
+      doc.add(new NumericDocValuesField("sort2", random().nextLong()));
+      addGroupField(doc, i);
+      w.addDocument(doc);
+    }
+  }
+
+  private void assertSortsBefore(GroupDocs<T> first, GroupDocs<T> second) {
+    Object[] groupSortValues = second.groupSortValues;
+    Object[] prevSortValues = first.groupSortValues;
+    assertTrue(((BytesRef)prevSortValues[0]).compareTo((BytesRef)groupSortValues[0]) <= 0);
+    if (prevSortValues[0].equals(groupSortValues[0])) {
+      assertTrue((long)prevSortValues[1] <= (long)groupSortValues[1]);
+    }
+  }
+
+}
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/BlockGroupingTest.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/BlockGroupingTest.java
new file mode 100644
index 000000000000..fc03dfdd84a5
--- /dev/null
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/BlockGroupingTest.java
@@ -0,0 +1,225 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.search.grouping;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedDocValuesField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.FieldDoc;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.util.BytesRef;
+
+public class BlockGroupingTest extends AbstractGroupingTestCase {
+
+  public void testSimple() throws IOException {
+
+    Shard shard = new Shard();
+    indexRandomDocs(shard.writer);
+    IndexSearcher searcher = shard.getIndexSearcher();
+
+    Query blockEndQuery = new TermQuery(new Term("blockEnd", "true"));
+    GroupingSearch grouper = new GroupingSearch(blockEndQuery);
+    grouper.setGroupDocsLimit(10);
+
+    Query topLevel = new TermQuery(new Term("text", "grandmother"));
+    TopGroups<?> tg = grouper.search(searcher, topLevel, 0, 5);
+
+    // We're sorting by score, so the score of the top group should be the same as the
+    // score of the top document from the same query with no grouping
+    TopDocs topDoc = searcher.search(topLevel, 1);
+    assertEquals(topDoc.scoreDocs[0].score, tg.groups[0].scoreDocs[0].score, 0);
+    assertEquals(topDoc.scoreDocs[0].doc, tg.groups[0].scoreDocs[0].doc);
+
+    for (int i = 0; i < tg.groups.length; i++) {
+      String bookName = searcher.doc(tg.groups[i].scoreDocs[0].doc).get("book");
+      // The contents of each group should be equal to the results of a search for
+      // that group alone
+      Query filtered = new BooleanQuery.Builder()
+          .add(topLevel, BooleanClause.Occur.MUST)
+          .add(new TermQuery(new Term("book", bookName)), BooleanClause.Occur.FILTER)
+          .build();
+      TopDocs td = searcher.search(filtered, 10);
+      assertScoreDocsEquals(td.scoreDocs, tg.groups[i].scoreDocs);
+    }
+
+    shard.close();
+
+  }
+
+  public void testTopLevelSort() throws IOException {
+
+    Shard shard = new Shard();
+    indexRandomDocs(shard.writer);
+    IndexSearcher searcher = shard.getIndexSearcher();
+
+    Sort sort = new Sort(new SortField("length", SortField.Type.LONG));
+
+    Query blockEndQuery = new TermQuery(new Term("blockEnd", "true"));
+    GroupingSearch grouper = new GroupingSearch(blockEndQuery);
+    grouper.setGroupDocsLimit(10);
+    grouper.setGroupSort(sort);     // groups returned sorted by length, chapters within group sorted by relevancy
+
+    Query topLevel = new TermQuery(new Term("text", "grandmother"));
+    TopGroups<?> tg = grouper.search(searcher, topLevel, 0, 5);
+
+    // The sort value of the top doc in the top group should be the same as the sort value
+    // of the top result from the same search done with no grouping
+    TopDocs topDoc = searcher.search(topLevel, 1, sort);
+    assertEquals(((FieldDoc)topDoc.scoreDocs[0]).fields[0], tg.groups[0].groupSortValues[0]);
+
+    for (int i = 0; i < tg.groups.length; i++) {
+      String bookName = searcher.doc(tg.groups[i].scoreDocs[0].doc).get("book");
+      // The contents of each group should be equal to the results of a search for
+      // that group alone, sorted by score
+      Query filtered = new BooleanQuery.Builder()
+          .add(topLevel, BooleanClause.Occur.MUST)
+          .add(new TermQuery(new Term("book", bookName)), BooleanClause.Occur.FILTER)
+          .build();
+      TopDocs td = searcher.search(filtered, 10);
+      assertScoreDocsEquals(td.scoreDocs, tg.groups[i].scoreDocs);
+      if (i > 1) {
+        assertSortsBefore(tg.groups[i - 1], tg.groups[i]);
+      }
+    }
+
+    shard.close();
+
+  }
+
+  public void testWithinGroupSort() throws IOException {
+
+    Shard shard = new Shard();
+    indexRandomDocs(shard.writer);
+    IndexSearcher searcher = shard.getIndexSearcher();
+
+    Sort sort = new Sort(new SortField("length", SortField.Type.LONG));
+
+    Query blockEndQuery = new TermQuery(new Term("blockEnd", "true"));
+    GroupingSearch grouper = new GroupingSearch(blockEndQuery);
+    grouper.setGroupDocsLimit(10);
+    grouper.setSortWithinGroup(sort);     // groups returned sorted by relevancy, chapters within group sorted by length
+
+    Query topLevel = new TermQuery(new Term("text", "grandmother"));
+    TopGroups<?> tg = grouper.search(searcher, topLevel, 0, 5);
+
+    // We're sorting by score, so the score of the top group should be the same as the
+    // score of the top document from the same query with no grouping
+    TopDocs topDoc = searcher.search(topLevel, 1);
+    assertEquals(topDoc.scoreDocs[0].score, (float)tg.groups[0].groupSortValues[0], 0);
+
+    for (int i = 0; i < tg.groups.length; i++) {
+      String bookName = searcher.doc(tg.groups[i].scoreDocs[0].doc).get("book");
+      // The contents of each group should be equal to the results of a search for
+      // that group alone, sorted by length
+      Query filtered = new BooleanQuery.Builder()
+          .add(topLevel, BooleanClause.Occur.MUST)
+          .add(new TermQuery(new Term("book", bookName)), BooleanClause.Occur.FILTER)
+          .build();
+      TopDocs td = searcher.search(filtered, 10, sort);
+      assertFieldDocsEquals(td.scoreDocs, tg.groups[i].scoreDocs);
+      // We're sorting by score, so the group sort value for each group should be a float,
+      // and the value for the previous group should be higher or equal to the value for this one
+      if (i > 0) {
+        float prevScore = (float) tg.groups[i - 1].groupSortValues[0];
+        float thisScore = (float) tg.groups[i].groupSortValues[0];
+        assertTrue(prevScore >= thisScore);
+      }
+    }
+
+    shard.close();
+  }
+
+  private static void indexRandomDocs(RandomIndexWriter writer) throws IOException {
+    int bookCount = atLeast(20);
+    for (int i = 0; i < bookCount; i++) {
+      writer.addDocuments(createRandomBlock(i));
+    }
+  }
+
+  private static List<Document> createRandomBlock(int book) {
+    List<Document> block = new ArrayList<>();
+    String bookName = "book" + book;
+    int chapterCount = atLeast(10);
+    for (int j = 0; j < chapterCount; j++) {
+      Document doc = new Document();
+      String chapterName = "chapter" + j;
+      String chapterText = randomText();
+      doc.add(new TextField("book", bookName, Field.Store.YES));
+      doc.add(new TextField("chapter", chapterName, Field.Store.YES));
+      doc.add(new TextField("text", chapterText, Field.Store.NO));
+      doc.add(new NumericDocValuesField("length", chapterText.length()));
+      doc.add(new SortedDocValuesField("book", new BytesRef(bookName)));
+      if (j == chapterCount - 1) {
+        doc.add(new TextField("blockEnd", "true", Field.Store.NO));
+      }
+      block.add(doc);
+    }
+    return block;
+  }
+
+  private static final String[] TEXT = new String[]{
+      "It was the day my grandmother exploded",
+      "It was the best of times, it was the worst of times",
+      "It was a bright cold morning in April",
+      "It is a truth universally acknowledged",
+      "I have just returned from a visit to my landlord",
+      "I've been here and I've been there"
+  };
+
+  private static String randomText() {
+    StringBuilder sb = new StringBuilder(TEXT[random().nextInt(TEXT.length)]);
+    int sentences = random().nextInt(20);
+    for (int i = 0; i < sentences; i++) {
+      sb.append(" ").append(TEXT[random().nextInt(TEXT.length)]);
+    }
+    return sb.toString();
+  }
+
+  private void assertSortsBefore(GroupDocs<?> first, GroupDocs<?> second) {
+    Object[] groupSortValues = second.groupSortValues;
+    Object[] prevSortValues = first.groupSortValues;
+    assertTrue(((Long)prevSortValues[0]).compareTo((Long)groupSortValues[0]) <= 0);
+  }
+
+  protected static void assertFieldDocsEquals(ScoreDoc[] expected, ScoreDoc[] actual) {
+    assertEquals(expected.length, actual.length);
+    for (int i = 0; i < expected.length; i++) {
+      assertEquals(expected[i].doc, actual[i].doc);
+      FieldDoc e = (FieldDoc) expected[i];
+      FieldDoc a = (FieldDoc) actual[i];
+      assertArrayEquals(e.fields, a.fields);
+    }
+  }
+
+}
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/DoubleRangeGroupSelectorTest.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/DoubleRangeGroupSelectorTest.java
new file mode 100644
index 000000000000..c9385898ade5
--- /dev/null
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/DoubleRangeGroupSelectorTest.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.search.grouping;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoublePoint;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.DocValuesFieldExistsQuery;
+import org.apache.lucene.search.DoubleValuesSource;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+
+public class DoubleRangeGroupSelectorTest extends BaseGroupSelectorTestCase<DoubleRange> {
+
+  @Override
+  protected void addGroupField(Document document, int id) {
+    if (rarely()) {
+      return;   // missing value
+    }
+    // numbers between 0 and 1000, groups are 100 wide from 100 to 900
+    double value = random().nextDouble() * 1000;
+    document.add(new DoublePoint("double", value));
+    document.add(new NumericDocValuesField("double", Double.doubleToLongBits(value)));
+  }
+
+  @Override
+  protected GroupSelector<DoubleRange> getGroupSelector() {
+    return new DoubleRangeGroupSelector(DoubleValuesSource.fromDoubleField("double"),
+        new DoubleRangeFactory(100, 100, 900));
+  }
+
+  @Override
+  protected Query filterQuery(DoubleRange groupValue) {
+    if (groupValue == null) {
+      return new BooleanQuery.Builder()
+          .add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER)
+          .add(new DocValuesFieldExistsQuery("double"), BooleanClause.Occur.MUST_NOT)
+          .build();
+    }
+    return DoublePoint.newRangeQuery("double", groupValue.min, Math.nextDown(groupValue.max));
+  }
+}
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/LongRangeGroupSelectorTest.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/LongRangeGroupSelectorTest.java
new file mode 100644
index 000000000000..6075f700a28f
--- /dev/null
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/LongRangeGroupSelectorTest.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.search.grouping;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.DocValuesFieldExistsQuery;
+import org.apache.lucene.search.LongValuesSource;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+
+public class LongRangeGroupSelectorTest extends BaseGroupSelectorTestCase<LongRange> {
+
+  @Override
+  protected void addGroupField(Document document, int id) {
+    if (rarely()) {
+      return; // missing value
+    }
+    // numbers between 0 and 1000, groups are 100 wide from 100 to 900
+    long value = random().nextInt(1000);
+    document.add(new LongPoint("long", value));
+    document.add(new NumericDocValuesField("long", value));
+  }
+
+  @Override
+  protected GroupSelector<LongRange> getGroupSelector() {
+    return new LongRangeGroupSelector(LongValuesSource.fromLongField("long"),
+        new LongRangeFactory(100, 100, 900));
+  }
+
+  @Override
+  protected Query filterQuery(LongRange groupValue) {
+    if (groupValue == null) {
+      return new BooleanQuery.Builder()
+          .add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER)
+          .add(new DocValuesFieldExistsQuery("long"), BooleanClause.Occur.MUST_NOT)
+          .build();
+    }
+    return LongPoint.newRangeQuery("long", groupValue.min, groupValue.max - 1);
+  }
+}
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TermGroupSelectorTest.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TermGroupSelectorTest.java
new file mode 100644
index 000000000000..831a0147d1d0
--- /dev/null
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TermGroupSelectorTest.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.search.grouping;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.SortedDocValuesField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.DocValuesFieldExistsQuery;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.util.BytesRef;
+
+public class TermGroupSelectorTest extends BaseGroupSelectorTestCase<BytesRef> {
+
+  @Override
+  protected void addGroupField(Document document, int id) {
+    if (rarely()) {
+      return;   // missing value
+    }
+    String groupValue = "group" + random().nextInt(10);
+    document.add(new SortedDocValuesField("groupField", new BytesRef(groupValue)));
+    document.add(new TextField("groupField", groupValue, Field.Store.NO));
+  }
+
+  @Override
+  protected GroupSelector<BytesRef> getGroupSelector() {
+    return new TermGroupSelector("groupField");
+  }
+
+  @Override
+  protected Query filterQuery(BytesRef groupValue) {
+    if (groupValue == null) {
+      return new BooleanQuery.Builder()
+          .add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER)
+          .add(new DocValuesFieldExistsQuery("groupField"), BooleanClause.Occur.MUST_NOT)
+          .build();
+    }
+    return new TermQuery(new Term("groupField", groupValue));
+  }
+}
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestDoubleRangeFactory.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestDoubleRangeFactory.java
new file mode 100644
index 000000000000..d6e05c336426
--- /dev/null
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestDoubleRangeFactory.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.search.grouping;
+
+import org.apache.lucene.util.LuceneTestCase;
+
+public class TestDoubleRangeFactory extends LuceneTestCase {
+
+  public void test() {
+
+    DoubleRangeFactory factory = new DoubleRangeFactory(10, 10, 50);
+    DoubleRange scratch = new DoubleRange(0, 0);
+
+    assertEquals(new DoubleRange(Double.MIN_VALUE, 10), factory.getRange(4, scratch));
+    assertEquals(new DoubleRange(10, 20), factory.getRange(10, scratch));
+    assertEquals(new DoubleRange(20, 30), factory.getRange(20, scratch));
+    assertEquals(new DoubleRange(10, 20), factory.getRange(15, scratch));
+    assertEquals(new DoubleRange(30, 40), factory.getRange(35, scratch));
+    assertEquals(new DoubleRange(50, Double.MAX_VALUE), factory.getRange(50, scratch));
+    assertEquals(new DoubleRange(50, Double.MAX_VALUE), factory.getRange(500, scratch));
+
+  }
+
+}
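The expected values above pin down the bucketing contract: values below min fall into an underflow bucket capped at min, values at or above max fall into an overflow bucket, and everything in between lands in a half-open window of the given width aligned to min. A standalone re-derivation of that arithmetic (a hypothetical helper written for illustration, not the actual DoubleRangeFactory source):

```java
// Hypothetical: reproduces the mapping the assertions above encode, ignoring
// the scratch-object reuse that DoubleRangeFactory.getRange() supports.
static DoubleRange bucketOf(double value, double min, double width, double max) {
  if (value < min) {
    return new DoubleRange(Double.MIN_VALUE, min);  // underflow bucket
  }
  if (value >= max) {
    return new DoubleRange(max, Double.MAX_VALUE);  // overflow bucket
  }
  double bottom = min + Math.floor((value - min) / width) * width;
  return new DoubleRange(bottom, bottom + width);   // half-open [bottom, bottom + width)
}
```

TestLongRangeFactory below exercises the same contract for longs, with Long.MIN_VALUE and Long.MAX_VALUE as the sentinel bounds.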
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
index f1ce508e134a..4ef6c02d70e7 100644
--- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
@@ -287,9 +287,11 @@ private void compareGroupValue(String expected, GroupDocs<?> group) {
 
   private Collection<SearchGroup<BytesRef>> getSearchGroups(FirstPassGroupingCollector<?> c, int groupOffset) throws IOException {
     if (TermGroupSelector.class.isAssignableFrom(c.getGroupSelector().getClass())) {
+      @SuppressWarnings("unchecked")
       FirstPassGroupingCollector<BytesRef> collector = (FirstPassGroupingCollector<BytesRef>) c;
       return collector.getTopGroups(groupOffset);
     } else if (ValueSourceGroupSelector.class.isAssignableFrom(c.getGroupSelector().getClass())) {
+      @SuppressWarnings("unchecked")
       FirstPassGroupingCollector<MutableValue> collector = (FirstPassGroupingCollector<MutableValue>) c;
       Collection<SearchGroup<MutableValue>> mutableValueGroups = collector.getTopGroups(groupOffset);
       if (mutableValueGroups == null) {
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestLongRangeFactory.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestLongRangeFactory.java
new file mode 100644
index 000000000000..0677631476eb
--- /dev/null
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestLongRangeFactory.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.search.grouping;
+
+import org.apache.lucene.util.LuceneTestCase;
+
+public class TestLongRangeFactory extends LuceneTestCase {
+
+  public void test() {
+
+    LongRangeFactory factory = new LongRangeFactory(10, 10, 50);
+    LongRange scratch = new LongRange(0, 0);
+
+    assertEquals(new LongRange(Long.MIN_VALUE, 10), factory.getRange(4, scratch));
+    assertEquals(new LongRange(10, 20), factory.getRange(10, scratch));
+    assertEquals(new LongRange(20, 30), factory.getRange(20, scratch));
+    assertEquals(new LongRange(10, 20), factory.getRange(15, scratch));
+    assertEquals(new LongRange(30, 40), factory.getRange(35, scratch));
+    assertEquals(new LongRange(50, Long.MAX_VALUE), factory.getRange(50, scratch));
+    assertEquals(new LongRange(50, Long.MAX_VALUE), factory.getRange(500, scratch));
+
+  }
+
+}
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/ValueSourceGroupSelectorTest.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/ValueSourceGroupSelectorTest.java
new file mode 100644
index 000000000000..941ad8a26c19
--- /dev/null
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/ValueSourceGroupSelectorTest.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.search.grouping;
+
+import java.util.HashMap;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.SortedDocValuesField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.queries.function.valuesource.SortedSetFieldSource;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.mutable.MutableValue;
+
+public class ValueSourceGroupSelectorTest extends BaseGroupSelectorTestCase<MutableValue> {
+
+  @Override
+  protected void addGroupField(Document document, int id) {
+    String groupValue = "group" + random().nextInt(10);
+    document.add(new SortedDocValuesField("groupField", new BytesRef(groupValue)));
+    document.add(new TextField("groupField", groupValue, Field.Store.NO));
+  }
+
+  @Override
+  protected GroupSelector<MutableValue> getGroupSelector() {
+    return new ValueSourceGroupSelector(new SortedSetFieldSource("groupField"), new HashMap<>());
+  }
+
+  @Override
+  protected Query filterQuery(MutableValue groupValue) {
+    return new TermQuery(new Term("groupField", groupValue.toObject().toString()));
+  }
+}
diff --git a/lucene/highlighter/build.gradle b/lucene/highlighter/build.gradle
index f369aef62c02..6e105d59b6e1 100644
--- a/lucene/highlighter/build.gradle
+++ b/lucene/highlighter/build.gradle
@@ -18,6 +18,8 @@
 
 apply plugin: 'java-library'
 
+description = 'Highlights search keywords in results'
+
 dependencies {
   api project(':lucene:core')
 
diff --git a/lucene/highlighter/src/java/org/apache/lucene/search/uhighlight/UnifiedHighlighter.java b/lucene/highlighter/src/java/org/apache/lucene/search/uhighlight/UnifiedHighlighter.java
index 74de2483b8a2..5d0dc94f271e 100644
--- a/lucene/highlighter/src/java/org/apache/lucene/search/uhighlight/UnifiedHighlighter.java
+++ b/lucene/highlighter/src/java/org/apache/lucene/search/uhighlight/UnifiedHighlighter.java
@@ -61,6 +61,7 @@
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.spans.SpanQuery;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.InPlaceMergeSorter;
 
 /**
@@ -643,6 +644,7 @@ protected Map<String,Object[]> highlightFieldsAsObjects(String[] fieldsIn, Quer
 
       batchDocIdx += fieldValsByDoc.size();
     }
+    IOUtils.close(indexReaderWithTermVecCache);
     assert docIdIter.docID() == DocIdSetIterator.NO_MORE_DOCS
         || docIdIter.nextDoc() == DocIdSetIterator.NO_MORE_DOCS;
 
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
index 0c3a0f65d95b..2e703175bd4f 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
@@ -1362,24 +1362,25 @@ public void testOverlapAnalyzer() throws Exception {
       public void run() throws Exception {
         HashMap<String,String> synonyms = new HashMap<>();
         synonyms.put("football", "soccer,footie");
-        Analyzer analyzer = new SynonymAnalyzer(synonyms);
+        try (Analyzer analyzer = new SynonymAnalyzer(synonyms)) {
 
-        String s = "football-soccer in the euro 2004 footie competition";
+          String s = "football-soccer in the euro 2004 footie competition";
 
-        BooleanQuery.Builder query = new BooleanQuery.Builder();
-        query.add(new TermQuery(new Term("bookid", "football")), Occur.SHOULD);
-        query.add(new TermQuery(new Term("bookid", "soccer")), Occur.SHOULD);
-        query.add(new TermQuery(new Term("bookid", "footie")), Occur.SHOULD);
+          BooleanQuery.Builder query = new BooleanQuery.Builder();
+          query.add(new TermQuery(new Term("bookid", "football")), Occur.SHOULD);
+          query.add(new TermQuery(new Term("bookid", "soccer")), Occur.SHOULD);
+          query.add(new TermQuery(new Term("bookid", "footie")), Occur.SHOULD);
 
-        Highlighter highlighter = getHighlighter(query.build(), null, HighlighterTest.this);
+          Highlighter highlighter = getHighlighter(query.build(), null, HighlighterTest.this);
 
-        // Get 3 best fragments and separate with a "..."
-        TokenStream tokenStream = analyzer.tokenStream(null, s);
+          // Get 3 best fragments and separate with a "..."
+          TokenStream tokenStream = analyzer.tokenStream(null, s);
 
-        String result = highlighter.getBestFragments(tokenStream, s, 3, "...");
-        String expectedResult = "<B>football-soccer</B> in the euro 2004 <B>footie</B> competition";
-        assertTrue("overlapping analyzer should handle highlights OK, expected:" + expectedResult
-            + " actual:" + result, expectedResult.equals(result));
+          String result = highlighter.getBestFragments(tokenStream, s, 3, "...");
+          String expectedResult = "<B>football-soccer</B> in the euro 2004 <B>footie</B> competition";
+          assertTrue("overlapping analyzer should handle highlights OK, expected:" + expectedResult
+              + " actual:" + result, expectedResult.equals(result));
+        }
       }
 
     };
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/TokenSourcesTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/TokenSourcesTest.java
index 825133c3f71f..30cf7117de8f 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/TokenSourcesTest.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/TokenSourcesTest.java
@@ -391,6 +391,7 @@ public void testRandomizedRoundTrip() throws Exception {
       if (startOffsets[i] == startOffsets[i-1]) {
         if (VERBOSE)
           System.out.println("Skipping test because can't easily validate random token-stream is correct.");
+        rTokenStream.close();
         return;
       }
     }
@@ -438,6 +439,7 @@ public void testRandomizedRoundTrip() throws Exception {
 
     reader.close();
     dir.close();
+    rTokenStream.close();
   }
 
   public void testMaxStartOffsetConsistency() throws IOException {
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/custom/HighlightCustomQueryTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/custom/HighlightCustomQueryTest.java
index b8ce3dd48408..115a51a96737 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/custom/HighlightCustomQueryTest.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/custom/HighlightCustomQueryTest.java
@@ -105,17 +105,18 @@ protected void extractUnknownQuery(Query query, Map<String, WeightedSpanTerm> ter
    */
   private String highlightField(Query query, String fieldName,
       String text) throws IOException, InvalidTokenOffsetsException {
-    TokenStream tokenStream = new MockAnalyzer(random(), MockTokenizer.SIMPLE,
-        true, MockTokenFilter.ENGLISH_STOPSET).tokenStream(fieldName, text);
-    // Assuming "<B>", "</B>" used to highlight
-    SimpleHTMLFormatter formatter = new SimpleHTMLFormatter();
-    MyQueryScorer scorer = new MyQueryScorer(query, fieldName, FIELD_NAME);
-    Highlighter highlighter = new Highlighter(formatter, scorer);
-    highlighter.setTextFragmenter(new SimpleFragmenter(Integer.MAX_VALUE));
-
-    String rv = highlighter.getBestFragments(tokenStream, text, 1,
-        "(FIELD TEXT TRUNCATED)");
-    return rv.length() == 0 ? text : rv;
+    try (MockAnalyzer mockAnalyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true,
+        MockTokenFilter.ENGLISH_STOPSET); TokenStream tokenStream = mockAnalyzer.tokenStream(fieldName, text)) {
+      // Assuming "<B>", "</B>" used to highlight
+      SimpleHTMLFormatter formatter = new SimpleHTMLFormatter();
+      MyQueryScorer scorer = new MyQueryScorer(query, fieldName, FIELD_NAME);
+      Highlighter highlighter = new Highlighter(formatter, scorer);
+      highlighter.setTextFragmenter(new SimpleFragmenter(Integer.MAX_VALUE));
+
+      String rv = highlighter.getBestFragments(tokenStream, text, 1,
+          "(FIELD TEXT TRUNCATED)");
+      return rv.length() == 0 ? text : rv;
+    }
   }
 
   public static class MyWeightedSpanTermExtractor extends
diff --git a/lucene/ivy-versions.properties b/lucene/ivy-versions.properties
index 8ab9fe8604b4..761cb72a1e53 100644
--- a/lucene/ivy-versions.properties
+++ b/lucene/ivy-versions.properties
@@ -10,7 +10,7 @@ com.carrotsearch.randomizedtesting.version = 2.7.6
 
 /com.carrotsearch.thirdparty/simple-xml-safe = 2.7.1
 
-/com.carrotsearch/hppc = 0.8.1
+/com.carrotsearch/hppc = 0.8.2
 
 /com.cybozu.labs/langdetect = 1.1-20120112
 /com.drewnoakes/metadata-extractor = 2.11.0
@@ -23,7 +23,7 @@ com.fasterxml.jackson.core.version = 2.10.1
 /com.fasterxml.jackson.core/jackson-databind = ${com.fasterxml.jackson.core.version}
 /com.fasterxml.jackson.dataformat/jackson-dataformat-smile = ${com.fasterxml.jackson.core.version}
 
-/com.github.ben-manes.caffeine/caffeine = 2.8.0
+/com.github.ben-manes.caffeine/caffeine = 2.8.4
 /com.github.virtuald/curvesapi = 1.06
 
 /com.github.zafarkhaja/java-semver = 0.9.0
@@ -52,10 +52,9 @@ com.sun.jersey.version = 1.19
 
 /com.tdunning/t-digest = 3.1
 /com.vaadin.external.google/android-json = 0.0.20131108.vaadin1
-/commons-cli/commons-cli = 1.2
+/commons-cli/commons-cli = 1.4
 /commons-codec/commons-codec = 1.13
 /commons-collections/commons-collections = 3.2.2
-/commons-fileupload/commons-fileupload = 1.3.3
 /commons-io/commons-io = 2.6
 /commons-logging/commons-logging = 1.1.3
 /de.l3s.boilerpipe/boilerpipe = 1.1.0
@@ -179,7 +178,7 @@ org.apache.kerby.version = 1.0.1
 /org.apache.kerby/kerby-pkix = ${org.apache.kerby.version}
 /org.apache.kerby/kerby-util = ${org.apache.kerby.version}
 
-org.apache.logging.log4j.version = 2.11.2
+org.apache.logging.log4j.version = 2.13.2
 /org.apache.logging.log4j/log4j-1.2-api = ${org.apache.logging.log4j.version}
 /org.apache.logging.log4j/log4j-api = ${org.apache.logging.log4j.version}
 /org.apache.logging.log4j/log4j-core = ${org.apache.logging.log4j.version}
diff --git a/lucene/join/build.gradle b/lucene/join/build.gradle
index eab862245879..143daa13711a 100644
--- a/lucene/join/build.gradle
+++ b/lucene/join/build.gradle
@@ -17,6 +17,8 @@
 
 apply plugin: 'java-library'
 
+description = 'Index-time and Query-time joins for normalized content'
+
 dependencies {
   api project(':lucene:core')
   testImplementation project(':lucene:test-framework')
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinSortField.java b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinSortField.java
index fef167e62f93..605e859543b0 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinSortField.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinSortField.java
@@ -30,6 +30,8 @@
 import org.apache.lucene.util.BitSet;
 import org.apache.lucene.util.NumericUtils;
 
+import static org.apache.lucene.search.join.BlockJoinSelector.toIter;
+
 /**
  * A special sort field that allows sorting parent docs based on nested / child level fields.
  * Based on the sort order it either takes the document with the lowest or highest field value into account.
@@ -118,7 +120,7 @@ protected SortedDocValues getSortedDocValues(LeafReaderContext context, String f
         if (children == null) {
           return DocValues.emptySorted();
         }
-        return BlockJoinSelector.wrap(sortedSet, type, parents, children);
+        return BlockJoinSelector.wrap(sortedSet, type, parents, toIter(children));
       }
 
     };
@@ -137,7 +139,7 @@ protected NumericDocValues getNumericDocValues(LeafReaderContext context, String
         if (children == null) {
           return DocValues.emptyNumeric();
         }
-        return BlockJoinSelector.wrap(sortedNumeric, type, parents, children);
+        return BlockJoinSelector.wrap(sortedNumeric, type, parents, toIter(children));
       }
     };
   }
@@ -155,7 +157,7 @@ protected NumericDocValues getNumericDocValues(LeafReaderContext context, String
         if (children == null) {
           return DocValues.emptyNumeric();
         }
-        return BlockJoinSelector.wrap(sortedNumeric, type, parents, children);
+        return BlockJoinSelector.wrap(sortedNumeric, type, parents, toIter(children));
       }
     };
   }
@@ -173,7 +175,7 @@ protected NumericDocValues getNumericDocValues(LeafReaderContext context, String
         if (children == null) {
           return DocValues.emptyNumeric();
         }
-        return new FilterNumericDocValues(BlockJoinSelector.wrap(sortedNumeric, type, parents, children)) {
+        return new FilterNumericDocValues(BlockJoinSelector.wrap(sortedNumeric, type, parents, toIter(children))) {
           @Override
           public long longValue() throws IOException {
             // undo the numericutils sortability
@@ -197,7 +199,7 @@ protected NumericDocValues getNumericDocValues(LeafReaderContext context, String
         if (children == null) {
           return DocValues.emptyNumeric();
         }
-        return new FilterNumericDocValues(BlockJoinSelector.wrap(sortedNumeric, type, parents, children)) {
+        return new FilterNumericDocValues(BlockJoinSelector.wrap(sortedNumeric, type, parents, toIter(children))) {
           @Override
           public long longValue() throws IOException {
             // undo the numericutils sortability
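
These call sites change because BlockJoinSelector.wrap(...) now takes the child documents as a DocIdSetIterator rather than a BitSet, with toIter(...) adapting an existing BitSet. A hedged sketch of the adapted call, assuming toIter is accessible to the caller (both uses in this patch are in the same package as BlockJoinSelector):

    import org.apache.lucene.index.NumericDocValues;
    import org.apache.lucene.index.SortedNumericDocValues;
    import org.apache.lucene.search.join.BlockJoinSelector;
    import org.apache.lucene.util.BitSet;

    class WrapExample {
      static NumericDocValues minPerParent(SortedNumericDocValues values,
                                           BitSet parents, BitSet children) {
        // The child BitSet is adapted to a DocIdSetIterator; wrap() no longer
        // accepts the BitSet form directly.
        return BlockJoinSelector.wrap(values, BlockJoinSelector.Type.MIN,
            parents, BlockJoinSelector.toIter(children));
      }
    }
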
diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinSelector.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinSelector.java
index 838826fcbb4a..29e088bccbcc 100644
--- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinSelector.java
+++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinSelector.java
@@ -17,6 +17,7 @@
 package org.apache.lucene.search.join;
 
 import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
+import static org.apache.lucene.search.join.BlockJoinSelector.toIter;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -132,14 +133,14 @@ public void testSortedSelector() throws IOException {
     ords[12] = 10;
     ords[18] = 10;
 
-    final SortedDocValues mins = BlockJoinSelector.wrap(DocValues.singleton(new CannedSortedDocValues(ords)), BlockJoinSelector.Type.MIN, parents, children);
+    final SortedDocValues mins = BlockJoinSelector.wrap(DocValues.singleton(new CannedSortedDocValues(ords)), BlockJoinSelector.Type.MIN, parents, toIter(children));
     assertEquals(5, nextDoc(mins,5));
     assertEquals(3, mins.ordValue());
     assertEquals(15, nextDoc(mins,15));
     assertEquals(10, mins.ordValue());
     assertNoMoreDoc(mins, 20);
 
-    final SortedDocValues maxs = BlockJoinSelector.wrap(DocValues.singleton(new CannedSortedDocValues(ords)), BlockJoinSelector.Type.MAX, parents, children);
+    final SortedDocValues maxs = BlockJoinSelector.wrap(DocValues.singleton(new CannedSortedDocValues(ords)), BlockJoinSelector.Type.MAX, parents, toIter(children));
     assertEquals(5, nextDoc(maxs,5));
     assertEquals(7, maxs.ordValue());
     assertEquals(15, nextDoc(maxs,15));
@@ -246,14 +247,14 @@ public void testNumericSelector() throws Exception {
     docsWithValue.set(18);
     longs[18] = 10;
 
-    final NumericDocValues mins = BlockJoinSelector.wrap(DocValues.singleton(new CannedNumericDocValues(longs, docsWithValue)), BlockJoinSelector.Type.MIN, parents, children);
+    final NumericDocValues mins = BlockJoinSelector.wrap(DocValues.singleton(new CannedNumericDocValues(longs, docsWithValue)), BlockJoinSelector.Type.MIN, parents, toIter(children));
     assertEquals(5, nextDoc(mins,5));
     assertEquals(3, mins.longValue());
     assertEquals(15, nextDoc(mins,15));
     assertEquals(10, mins.longValue());
     assertNoMoreDoc(mins, 20);
 
-    final NumericDocValues maxs = BlockJoinSelector.wrap(DocValues.singleton(new CannedNumericDocValues(longs, docsWithValue)), BlockJoinSelector.Type.MAX, parents, children);
+    final NumericDocValues maxs = BlockJoinSelector.wrap(DocValues.singleton(new CannedNumericDocValues(longs, docsWithValue)), BlockJoinSelector.Type.MAX, parents, toIter(children));
     assertEquals(5, nextDoc(maxs, 5));
     assertEquals(7, maxs.longValue());
     assertEquals(15, nextDoc(maxs, 15));
diff --git a/lucene/licenses/hppc-0.8.1.jar.sha1 b/lucene/licenses/hppc-0.8.1.jar.sha1
deleted file mode 100644
index 7006e68f4482..000000000000
--- a/lucene/licenses/hppc-0.8.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ffc7ba8f289428b9508ab484b8001dea944ae603
diff --git a/lucene/licenses/hppc-0.8.2.jar.sha1 b/lucene/licenses/hppc-0.8.2.jar.sha1
new file mode 100644
index 000000000000..a73358b9c66f
--- /dev/null
+++ b/lucene/licenses/hppc-0.8.2.jar.sha1
@@ -0,0 +1 @@
+ccb3ef933ead6b5d766fa571582ddb9b447e48c4
diff --git a/lucene/licenses/log4j-api-2.11.2.jar.sha1 b/lucene/licenses/log4j-api-2.11.2.jar.sha1
deleted file mode 100644
index 0cdea100b72b..000000000000
--- a/lucene/licenses/log4j-api-2.11.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f5e9a2ffca496057d6891a3de65128efc636e26e
diff --git a/lucene/licenses/log4j-api-2.13.2.jar.sha1 b/lucene/licenses/log4j-api-2.13.2.jar.sha1
new file mode 100644
index 000000000000..a98264912453
--- /dev/null
+++ b/lucene/licenses/log4j-api-2.13.2.jar.sha1
@@ -0,0 +1 @@
+567ea514dedd8679c429c5b5b39b0d67b6464c3c
diff --git a/lucene/licenses/log4j-core-2.11.2.jar.sha1 b/lucene/licenses/log4j-core-2.11.2.jar.sha1
deleted file mode 100644
index ec2acae4df7f..000000000000
--- a/lucene/licenses/log4j-core-2.11.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6c2fb3f5b7cd27504726aef1b674b542a0c9cf53
diff --git a/lucene/licenses/log4j-core-2.13.2.jar.sha1 b/lucene/licenses/log4j-core-2.13.2.jar.sha1
new file mode 100644
index 000000000000..ce26d9c45a85
--- /dev/null
+++ b/lucene/licenses/log4j-core-2.13.2.jar.sha1
@@ -0,0 +1 @@
+8eb1fc1914eb2550bf3ddea26917c9a7cbb00593
diff --git a/lucene/luke/build.gradle b/lucene/luke/build.gradle
index aaa0d7336d90..6e32b1b35a5e 100644
--- a/lucene/luke/build.gradle
+++ b/lucene/luke/build.gradle
@@ -17,6 +17,8 @@
 
 apply plugin: 'java-library'
 
+description = 'Luke - Lucene Toolbox'
+
 dependencies {
   api project(':lucene:core')
 
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/app/AbstractHandler.java b/lucene/luke/src/java/org/apache/lucene/luke/app/AbstractHandler.java
index ab967a8d149d..bca88f18632d 100644
--- a/lucene/luke/src/java/org/apache/lucene/luke/app/AbstractHandler.java
+++ b/lucene/luke/src/java/org/apache/lucene/luke/app/AbstractHandler.java
@@ -33,7 +33,9 @@ public abstract class AbstractHandler {
 
   public void addObserver(T observer) {
     observers.add(observer);
-    log.debug("{} registered.", observer.getClass().getName());
+    if (log.isDebugEnabled()) {
+      log.debug("{} registered.", observer.getClass().getName());
+    }
   }
 
   void notifyObservers() {
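
The guard matters because even with a {} placeholder the argument expression (here observer.getClass().getName()) is evaluated before log.debug() is entered; isDebugEnabled() skips that work when the level is off. A small self-contained illustration of the pattern (class and logger names are illustrative):

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    class GuardExample {
      private static final Logger log = LogManager.getLogger(GuardExample.class);

      void register(Object observer) {
        // The {} placeholder defers message formatting, but the argument
        // expression observer.getClass().getName() is still evaluated before
        // debug() is called; the guard skips it when DEBUG is disabled.
        if (log.isDebugEnabled()) {
          log.debug("{} registered.", observer.getClass().getName());
        }
      }
    }
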
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/documents/AddDocumentDialogFactory.java b/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/documents/AddDocumentDialogFactory.java
index 0bbeb3eb6f5c..1b9508f79eb1 100644
--- a/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/documents/AddDocumentDialogFactory.java
+++ b/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/documents/AddDocumentDialogFactory.java
@@ -365,7 +365,7 @@ void addDocument(ActionEvent e) {
       }
 
       addDocument(doc);
-      log.info("Added document: {}", doc.toString());
+      log.info("Added document: {}", doc);
     }
 
     @SuppressWarnings("unchecked")
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/menubar/ExportTermsDialogFactory.java b/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/menubar/ExportTermsDialogFactory.java
index 471094223c1e..c091bea5c47d 100644
--- a/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/menubar/ExportTermsDialogFactory.java
+++ b/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/menubar/ExportTermsDialogFactory.java
@@ -248,10 +248,10 @@ protected Void doInBackground() {
           try {
             filename = toolsModel.exportTerms(directory, field, selectedDelimiter);
           } catch (LukeException e) {
-            log.error("Error while exporting terms from field " + field, e);
+            log.error("Error while exporting terms from field {}", field, e);
             statusLbl.setText(MessageUtils.getLocalizedMessage("export.terms.label.error", e.getMessage()));
           } catch (Exception e) {
-            log.error("Error while exporting terms from field " + field, e);
+            log.error("Error while exporting terms from field {}", field, e);
             statusLbl.setText(MessageUtils.getLocalizedMessage("message.error.unknown"));
             throw e;
           } finally {
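
Switching from string concatenation to a {} placeholder here relies on log4j2's (and SLF4J's) convention that a trailing Throwable argument is recorded as the exception, stack trace included, rather than being formatted into the message. A minimal sketch (the failing call is a stand-in):

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    class ExportExample {
      private static final Logger log = LogManager.getLogger(ExportExample.class);

      void export(String field) {
        try {
          throw new IllegalStateException("demo failure"); // stand-in for the real export
        } catch (Exception e) {
          // "field" fills the {} placeholder; "e", as the final argument, is
          // recorded as the Throwable with its stack trace, exactly as the
          // old concatenated form ("... " + field, e) was.
          log.error("Error while exporting terms from field {}", field, e);
        }
      }
    }
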
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentsImpl.java b/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentsImpl.java
index e4b25296fb4b..d2f6c9b5f860 100644
--- a/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentsImpl.java
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentsImpl.java
@@ -245,7 +245,9 @@ public Optional<Integer> nextTermDoc() {
       if (penum.nextDoc() == PostingsEnum.NO_MORE_DOCS) {
         // end of the iterator
         resetPostingsIterator();
-        log.info("Reached the end of the postings iterator for term: {} in field: {}", BytesRefUtils.decode(tenum.term()), curField);
+        if (log.isInfoEnabled()) {
+          log.info("Reached the end of the postings iterator for term: {} in field: {}", BytesRefUtils.decode(tenum.term()), curField);
+        }
         return Optional.empty();
       } else {
         return Optional.of(penum.docID());
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/util/IndexUtils.java b/lucene/luke/src/java/org/apache/lucene/luke/models/util/IndexUtils.java
index 71e8070af465..9b98e92b9ede 100644
--- a/lucene/luke/src/java/org/apache/lucene/luke/models/util/IndexUtils.java
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/util/IndexUtils.java
@@ -93,7 +93,9 @@ public FileVisitResult preVisitDirectory(Path path, BasicFileAttributes attrs) t
       throw new RuntimeException("No valid directory at the location: " + indexPath);
     }
 
-    log.info(String.format(Locale.ENGLISH, "IndexReaders (%d leaf readers) successfully opened. Index path=%s", readers.size(), indexPath));
+    if (log.isInfoEnabled()) {
+      log.info(String.format(Locale.ENGLISH, "IndexReaders (%d leaf readers) successfully opened. Index path=%s", readers.size(), indexPath));
+    }
 
     if (readers.size() == 1) {
       return readers.get(0);
@@ -115,7 +117,9 @@ public FileVisitResult preVisitDirectory(Path path, BasicFileAttributes attrs) t
   public static Directory openDirectory(String dirPath, String dirImpl) throws IOException {
     final Path path = FileSystems.getDefault().getPath(Objects.requireNonNull(dirPath));
     Directory dir = openDirectory(path, dirImpl);
-    log.info(String.format(Locale.ENGLISH, "DirectoryReader successfully opened. Directory path=%s", dirPath));
+    if (log.isInfoEnabled()) {
+      log.info(String.format(Locale.ENGLISH, "DirectoryReader successfully opened. Directory path=%s", dirPath));
+    }
     return dir;
   }
 
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/util/twentynewsgroups/MessageFilesParser.java b/lucene/luke/src/java/org/apache/lucene/luke/models/util/twentynewsgroups/MessageFilesParser.java
index 5a2fe7398496..a11997450d13 100644
--- a/lucene/luke/src/java/org/apache/lucene/luke/models/util/twentynewsgroups/MessageFilesParser.java
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/util/twentynewsgroups/MessageFilesParser.java
@@ -54,7 +54,7 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attr) {
         }
       }
     } catch (IOException e) {
-      log.warn("Invalid file? " + file.toString());
+      log.warn("Invalid file? {}", file);
     }
     return FileVisitResult.CONTINUE;
   }
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/util/reflection/SubtypeCollector.java b/lucene/luke/src/java/org/apache/lucene/luke/util/reflection/SubtypeCollector.java
index 4f22a871434d..de40a187eeff 100644
--- a/lucene/luke/src/java/org/apache/lucene/luke/util/reflection/SubtypeCollector.java
+++ b/lucene/luke/src/java/org/apache/lucene/luke/util/reflection/SubtypeCollector.java
@@ -84,7 +84,7 @@ public void run() {
           }
         }
       } catch (IOException e) {
-        log.error("Cannot load jar " + url.toString(), e);
+        log.error("Cannot load jar {}", url, e);
       }
     }
   }
diff --git a/lucene/memory/build.gradle b/lucene/memory/build.gradle
index a52f88cdbcd0..0d3bc1f7b03c 100644
--- a/lucene/memory/build.gradle
+++ b/lucene/memory/build.gradle
@@ -18,6 +18,8 @@
 
 apply plugin: 'java-library'
 
+description = 'Single-document in-memory index implementation'
+
 dependencies {
   api project(':lucene:core')
 
diff --git a/lucene/misc/build.gradle b/lucene/misc/build.gradle
index eab862245879..bdf575df5628 100644
--- a/lucene/misc/build.gradle
+++ b/lucene/misc/build.gradle
@@ -17,6 +17,8 @@
 
 apply plugin: 'java-library'
 
+description = 'Index tools and other miscellaneous code'
+
 dependencies {
   api project(':lucene:core')
   testImplementation project(':lucene:test-framework')
diff --git a/lucene/misc/src/java/org/apache/lucene/util/fst/UpToTwoPositiveIntOutputs.java b/lucene/misc/src/java/org/apache/lucene/util/fst/UpToTwoPositiveIntOutputs.java
index af1766e23b5b..a6e0a66fe088 100644
--- a/lucene/misc/src/java/org/apache/lucene/util/fst/UpToTwoPositiveIntOutputs.java
+++ b/lucene/misc/src/java/org/apache/lucene/util/fst/UpToTwoPositiveIntOutputs.java
@@ -82,7 +82,8 @@ public int hashCode() {
       return (int) ((first^(first>>>32)) ^ (second^(second>>32)));
     }
   }
-  
+
+  @SuppressWarnings("deprecation")
   private final static Long NO_OUTPUT = new Long(0);
 
   private final boolean doShare;
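
The @SuppressWarnings acknowledges that the Long(long) constructor is deprecated, but the constructor appears to be kept deliberately: NO_OUTPUT acts as a sentinel that must be a distinct instance, whereas Long.valueOf(0) would return the shared instance from the Long cache that ordinary zero outputs could also be. A short demonstration of the difference (the caching of values in -128..127 is guaranteed by the JLS):

    public class SentinelDemo {
      @SuppressWarnings("deprecation")
      public static void main(String[] args) {
        Long cached = Long.valueOf(0);   // comes from the shared Long cache
        Long sentinel = new Long(0);     // deprecated, but a fresh, unique instance
        System.out.println(cached == Long.valueOf(0));   // true: same cached object
        System.out.println(sentinel == Long.valueOf(0)); // false: distinct identity
        System.out.println(sentinel.equals(cached));     // true: still numerically equal
      }
    }
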
diff --git a/lucene/misc/src/test/org/apache/lucene/search/similarity/TestLegacyBM25Similarity.java b/lucene/misc/src/test/org/apache/lucene/search/similarity/TestLegacyBM25Similarity.java
index b3a0cd24ca13..9ffdc63b2edb 100644
--- a/lucene/misc/src/test/org/apache/lucene/search/similarity/TestLegacyBM25Similarity.java
+++ b/lucene/misc/src/test/org/apache/lucene/search/similarity/TestLegacyBM25Similarity.java
@@ -23,6 +23,7 @@
 import org.apache.lucene.search.similarities.BaseSimilarityTestCase;
 import org.apache.lucene.search.similarities.Similarity;
 
+@Deprecated
 public class TestLegacyBM25Similarity extends BaseSimilarityTestCase {
 
   public void testIllegalK1() {
diff --git a/lucene/monitor/build.gradle b/lucene/monitor/build.gradle
index cc30dbed8afb..3dd65a4da7a2 100644
--- a/lucene/monitor/build.gradle
+++ b/lucene/monitor/build.gradle
@@ -17,6 +17,8 @@
 
 apply plugin: 'java-library'
 
+description = 'Reverse-search implementation for monitoring and classification'
+
 dependencies {
   api project(':lucene:core')
 
diff --git a/lucene/queries/build.gradle b/lucene/queries/build.gradle
index 199b39b9f20d..64c2bf41dad0 100644
--- a/lucene/queries/build.gradle
+++ b/lucene/queries/build.gradle
@@ -17,6 +17,8 @@
 
 apply plugin: 'java-library'
 
+description = 'Filters and Queries that add to core Lucene'
+
 dependencies {
   api project(':lucene:core')
 
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionQuery.java b/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionQuery.java
index c804523bcb97..9bd9ccc3e34e 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionQuery.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionQuery.java
@@ -56,7 +56,7 @@ public ValueSource getValueSource() {
   protected class FunctionWeight extends Weight {
     protected final IndexSearcher searcher;
     protected final float boost;
-    protected final Map context;
+    protected final Map<Object, Object> context;
 
     public FunctionWeight(IndexSearcher searcher, float boost) throws IOException {
       super(FunctionQuery.this);
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionRangeQuery.java b/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionRangeQuery.java
index f72e12b9e1c8..7848dc615733 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionRangeQuery.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionRangeQuery.java
@@ -124,8 +124,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo
   }
 
   private class FunctionRangeWeight extends Weight {
-    @SuppressWarnings("rawtypes")
-    private final Map vsContext;
+    private final Map<Object, Object> vsContext;
 
     public FunctionRangeWeight(IndexSearcher searcher) throws IOException {
       super(FunctionRangeQuery.this);
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionScoreQuery.java b/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionScoreQuery.java
index 2ba77e2afb77..afd1db309969 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionScoreQuery.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionScoreQuery.java
@@ -105,7 +105,13 @@ public static FunctionScoreQuery boostByQuery(Query in, Query boostMatch, float
 
   @Override
   public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
-    Weight inner = in.createWeight(searcher, scoreMode.needsScores() && source.needsScores() ? scoreMode : ScoreMode.COMPLETE_NO_SCORES, 1f);
+    ScoreMode sm;
+    if (scoreMode.needsScores() && source.needsScores()) {
+      sm = ScoreMode.COMPLETE;
+    } else {
+      sm = ScoreMode.COMPLETE_NO_SCORES;
+    }
+    Weight inner = in.createWeight(searcher, sm, 1f);
     if (scoreMode.needsScores() == false)
       return inner;
     return new FunctionScoreWeight(this, inner, source.rewrite(searcher), boost);
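
The rewritten branch fixes the case where the caller's mode is a scoring mode other than COMPLETE (e.g. TOP_SCORES): the boost source may read the wrapped query's score, so the inner weight must be built with full scoring whenever both sides need scores, instead of forwarding the caller's mode unchanged. A minimal score-dependent query under the existing public API (the query construction is illustrative):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.queries.function.FunctionScoreQuery;
    import org.apache.lucene.search.DoubleValuesSource;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermQuery;

    class ScoreDependentBoost {
      static Query build() {
        Query in = new TermQuery(new Term("body", "lucene"));
        // DoubleValuesSource.SCORES reads the wrapped query's score, so
        // source.needsScores() is true and createWeight must now ask `in`
        // for ScoreMode.COMPLETE even if the top-level search used TOP_SCORES.
        return new FunctionScoreQuery(in, DoubleValuesSource.SCORES);
      }
    }
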
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/ValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/ValueSource.java
index 1ba35804002b..0b3421da4b44 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/ValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/ValueSource.java
@@ -50,7 +50,7 @@ public abstract class ValueSource {
    * docID manner, and you must call this method again to iterate through
    * the values again.
    */
-  public abstract FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException;
+  public abstract FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException;
 
   @Override
   public abstract boolean equals(Object o);
@@ -74,14 +74,14 @@ public String toString() {
    * weight info in the context. The context object will be passed to getValues()
    * where this info can be retrieved.
    */
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
   }
 
   /**
    * Returns a new non-threadsafe context map.
    */
-  public static Map newContext(IndexSearcher searcher) {
-    Map context = new IdentityHashMap();
+  public static Map<Object, Object> newContext(IndexSearcher searcher) {
+    Map<Object, Object> context = new IdentityHashMap<>();
     context.put("searcher", searcher);
     return context;
   }
@@ -119,7 +119,7 @@ private WrappedLongValuesSource(ValueSource in) {
 
     @Override
     public LongValues getValues(LeafReaderContext ctx, DoubleValues scores) throws IOException {
-      Map context = new IdentityHashMap<>();
+      Map<Object, Object> context = new IdentityHashMap<>();
       ScoreAndDoc scorer = new ScoreAndDoc();
       context.put("scorer", scorer);
       final FunctionValues fv = in.getValues(context, ctx);
@@ -196,7 +196,7 @@ private WrappedDoubleValuesSource(ValueSource in, IndexSearcher searcher) {
 
     @Override
     public DoubleValues getValues(LeafReaderContext ctx, DoubleValues scores) throws IOException {
-      Map context = new HashMap<>();
+      Map<Object, Object> context = new HashMap<>();
       ScoreAndDoc scorer = new ScoreAndDoc();
       context.put("scorer", scorer);
       context.put("searcher", searcher);
@@ -236,7 +236,7 @@ public boolean isCacheable(LeafReaderContext ctx) {
 
     @Override
     public Explanation explain(LeafReaderContext ctx, int docId, Explanation scoreExplanation) throws IOException {
-      Map context = new HashMap<>();
+      Map<Object, Object> context = new HashMap<>();
       ScoreAndDoc scorer = new ScoreAndDoc();
       scorer.score = scoreExplanation.getValue().floatValue();
       context.put("scorer", scorer);
@@ -283,7 +283,7 @@ private FromDoubleValuesSource(DoubleValuesSource in) {
     }
 
     @Override
-    public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+    public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
       Scorable scorer = (Scorable) context.get("scorer");
       DoubleValues scores = scorer == null ? null : DoubleValuesSource.fromScorer(scorer);
 
@@ -365,16 +365,16 @@ public ValueSourceSortField(boolean reverse) {
 
     @Override
     public SortField rewrite(IndexSearcher searcher) throws IOException {
-      Map context = newContext(searcher);
+      Map<Object, Object> context = newContext(searcher);
       createWeight(context, searcher);
       return new SortField(getField(), new ValueSourceComparatorSource(context), getReverse());
     }
   }
 
   class ValueSourceComparatorSource extends FieldComparatorSource {
-    private final Map context;
+    private final Map<Object, Object> context;
 
-    public ValueSourceComparatorSource(Map context) {
+    public ValueSourceComparatorSource(Map<Object, Object> context) {
       this.context = context;
     }
 
@@ -394,10 +394,10 @@ class ValueSourceComparator extends SimpleFieldComparator<Double> {
     private final double[] values;
     private FunctionValues docVals;
     private double bottom;
-    private final Map fcontext;
+    private final Map<Object, Object> fcontext;
     private double topValue;
 
-    ValueSourceComparator(Map fcontext, int numHits) {
+    ValueSourceComparator(Map<Object, Object> fcontext, int numHits) {
       this.fcontext = fcontext;
       values = new double[numHits];
     }
@@ -429,7 +429,7 @@ public void setBottom(final int bottom) {
 
     @Override
     public void setTopValue(final Double value) {
-      this.topValue = value.doubleValue();
+      this.topValue = value;
     }
 
     @Override
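
With the raw Map replaced by Map<Object, Object> throughout this API, every ValueSource subclass has to adjust its overrides to the new signature or they will no longer compile as overrides. A minimal custom source under the new signatures (the class itself is invented for illustration):

    import java.io.IOException;
    import java.util.Map;

    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.queries.function.FunctionValues;
    import org.apache.lucene.queries.function.ValueSource;
    import org.apache.lucene.queries.function.docvalues.FloatDocValues;

    public class FortyTwoValueSource extends ValueSource {
      @Override
      public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
        // Same constant for every document; real sources read doc values here.
        return new FloatDocValues(this) {
          @Override
          public float floatVal(int doc) {
            return 42f;
          }
        };
      }

      @Override
      public boolean equals(Object o) {
        return o instanceof FortyTwoValueSource;
      }

      @Override
      public int hashCode() {
        return FortyTwoValueSource.class.hashCode();
      }

      @Override
      public String description() {
        return "const(42)";
      }
    }
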
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/BytesRefFieldSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/BytesRefFieldSource.java
index bb33122bdcb6..af7ae3de0ec7 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/BytesRefFieldSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/BytesRefFieldSource.java
@@ -41,7 +41,7 @@ public BytesRefFieldSource(String field) {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     final FieldInfo fieldInfo = readerContext.reader().getFieldInfos().fieldInfo(field);
 
     // To be sorted or not to be sorted, that is the question
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/ComparisonBoolFunction.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/ComparisonBoolFunction.java
index 1a936d98ea45..57adf6b19415 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/ComparisonBoolFunction.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/ComparisonBoolFunction.java
@@ -50,7 +50,7 @@ public String name() {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     final FunctionValues lhsVal = this.lhs.getValues(context, readerContext);
     final FunctionValues rhsVal = this.rhs.getValues(context, readerContext);
     final String compLabel = this.name();
@@ -97,7 +97,7 @@ public String description() {
   }
 
   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
     lhs.createWeight(context, searcher);
     rhs.createWeight(context, searcher);
   }
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/ConstValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/ConstValueSource.java
index 125422fb2dee..b9fda157eaae 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/ConstValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/ConstValueSource.java
@@ -41,7 +41,7 @@ public String description() {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     return new FloatDocValues(this) {
       @Override
       public float floatVal(int doc) {
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DefFunction.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DefFunction.java
index f3fd004675b6..2c6f2a9edddf 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DefFunction.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DefFunction.java
@@ -42,7 +42,7 @@ protected String name() {
 
 
   @Override
-  public FunctionValues getValues(Map fcontext, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> fcontext, LeafReaderContext readerContext) throws IOException {
 
 
     return new Values(valsArr(sources, fcontext, readerContext)) {
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DocFreqValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DocFreqValueSource.java
index e03e3160495e..f7abe36c0e65 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DocFreqValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DocFreqValueSource.java
@@ -29,94 +29,6 @@
 import java.util.Map;
 
 
-class ConstIntDocValues extends IntDocValues {
-  final int ival;
-  final float fval;
-  final double dval;
-  final long lval;
-  final String sval;
-  final ValueSource parent;
-
-  ConstIntDocValues(int val, ValueSource parent) {
-    super(parent);
-    ival = val;
-    fval = val;
-    dval = val;
-    lval = val;
-    sval = Integer.toString(val);
-    this.parent = parent;
-  }
-
-  @Override
-  public float floatVal(int doc) {
-    return fval;
-  }
-  @Override
-  public int intVal(int doc) {
-    return ival;
-  }
-  @Override
-  public long longVal(int doc) {
-    return lval;
-  }
-  @Override
-  public double doubleVal(int doc) {
-    return dval;
-  }
-  @Override
-  public String strVal(int doc) {
-    return sval;
-  }
-  @Override
-  public String toString(int doc) {
-    return parent.description() + '=' + sval;
-  }
-}
-
-class ConstDoubleDocValues extends DoubleDocValues {
-  final int ival;
-  final float fval;
-  final double dval;
-  final long lval;
-  final String sval;
-  final ValueSource parent;
-
-  ConstDoubleDocValues(double val, ValueSource parent) {
-    super(parent);
-    ival = (int)val;
-    fval = (float)val;
-    dval = val;
-    lval = (long)val;
-    sval = Double.toString(val);
-    this.parent = parent;
-  }
-
-  @Override
-  public float floatVal(int doc) {
-    return fval;
-  }
-  @Override
-  public int intVal(int doc) {
-    return ival;
-  }
-  @Override
-  public long longVal(int doc) {
-    return lval;
-  }
-  @Override
-  public double doubleVal(int doc) {
-    return dval;
-  }
-  @Override
-  public String strVal(int doc) {
-    return sval;
-  }
-  @Override
-  public String toString(int doc) {
-    return parent.description() + '=' + sval;
-  }
-}
-
 
 /**
  * DocFreqValueSource returns the number of documents containing the term.
@@ -145,15 +57,15 @@ public String description() {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     IndexSearcher searcher = (IndexSearcher)context.get("searcher");
     int docfreq = searcher.getIndexReader().docFreq(new Term(indexedField, indexedBytes));
     return new ConstIntDocValues(docfreq, this);
   }
 
   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
-    context.put("searcher",searcher);
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
+    context.put("searcher", searcher);
   }
 
   @Override
@@ -167,5 +79,92 @@ public boolean equals(Object o) {
     DocFreqValueSource other = (DocFreqValueSource)o;
     return this.indexedField.equals(other.indexedField) && this.indexedBytes.equals(other.indexedBytes);
   }
+  static class ConstIntDocValues extends IntDocValues {
+    final int ival;
+    final float fval;
+    final double dval;
+    final long lval;
+    final String sval;
+    final ValueSource parent;
+
+    ConstIntDocValues(int val, ValueSource parent) {
+      super(parent);
+      ival = val;
+      fval = val;
+      dval = val;
+      lval = val;
+      sval = Integer.toString(val);
+      this.parent = parent;
+    }
+
+    @Override
+    public float floatVal(int doc) {
+      return fval;
+    }
+    @Override
+    public int intVal(int doc) {
+      return ival;
+    }
+    @Override
+    public long longVal(int doc) {
+      return lval;
+    }
+    @Override
+    public double doubleVal(int doc) {
+      return dval;
+    }
+    @Override
+    public String strVal(int doc) {
+      return sval;
+    }
+    @Override
+    public String toString(int doc) {
+      return parent.description() + '=' + sval;
+    }
+  }
+
+  static class ConstDoubleDocValues extends DoubleDocValues {
+    final int ival;
+    final float fval;
+    final double dval;
+    final long lval;
+    final String sval;
+    final ValueSource parent;
+
+    ConstDoubleDocValues(double val, ValueSource parent) {
+      super(parent);
+      ival = (int)val;
+      fval = (float)val;
+      dval = val;
+      lval = (long)val;
+      sval = Double.toString(val);
+      this.parent = parent;
+    }
+
+    @Override
+    public float floatVal(int doc) {
+      return fval;
+    }
+    @Override
+    public int intVal(int doc) {
+      return ival;
+    }
+    @Override
+    public long longVal(int doc) {
+      return lval;
+    }
+    @Override
+    public double doubleVal(int doc) {
+      return dval;
+    }
+    @Override
+    public String strVal(int doc) {
+      return sval;
+    }
+    @Override
+    public String toString(int doc) {
+      return parent.description() + '=' + sval;
+    }
+  }
 }
 
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DoubleConstValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DoubleConstValueSource.java
index 74a47741c378..21b20aab1bbe 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DoubleConstValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DoubleConstValueSource.java
@@ -43,7 +43,7 @@ public String description() {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     return new DoubleDocValues(this) {
       @Override
       public float floatVal(int doc) {
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DoubleFieldSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DoubleFieldSource.java
index 3f5f454b88d5..1bb6625d3a5c 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DoubleFieldSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DoubleFieldSource.java
@@ -50,7 +50,7 @@ public SortField getSortField(boolean reverse) {
   }
   
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
 
     final NumericDocValues values = getNumericDocValues(context, readerContext);
 
@@ -104,7 +104,7 @@ public void fillValue(int doc) throws IOException {
     };
   }
 
-  protected NumericDocValues getNumericDocValues(Map context, LeafReaderContext readerContext) throws IOException {
+  protected NumericDocValues getNumericDocValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     return DocValues.getNumeric(readerContext.reader(), field);
   }
 
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DualFloatFunction.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DualFloatFunction.java
index e42ba659bc89..745753b6506f 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DualFloatFunction.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DualFloatFunction.java
@@ -51,7 +51,7 @@ public String description() {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     final FunctionValues aVals =  a.getValues(context, readerContext);
     final FunctionValues bVals =  b.getValues(context, readerContext);
     return new FloatDocValues(this) {
@@ -75,7 +75,7 @@ public String toString(int doc) throws IOException {
   }
 
   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
     a.createWeight(context,searcher);
     b.createWeight(context,searcher);
   }
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/EnumFieldSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/EnumFieldSource.java
index 21dbfab04939..d575277d7282 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/EnumFieldSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/EnumFieldSource.java
@@ -94,7 +94,7 @@ public String description() {
 
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     final NumericDocValues arr = DocValues.getNumeric(readerContext.reader(), field);
 
     return new IntDocValues(this) {
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/FloatFieldSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/FloatFieldSource.java
index 87285f4b5688..346c79f89141 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/FloatFieldSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/FloatFieldSource.java
@@ -50,7 +50,7 @@ public SortField getSortField(boolean reverse) {
   }
   
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
 
     final NumericDocValues arr = getNumericDocValues(context, readerContext);
     
@@ -105,7 +105,7 @@ public void fillValue(int doc) throws IOException {
     };
   }
 
-  protected NumericDocValues getNumericDocValues(Map context, LeafReaderContext readerContext) throws IOException {
+  protected NumericDocValues getNumericDocValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     return DocValues.getNumeric(readerContext.reader(), field);
   }
 
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/IDFValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/IDFValueSource.java
index 4192f2d183f8..363f399f06b5 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/IDFValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/IDFValueSource.java
@@ -45,7 +45,7 @@ public String name() {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     IndexSearcher searcher = (IndexSearcher)context.get("searcher");
     TFIDFSimilarity sim = asTFIDF(searcher.getSimilarity(), field);
     if (sim == null) {
@@ -53,7 +53,7 @@ public FunctionValues getValues(Map context, LeafReaderContext readerContext) th
     }
     int docfreq = searcher.getIndexReader().docFreq(new Term(indexedField, indexedBytes));
     float idf = sim.idf(docfreq, searcher.getIndexReader().maxDoc());
-    return new ConstDoubleDocValues(idf, this);
+    return new DocFreqValueSource.ConstDoubleDocValues(idf, this);
   }
   
   // tries extra hard to cast the sim to TFIDFSimilarity
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/IfFunction.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/IfFunction.java
index 45d18dbb17c8..0d868d354d76 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/IfFunction.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/IfFunction.java
@@ -43,7 +43,7 @@ public IfFunction(ValueSource ifSource, ValueSource trueSource, ValueSource fals
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     final FunctionValues ifVals = ifSource.getValues(context, readerContext);
     final FunctionValues trueVals = trueSource.getValues(context, readerContext);
     final FunctionValues falseVals = falseSource.getValues(context, readerContext);
@@ -142,7 +142,7 @@ public boolean equals(Object o) {
   }
 
   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
     ifSource.createWeight(context, searcher);
     trueSource.createWeight(context, searcher);
     falseSource.createWeight(context, searcher);
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/IntFieldSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/IntFieldSource.java
index 8d2ab7fe965c..d8e0def563a7 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/IntFieldSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/IntFieldSource.java
@@ -50,7 +50,7 @@ public SortField getSortField(boolean reverse) {
   }
   
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     
     final NumericDocValues arr = getNumericDocValues(context, readerContext);
 
@@ -109,7 +109,7 @@ public void fillValue(int doc) throws IOException {
     };
   }
 
-  protected NumericDocValues getNumericDocValues(Map context, LeafReaderContext readerContext) throws IOException {
+  protected NumericDocValues getNumericDocValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     return DocValues.getNumeric(readerContext.reader(), field);
   }
 
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/JoinDocFreqValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/JoinDocFreqValueSource.java
index 4e51ac647bd9..e20aaf52baa1 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/JoinDocFreqValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/JoinDocFreqValueSource.java
@@ -53,7 +53,7 @@ public String description() {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException
   {
     final BinaryDocValues terms = DocValues.getBinary(readerContext.reader(), field);
     final IndexReader top = ReaderUtil.getTopLevelContext(readerContext).reader();
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/LinearFloatFunction.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/LinearFloatFunction.java
index bbc74195543b..66daaf2c3808 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/LinearFloatFunction.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/LinearFloatFunction.java
@@ -50,7 +50,7 @@ public String description() {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     final FunctionValues vals =  source.getValues(context, readerContext);
     return new FloatDocValues(this) {
       @Override
@@ -69,7 +69,7 @@ public String toString(int doc) throws IOException {
   }
 
   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
     source.createWeight(context, searcher);
   }
 
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/LiteralValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/LiteralValueSource.java
index aeb3b49a717d..f5b4547886ac 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/LiteralValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/LiteralValueSource.java
@@ -45,7 +45,7 @@ public String getValue() {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
 
     return new StrDocValues(this) {
       @Override
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/LongFieldSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/LongFieldSource.java
index 847436277bb9..f728b0ac1c66 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/LongFieldSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/LongFieldSource.java
@@ -62,7 +62,7 @@ public SortField getSortField(boolean reverse) {
   }
   
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     
     final NumericDocValues arr = getNumericDocValues(context, readerContext);
 
@@ -142,7 +142,7 @@ public void fillValue(int doc) throws IOException {
     };
   }
 
-  protected NumericDocValues getNumericDocValues(Map context, LeafReaderContext readerContext) throws IOException {
+  protected NumericDocValues getNumericDocValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     return DocValues.getNumeric(readerContext.reader(), field);
   }
 
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MaxDocValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MaxDocValueSource.java
index ccd73269bed1..1d828a0acaaf 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MaxDocValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MaxDocValueSource.java
@@ -41,14 +41,14 @@ public String description() {
   }
 
   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
-    context.put("searcher",searcher);
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
+    context.put("searcher", searcher);
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     IndexSearcher searcher = (IndexSearcher)context.get("searcher");
-    return new ConstIntDocValues(searcher.getIndexReader().maxDoc(), this);
+    return new DocFreqValueSource.ConstIntDocValues(searcher.getIndexReader().maxDoc(), this);
   }
 
   @Override
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiBoolFunction.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiBoolFunction.java
index 6e905bf4f429..b89a42ac8346 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiBoolFunction.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiBoolFunction.java
@@ -42,7 +42,7 @@ public MultiBoolFunction(List<ValueSource> sources) {
   protected abstract boolean func(int doc, FunctionValues[] vals) throws IOException;
 
   @Override
-  public BoolDocValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public BoolDocValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     final FunctionValues[] vals =  new FunctionValues[sources.size()];
     int i=0;
     for (ValueSource source : sources) {
@@ -104,7 +104,7 @@ public boolean equals(Object o) {
   }
 
   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
     for (ValueSource source : sources) {
       source.createWeight(context, searcher);
     }
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiFloatFunction.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiFloatFunction.java
index f2deb219a536..f5f57c9c0a1d 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiFloatFunction.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiFloatFunction.java
@@ -70,7 +70,7 @@ public String description() {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     final FunctionValues[] valsArr = new FunctionValues[sources.length];
     for (int i=0; i<sources.length; i++) {
       valsArr[i] = sources[i].getValues(context, readerContext);
     }

   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
     for (ValueSource source : sources)
       source.createWeight(context, searcher);
   }
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiFunction.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiFunction.java
index 591aa2c38d54..6b587de8e57f 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiFunction.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiFunction.java
@@ -112,7 +112,7 @@ public static String description(String name, List<ValueSource> sources) {
     return sb.toString();
   }
 
-  public static FunctionValues[] valsArr(List<ValueSource> sources, Map fcontext, LeafReaderContext readerContext) throws IOException {
+  public static FunctionValues[] valsArr(List<ValueSource> sources, Map<Object, Object> fcontext, LeafReaderContext readerContext) throws IOException {
     final FunctionValues[] valsArr = new FunctionValues[sources.size()];
     int i=0;
     for (ValueSource source : sources) {
@@ -157,7 +157,7 @@ public static String toString(String name, FunctionValues[] valsArr, int doc) th
   }
 
   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
     for (ValueSource source : sources)
       source.createWeight(context, searcher);
   }
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiValuedDoubleFieldSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiValuedDoubleFieldSource.java
index b0728cf5e27a..4895d5b56fd8 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiValuedDoubleFieldSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiValuedDoubleFieldSource.java
@@ -55,7 +55,7 @@ public String description() {
   }
   
   @Override
-  protected NumericDocValues getNumericDocValues(Map context, LeafReaderContext readerContext) throws IOException {
+  protected NumericDocValues getNumericDocValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     SortedNumericDocValues sortedDv = DocValues.getSortedNumeric(readerContext.reader(), field);
     return SortedNumericSelector.wrap(sortedDv, selector, SortField.Type.DOUBLE);
   }
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiValuedFloatFieldSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiValuedFloatFieldSource.java
index af8eacf92fe5..bedae049c3f8 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiValuedFloatFieldSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiValuedFloatFieldSource.java
@@ -55,7 +55,7 @@ public String description() {
   }
   
   @Override
-  protected NumericDocValues getNumericDocValues(Map context, LeafReaderContext readerContext) throws IOException {
+  protected NumericDocValues getNumericDocValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     SortedNumericDocValues sortedDv = DocValues.getSortedNumeric(readerContext.reader(), field);
     return SortedNumericSelector.wrap(sortedDv, selector, SortField.Type.FLOAT);
   }
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiValuedIntFieldSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiValuedIntFieldSource.java
index 3110f8dca49c..da14032fb151 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiValuedIntFieldSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiValuedIntFieldSource.java
@@ -55,7 +55,7 @@ public String description() {
   }
   
   @Override
-  protected NumericDocValues getNumericDocValues(Map context, LeafReaderContext readerContext) throws IOException {
+  protected NumericDocValues getNumericDocValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     SortedNumericDocValues sortedDv = DocValues.getSortedNumeric(readerContext.reader(), field);
     return SortedNumericSelector.wrap(sortedDv, selector, SortField.Type.INT);
   }
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiValuedLongFieldSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiValuedLongFieldSource.java
index 5e31e139cf1d..8c0f17dc7216 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiValuedLongFieldSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MultiValuedLongFieldSource.java
@@ -55,7 +55,7 @@ public String description() {
   }
   
   @Override
-  protected NumericDocValues getNumericDocValues(Map context, LeafReaderContext readerContext) throws IOException {
+  protected NumericDocValues getNumericDocValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     SortedNumericDocValues sortedDv = DocValues.getSortedNumeric(readerContext.reader(), field);
     return SortedNumericSelector.wrap(sortedDv, selector, SortField.Type.LONG);
   }
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/NormValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/NormValueSource.java
index ffbb0c5fe4e2..9f81dc450d2a 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/NormValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/NormValueSource.java
@@ -55,12 +55,12 @@ public String description() {
   }
 
   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
     context.put("searcher",searcher);
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     IndexSearcher searcher = (IndexSearcher)context.get("searcher");
     final TFIDFSimilarity similarity = IDFValueSource.asTFIDF(searcher.getSimilarity(), field);
     if (similarity == null) {
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/NumDocsValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/NumDocsValueSource.java
index 6f92f1ea4c7b..2e5a6672f740 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/NumDocsValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/NumDocsValueSource.java
@@ -41,9 +41,9 @@ public String description() {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     // Searcher has no numdocs so we must use the reader instead
-    return new ConstIntDocValues(ReaderUtil.getTopLevelContext(readerContext).reader().numDocs(), this);
+    return new DocFreqValueSource.ConstIntDocValues(ReaderUtil.getTopLevelContext(readerContext).reader().numDocs(), this);
   }
 
   @Override
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/QueryValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/QueryValueSource.java
index 81d3bbaaf340..d8280105061e 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/QueryValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/QueryValueSource.java
@@ -55,7 +55,7 @@ public String description() {
   }
 
   @Override
-  public FunctionValues getValues(Map fcontext, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> fcontext, LeafReaderContext readerContext) throws IOException {
     return new QueryDocValues(this, readerContext, fcontext);
   }
 
@@ -72,7 +72,7 @@ public boolean equals(Object o) {
   }
 
   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
     Query rewritten = searcher.rewrite(q);
     Weight w = searcher.createWeight(rewritten, ScoreMode.COMPLETE, 1);
     context.put(this, w);
@@ -84,7 +84,7 @@ class QueryDocValues extends FloatDocValues {
   final LeafReaderContext readerContext;
   final Weight weight;
   final float defVal;
-  final Map fcontext;
+  final Map<Object, Object> fcontext;
   final Query q;
 
   Scorer scorer;
@@ -96,7 +96,7 @@ class QueryDocValues extends FloatDocValues {
   int lastDocRequested=-1;
 
 
-  public QueryDocValues(QueryValueSource vs, LeafReaderContext readerContext, Map fcontext) throws IOException {
+  public QueryDocValues(QueryValueSource vs, LeafReaderContext readerContext, Map<Object, Object> fcontext) throws IOException {
     super(vs);
 
     this.readerContext = readerContext;
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/RangeMapFloatFunction.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/RangeMapFloatFunction.java
index 5492c7fb0edc..d924189d2fe6 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/RangeMapFloatFunction.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/RangeMapFloatFunction.java
@@ -58,7 +58,7 @@ public String description() {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     final FunctionValues vals =  source.getValues(context, readerContext);
     final FunctionValues targets = target.getValues(context, readerContext);
     final FunctionValues defaults = (this.defaultVal == null) ? null : defaultVal.getValues(context, readerContext);
@@ -76,7 +76,7 @@ public String toString(int doc) throws IOException {
   }
 
   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
     source.createWeight(context, searcher);
   }
 
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/ReciprocalFloatFunction.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/ReciprocalFloatFunction.java
index d6b050e6da2a..f59d9e772f64 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/ReciprocalFloatFunction.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/ReciprocalFloatFunction.java
@@ -60,7 +60,7 @@ public ReciprocalFloatFunction(ValueSource source, float m, float a, float b) {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     final FunctionValues vals = source.getValues(context, readerContext);
     return new FloatDocValues(this) {
       @Override
@@ -81,7 +81,7 @@ public String toString(int doc) throws IOException {
   }
 
   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
     source.createWeight(context, searcher);
   }
 
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/ScaleFloatFunction.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/ScaleFloatFunction.java
index 38bd86a7a009..bb465b43da11 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/ScaleFloatFunction.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/ScaleFloatFunction.java
@@ -59,7 +59,7 @@ private static class ScaleInfo {
     float maxVal;
   }
 
-  private ScaleInfo createScaleInfo(Map context, LeafReaderContext readerContext) throws IOException {
+  private ScaleInfo createScaleInfo(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     final List<LeafReaderContext> leaves = ReaderUtil.getTopLevelContext(readerContext).leaves();
 
     float minVal = Float.POSITIVE_INFINITY;
@@ -100,7 +100,7 @@ private ScaleInfo createScaleInfo(Map context, LeafReaderContext readerContext)
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
 
     ScaleInfo scaleInfo = (ScaleInfo)context.get(ScaleFloatFunction.this);
     if (scaleInfo == null) {
@@ -133,7 +133,7 @@ public String toString(int doc) throws IOException {
   }
 
   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
     source.createWeight(context, searcher);
   }
 
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SimpleBoolFunction.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SimpleBoolFunction.java
index d9ad8c478fa9..2bb5e3ff687b 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SimpleBoolFunction.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SimpleBoolFunction.java
@@ -44,7 +44,7 @@ public SimpleBoolFunction(ValueSource source) {
   protected abstract boolean func(int doc, FunctionValues vals) throws IOException;
 
   @Override
-  public BoolDocValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public BoolDocValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     final FunctionValues vals =  source.getValues(context, readerContext);
     return new BoolDocValues(this) {
       @Override
@@ -76,7 +76,7 @@ public boolean equals(Object o) {
   }
 
   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
     source.createWeight(context, searcher);
   }
 }
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SimpleFloatFunction.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SimpleFloatFunction.java
index f74f8e44dc01..76aac62ee8b0 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SimpleFloatFunction.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SimpleFloatFunction.java
@@ -34,7 +34,7 @@ public SimpleFloatFunction(ValueSource source) {
   protected abstract float func(int doc, FunctionValues vals) throws IOException;
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     final FunctionValues vals =  source.getValues(context, readerContext);
     return new FloatDocValues(this) {
       @Override
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SingleFunction.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SingleFunction.java
index f7eb69900b64..f862845001e8 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SingleFunction.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SingleFunction.java
@@ -52,7 +52,7 @@ public boolean equals(Object o) {
   }
 
   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
     source.createWeight(context, searcher);
   }
 }
\ No newline at end of file
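Note on the recurring valuesource change above and below: the signature updates add `<Object, Object>` type parameters to the shared context `Map`. That map is a per-query scratchpad: `createWeight` runs once against the top-level searcher and stashes expensive state, and `getValues` reads it back for each segment. A simplified sketch of the handshake (a hypothetical class for illustration, not the real `org.apache.lucene.queries.function.ValueSource` API):

```java
import java.io.IOException;
import java.util.Map;

// Minimal sketch, assuming a single long-valued statistic per query. The point
// is that createWeight() writes into the shared map and getValues() reads from
// it, which is why both methods now take the same Map<Object, Object>.
abstract class ContextSketch {
  // called once per query, before any per-segment work
  void createWeight(Map<Object, Object> context) throws IOException {
    context.put(this, computeStatistic()); // keyed by the source itself
  }

  // called per segment; retrieves what createWeight() stored
  long getValues(Map<Object, Object> context) {
    return (Long) context.get(this);
  }

  abstract long computeStatistic() throws IOException;
}
```

This mirrors the pattern visible in the SumTotalTermFreqValueSource and TotalTermFreqValueSource hunks below, which compute a corpus-wide statistic once in `createWeight` and return it unchanged from `getValues`.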
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SortedSetFieldSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SortedSetFieldSource.java
index ddf71292b0c6..e9a33584951d 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SortedSetFieldSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SortedSetFieldSource.java
@@ -55,7 +55,7 @@ public SortField getSortField(boolean reverse) {
   }
   
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     SortedSetDocValues sortedSet = DocValues.getSortedSet(readerContext.reader(), field);
     SortedDocValues view = SortedSetSelector.wrap(sortedSet, selector);
     return new DocTermsIndexDocValues(this, view) {
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SumTotalTermFreqValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SumTotalTermFreqValueSource.java
index 716d3045397e..031d2c050866 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SumTotalTermFreqValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/SumTotalTermFreqValueSource.java
@@ -48,12 +48,12 @@ public String description() {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     return (FunctionValues)context.get(this);
   }
 
   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
     long sumTotalTermFreq = 0;
     for (LeafReaderContext readerContext : searcher.getTopReaderContext().leaves()) {
       Terms terms = readerContext.reader().terms(indexedField);
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/TFValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/TFValueSource.java
index 731ab1fee249..f127b2f8887e 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/TFValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/TFValueSource.java
@@ -48,7 +48,7 @@ public String name() {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     final Terms terms = readerContext.reader().terms(indexedField);
     IndexSearcher searcher = (IndexSearcher)context.get("searcher");
     final TFIDFSimilarity similarity = IDFValueSource.asTFIDF(searcher.getSimilarity(), indexedField);
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/TermFreqValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/TermFreqValueSource.java
index 55c54f5da306..78ca5ac3bb53 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/TermFreqValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/TermFreqValueSource.java
@@ -46,7 +46,7 @@ public String name() {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     final Terms terms = readerContext.reader().terms(indexedField);
 
     return new IntDocValues(this) {
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/TotalTermFreqValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/TotalTermFreqValueSource.java
index 5a69e42fcd2c..c7eb02f74687 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/TotalTermFreqValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/TotalTermFreqValueSource.java
@@ -55,12 +55,12 @@ public String description() {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     return (FunctionValues)context.get(this);
   }
 
   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
     long totalTermFreq = 0;
     for (LeafReaderContext readerContext : searcher.getTopReaderContext().leaves()) {
       long val = readerContext.reader().totalTermFreq(new Term(indexedField, indexedBytes));
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/VectorValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/VectorValueSource.java
index 768243a04956..8ca99ca7f8db 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/VectorValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/VectorValueSource.java
@@ -52,7 +52,7 @@ public String name() {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
     int size = sources.size();
 
     // special-case x,y and lat,lon since it's so common
@@ -178,7 +178,7 @@ public String toString(int doc) throws IOException {
   }
 
   @Override
-  public void createWeight(Map context, IndexSearcher searcher) throws IOException {
+  public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
     for (ValueSource source : sources)
       source.createWeight(context, searcher);
   }
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/intervals/IntervalMatches.java b/lucene/queries/src/java/org/apache/lucene/queries/intervals/IntervalMatches.java
index 82d9d093a293..15448e963ba4 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/intervals/IntervalMatches.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/intervals/IntervalMatches.java
@@ -163,6 +163,7 @@ public int nextDoc() {
           case ITERATING:
           case NO_MORE_INTERVALS:
             state = State.EXHAUSTED;
+            break;
           case EXHAUSTED:
         }
         return NO_MORE_DOCS;
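The IntervalMatches hunk above is a missing-`break` fix: the `ITERATING`/`NO_MORE_INTERVALS` arm fell through into the `EXHAUSTED` case. A minimal, self-contained illustration of this bug class (hypothetical names, not the Lucene source):

```java
// Without the break, Java switch execution continues into the next case label,
// so the EXHAUSTED arm's statements also run after the state assignment.
enum State { ITERATING, NO_MORE_INTERVALS, EXHAUSTED }

final class FallThroughDemo {
  int spuriousRuns = 0;

  State advance(State state) {
    switch (state) {
      case ITERATING:
      case NO_MORE_INTERVALS:
        state = State.EXHAUSTED;
        break; // the line the patch adds; removing it makes spuriousRuns grow
      case EXHAUSTED:
        spuriousRuns++; // on fall-through this executes for ITERATING too
    }
    return state;
  }
}
```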
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/TestFunctionScoreQuery.java b/lucene/queries/src/test/org/apache/lucene/queries/function/TestFunctionScoreQuery.java
index 952e6a4738bb..6c8ea52b137a 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/function/TestFunctionScoreQuery.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/function/TestFunctionScoreQuery.java
@@ -18,6 +18,7 @@
 package org.apache.lucene.queries.function;
 
 import java.io.IOException;
+import java.util.concurrent.atomic.AtomicReference;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.NumericDocValuesField;
@@ -37,8 +38,10 @@
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.QueryUtils;
+import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.Weight;
 import org.apache.lucene.store.Directory;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -244,4 +247,33 @@ public void testAccessToValueSource() throws Exception {
 
   }
 
+  public void testScoreMode() throws Exception {
+    // Value Source doesn't need scores
+    assertInnerScoreMode(ScoreMode.COMPLETE_NO_SCORES, ScoreMode.COMPLETE, DoubleValuesSource.fromDoubleField("foo"));
+    assertInnerScoreMode(ScoreMode.COMPLETE_NO_SCORES, ScoreMode.COMPLETE_NO_SCORES, DoubleValuesSource.fromDoubleField("foo"));
+    assertInnerScoreMode(ScoreMode.COMPLETE_NO_SCORES, ScoreMode.TOP_SCORES, DoubleValuesSource.fromDoubleField("foo"));
+    
+    // Value Source needs scores
+    assertInnerScoreMode(ScoreMode.COMPLETE, ScoreMode.COMPLETE, DoubleValuesSource.SCORES);
+    assertInnerScoreMode(ScoreMode.COMPLETE_NO_SCORES, ScoreMode.COMPLETE_NO_SCORES, DoubleValuesSource.SCORES);
+    assertInnerScoreMode(ScoreMode.COMPLETE, ScoreMode.TOP_SCORES, DoubleValuesSource.SCORES);
+    
+  }
+  
+  private void assertInnerScoreMode(ScoreMode expectedScoreMode, ScoreMode inputScoreMode, DoubleValuesSource valueSource) throws IOException {
+    final AtomicReference<ScoreMode> scoreModeInWeight = new AtomicReference<>();
+    Query innerQ = new TermQuery(new Term(TEXT_FIELD, "a")) {
+      
+      @Override
+      public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
+        scoreModeInWeight.set(scoreMode);
+        return super.createWeight(searcher, scoreMode, boost);
+      }
+    };
+    
+    FunctionScoreQuery fq = new FunctionScoreQuery(innerQ, valueSource);
+    fq.createWeight(searcher, inputScoreMode, 1f);
+    assertEquals(expectedScoreMode, scoreModeInWeight.get());
+  }
+
 }
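The new `testScoreMode` above pins down how `FunctionScoreQuery` propagates score modes to the query it wraps. A hedged restatement of that matrix as a standalone helper (this encodes the test's expectations, not the actual `FunctionScoreQuery` implementation):

```java
import org.apache.lucene.search.ScoreMode;

// The wrapped query is only asked for scores when the value source itself
// consumes them, and TOP_SCORES is widened to COMPLETE because the function
// may reorder hits, so top-score optimizations cannot be trusted inside.
final class InnerScoreModeSketch {
  static ScoreMode innerScoreMode(ScoreMode outer, boolean sourceNeedsScores) {
    if (outer == ScoreMode.COMPLETE_NO_SCORES || !sourceNeedsScores) {
      return ScoreMode.COMPLETE_NO_SCORES;
    }
    return ScoreMode.COMPLETE;
  }
}
```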
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/TestValueSources.java b/lucene/queries/src/test/org/apache/lucene/queries/function/TestValueSources.java
index a75cc66219e9..46ba04bd4063 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/function/TestValueSources.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/function/TestValueSources.java
@@ -565,7 +565,7 @@ public void testMultiFunctionHelperEquivilence() throws IOException {
     
     // actual doc / index is not relevant for this test
     final LeafReaderContext leaf = searcher.getIndexReader().leaves().get(0);
-    final Map context = ValueSource.newContext(searcher);
+    final Map<Object, Object> context = ValueSource.newContext(searcher);
 
     ALL_EXIST_VS.createWeight(context, searcher);
     NONE_EXIST_VS.createWeight(context, searcher);
@@ -643,12 +643,12 @@ void assertNoneExist(ValueSource vs) {
     assertExists(NONE_EXIST_VS, vs);
   }
   /**
-   * Asserts that for every doc, the {@link FunctionValues#exists} value from the 
-   * actual {@link ValueSource} matches the {@link FunctionValues#exists} 
+   * Asserts that for every doc, the {@link FunctionValues#exists} value from the
+   * actual {@link ValueSource} matches the {@link FunctionValues#exists}
    * value from the expected {@link ValueSource}
    */
   void assertExists(ValueSource expected, ValueSource actual) {
-    Map context = ValueSource.newContext(searcher);
+    Map<Object, Object> context = ValueSource.newContext(searcher);
     try {
       expected.createWeight(context, searcher);
       actual.createWeight(context, searcher);
@@ -712,7 +712,7 @@ public String description() {
     }
     
     @Override
-    public FunctionValues getValues(Map context, LeafReaderContext readerContext) {
+    public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) {
       return new FloatDocValues(this) {
         @Override
         public float floatVal(int doc) {
diff --git a/lucene/queryparser/build.gradle b/lucene/queryparser/build.gradle
index 9b01271df8bf..077eb245bc8f 100644
--- a/lucene/queryparser/build.gradle
+++ b/lucene/queryparser/build.gradle
@@ -17,6 +17,8 @@
 
 apply plugin: 'java-library'
 
+description = 'Query parsers and parsing framework'
+
 dependencies {
   api project(':lucene:core')
   api project(':lucene:queries')
diff --git a/lucene/replicator/build.gradle b/lucene/replicator/build.gradle
index 7e389e9dce47..32d5cc787323 100644
--- a/lucene/replicator/build.gradle
+++ b/lucene/replicator/build.gradle
@@ -17,6 +17,8 @@
 
 apply plugin: 'java-library'
 
+description = 'Lucene index files replication utility'
+
 dependencies {
   api project(':lucene:core')
 
diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/ReplicatorTestCase.java b/lucene/replicator/src/test/org/apache/lucene/replicator/ReplicatorTestCase.java
index c2f47bfa1f0f..e2cd3117f5ef 100644
--- a/lucene/replicator/src/test/org/apache/lucene/replicator/ReplicatorTestCase.java
+++ b/lucene/replicator/src/test/org/apache/lucene/replicator/ReplicatorTestCase.java
@@ -99,10 +99,12 @@ public static synchronized Server newHttpServer(Handler handler) throws Exceptio
       HttpConfiguration configuration = new HttpConfiguration();
       configuration.setSecureScheme("https");
       configuration.addCustomizer(new SecureRequestCustomizer());
+      @SuppressWarnings("resource")
       ServerConnector c = new ServerConnector(server, new SslConnectionFactory(sslcontext, "http/1.1"),
           new HttpConnectionFactory(configuration));
       connector = c;
     } else {
+      @SuppressWarnings("resource")
       ServerConnector c = new ServerConnector(server, new HttpConnectionFactory());
       connector = c;
     }
diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleServer.java b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleServer.java
index 6f95c5b42372..7e1e065f64ac 100644
--- a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleServer.java
+++ b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleServer.java
@@ -220,6 +220,7 @@ static CopyState readCopyState(DataInput in) throws IOException {
     return new CopyState(files, version, gen, infosBytes, completedMergeFiles, primaryGen, null);
   }
 
+  @SuppressWarnings("try")
   public void test() throws Exception {
 
     int id = Integer.parseInt(System.getProperty("tests.nrtreplication.nodeid"));
diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestNRTReplication.java b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestNRTReplication.java
index 05f02cce8c24..b41e0f92d14b 100644
--- a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestNRTReplication.java
+++ b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestNRTReplication.java
@@ -271,7 +271,7 @@ public void testReplicateDeleteAllDocuments() throws Exception {
     waitForVersionAndHits(replica, primaryVersion3, 10);
 
     primaryC.close();
-
+    docs.close();
     replica.close();
     primary.close();
   }
@@ -320,7 +320,7 @@ public void testReplicateForceMerge() throws Exception {
     waitForVersionAndHits(replica, primaryVersion3, 20);
 
     primaryC.close();
-
+    docs.close();
     replica.close();
     primary.close();
   }
@@ -366,7 +366,7 @@ public void testReplicaCrashNoCommit() throws Exception {
     // Ask replica to sync:
     replica.newNRTPoint(primaryVersion1, 0, primary.tcpPort);
     waitForVersionAndHits(replica, primaryVersion1, 10);
-
+    docs.close();
     replica.close();
     primary.close();
   }
@@ -409,7 +409,7 @@ public void testReplicaCrashWithCommit() throws Exception {
 
     // On startup the replica searches the last commit:
     assertVersionAndHits(replica, primaryVersion1, 10);
-
+    docs.close();
     replica.close();
     primary.close();
   }
@@ -474,7 +474,7 @@ public void testIndexingWhileReplicaIsDown() throws Exception {
     replica.newNRTPoint(primaryVersion2, 0, primary.tcpPort);
 
     waitForVersionAndHits(replica, primaryVersion2, 20);
-
+    docs.close();
     replica.close();
     primary.close();
   }
@@ -508,6 +508,8 @@ public void testCrashPrimary1() throws Exception {
     // Wait for replica to sync up:
     waitForVersionAndHits(replica, primaryVersion1, 10);
 
+    docs.close();
+
     // Crash primary:
     primary.crash();
 
@@ -586,6 +588,7 @@ public void testCrashPrimary2() throws Exception {
     // Wait for replica to sync up:
     waitForVersionAndHits(replica, primaryVersion2, 20);
 
+    docs.close();
     primary.close();
     replica.close();
   }
@@ -693,7 +696,7 @@ public void testCrashPrimaryWhileCopying() throws Exception {
         assertEquals(100, hitCount);
       }
     }
-
+    docs.close();
     primary.close();
     replica.close();
   }
@@ -763,6 +766,7 @@ public void testCrashReplica() throws Exception {
     // Make sure it sees all docs that were indexed while it was down:
     assertVersionAndHits(primary, primaryVersion2, 110);
 
+    docs.close();
     replica.close();
     primary.close();
   }
@@ -832,6 +836,7 @@ public void testFullClusterCrash() throws Exception {
     assertVersionAndHits(replica1, primary.initInfosVersion, 50);
     assertVersionAndHits(replica2, primary.initInfosVersion, 50);
 
+    docs.close();
     primary.close();
     replica1.close();
     replica2.close();
diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestStressNRTReplication.java b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestStressNRTReplication.java
index d11b22bc58a9..d431dc1e056d 100644
--- a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestStressNRTReplication.java
+++ b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestStressNRTReplication.java
@@ -1057,9 +1057,9 @@ private class IndexThread extends Thread {
 
     @Override
     public void run() {
-
+      LineFileDocs docs=null;
       try {
-        LineFileDocs docs = new LineFileDocs(random());
+        docs = new LineFileDocs(random());
         int docCount = 0;
 
         // How often we do an update/delete vs add:
@@ -1175,6 +1175,8 @@ public void run() {
         failed.set(true);
         stop.set(true);
         throw new RuntimeException(t);
+      } finally {
+        IOUtils.closeWhileHandlingException(docs);
       }
     }
   }
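The TestNRTReplication and TestStressNRTReplication hunks above all plug the same leak: `LineFileDocs` was opened but never closed on every path. The stress test uses the standard hoist-and-finally pattern, sketched here with a hypothetical `openDocs()` helper standing in for `new LineFileDocs(random())`:

```java
import java.io.Closeable;

import org.apache.lucene.util.IOUtils;

// Declare the resource outside the try block so the finally clause can reach
// it; closeWhileHandlingException() closes quietly, so an IOException during
// cleanup cannot mask the original failure from the try body.
final class CloseInFinallySketch {
  void run() throws Exception {
    Closeable docs = null;
    try {
      docs = openDocs();
      // ... index documents, possibly throwing ...
    } finally {
      IOUtils.closeWhileHandlingException(docs);
    }
  }

  private Closeable openDocs() {
    return () -> {}; // placeholder resource
  }
}
```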
diff --git a/lucene/sandbox/build.gradle b/lucene/sandbox/build.gradle
index 5543dec15bbc..ec2d9ca80561 100644
--- a/lucene/sandbox/build.gradle
+++ b/lucene/sandbox/build.gradle
@@ -17,6 +17,8 @@
 
 apply plugin: 'java-library'
 
+description = 'Various third party contributions and new ideas'
+
 dependencies {
   api project(':lucene:core')
   testImplementation project(':lucene:test-framework')
diff --git a/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/VersionBlockTreeTermsWriter.java b/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/VersionBlockTreeTermsWriter.java
index b9c57491a88a..d13826a91662 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/VersionBlockTreeTermsWriter.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/VersionBlockTreeTermsWriter.java
@@ -779,7 +779,7 @@ public void finish() throws IOException {
 
         // Write FST to index
         indexStartFP = indexOut.getFilePointer();
-        root.index.save(indexOut);
+        root.index.save(indexOut, indexOut);
         //System.out.println("  write FST " + indexStartFP + " field=" + fieldInfo.name);
 
         // if (SAVE_DOT_FILES || DEBUG) {
diff --git a/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/VersionFieldReader.java b/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/VersionFieldReader.java
index 93888ae589d4..280a6c8b5838 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/VersionFieldReader.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/VersionFieldReader.java
@@ -74,7 +74,7 @@ final class VersionFieldReader extends Terms implements Accountable {
       final IndexInput clone = indexIn.clone();
       //System.out.println("start=" + indexStartFP + " field=" + fieldInfo.name);
       clone.seek(indexStartFP);
-      index = new FST<>(clone, VersionBlockTreeTermsWriter.FST_OUTPUTS);
+      index = new FST<>(clone, clone, VersionBlockTreeTermsWriter.FST_OUTPUTS);
         
       /*
         if (false) {
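The two sandbox hunks above track an FST API change: `FST.save` and the reading constructor now take separate metadata and data streams. Callers that keep everything in one file, as the id-version codec does, simply pass the same stream twice. A hedged sketch of both call sites (assuming the two-argument `save` and three-argument constructor shown in the diff):

```java
import java.io.IOException;

import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.Outputs;

// When metadata and data live in the same file, the meta stream and the data
// stream are simply the same object, as in the hunks above.
final class FstTwoStreamSketch {
  static <T> void write(FST<T> fst, DataOutput out) throws IOException {
    fst.save(out, out);
  }

  static <T> FST<T> read(DataInput in, Outputs<T> outputs) throws IOException {
    return new FST<>(in, in, outputs);
  }
}
```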
diff --git a/lucene/sandbox/src/java/org/apache/lucene/search/LatLonPointPrototypeQueries.java b/lucene/sandbox/src/java/org/apache/lucene/search/LatLonPointPrototypeQueries.java
index 3c0d7ff3164f..bce984f432aa 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/search/LatLonPointPrototypeQueries.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/search/LatLonPointPrototypeQueries.java
@@ -20,7 +20,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.lucene.codecs.lucene60.Lucene60PointsFormat;
+import org.apache.lucene.codecs.lucene86.Lucene86PointsFormat;
 import org.apache.lucene.document.LatLonDocValuesField;
 import org.apache.lucene.document.LatLonPoint;
 import org.apache.lucene.geo.GeoUtils;
@@ -49,7 +49,7 @@ private LatLonPointPrototypeQueries() {
    * <p>
    * This is functionally equivalent to running {@link MatchAllDocsQuery} with a {@link LatLonDocValuesField#newDistanceSort},
    * but is far more efficient since it takes advantage of properties the indexed BKD tree. Currently this
-   * only works with {@link Lucene60PointsFormat} (used by the default codec). Multi-valued fields are
+   * only works with {@link Lucene86PointsFormat} (used by the default codec). Multi-valued fields are
    * currently not de-duplicated, so if a document had multiple instances of the specified field that
    * make it into the top n, that document will appear more than once.
    * <p>
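For context, a hedged usage sketch of the sandbox `nearest` API this javadoc documents; the field name and coordinates are illustrative:

```java
import java.io.IOException;

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LatLonPointPrototypeQueries;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopFieldDocs;

// Find the 10 documents whose "location" LatLonPoint field is closest to the
// given (latitude, longitude); hits come back ordered by distance.
final class NearestSketch {
  static void demo(IndexSearcher searcher) throws IOException {
    TopFieldDocs nearest =
        LatLonPointPrototypeQueries.nearest(searcher, "location", 40.7128, -74.0060, 10);
    for (ScoreDoc hit : nearest.scoreDocs) {
      System.out.println("doc=" + hit.doc);
    }
  }
}
```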
diff --git a/lucene/sandbox/src/java/org/apache/lucene/search/PhraseWildcardQuery.java b/lucene/sandbox/src/java/org/apache/lucene/search/PhraseWildcardQuery.java
index 16c601ac05f9..e348b6258b97 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/search/PhraseWildcardQuery.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/search/PhraseWildcardQuery.java
@@ -858,7 +858,7 @@ protected TermData(int numSegments, TermsData termsData) {
     @SuppressWarnings("unchecked")
     protected void setTermStatesForSegment(LeafReaderContext leafReaderContext, List<TermBytesTermState> termStates) {
       if (termStatesPerSegment == null) {
-        termStatesPerSegment = (List[]) new List[numSegments];
+        termStatesPerSegment = (List<TermBytesTermState>[]) new List[numSegments];
         termsData.numTermsMatching++;
       }
       termStatesPerSegment[leafReaderContext.ord] = termStates;
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestFloatPointNearestNeighbor.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestFloatPointNearestNeighbor.java
index b2d5b03fbae1..a14204caf333 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/document/TestFloatPointNearestNeighbor.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestFloatPointNearestNeighbor.java
@@ -243,7 +243,7 @@ private static double euclideanDistanceSquared(float[] a, float[] b) {
 
   private IndexWriterConfig getIndexWriterConfig() {
     IndexWriterConfig iwc = newIndexWriterConfig();
-    iwc.setCodec(Codec.forName("Lucene84"));
+    iwc.setCodec(Codec.forName("Lucene86"));
     return iwc;
   }
 }
diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestNearest.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestNearest.java
index 3ea2a4e8f8a6..a149aceb8801 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/search/TestNearest.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/search/TestNearest.java
@@ -246,7 +246,7 @@ public int compare(FieldDoc a, FieldDoc b) {
 
   private IndexWriterConfig getIndexWriterConfig() {
     IndexWriterConfig iwc = newIndexWriterConfig();
-    iwc.setCodec(Codec.forName("Lucene84"));
+    iwc.setCodec(Codec.forName("Lucene86"));
     return iwc;
   }
 }
diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestPhraseWildcardQuery.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestPhraseWildcardQuery.java
index 91ec32ec734e..c8d9d51fc4e1 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/search/TestPhraseWildcardQuery.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/search/TestPhraseWildcardQuery.java
@@ -230,7 +230,8 @@ public void testExplain() throws IOException {
     for (ScoreDoc scoreDoc : searcher.search(testQuery, MAX_DOCS).scoreDocs) {
       Explanation explanation = searcher.explain(testQuery, scoreDoc.doc);
       assertTrue(explanation.getValue().doubleValue() > 0);
-      assertEquals("weight(phraseWildcard(title:\"t?e b* b*\") in 1) [AssertingSimilarity], result of:", explanation.getDescription());
+      assertTrue("Unexpected explanation \"" + explanation.getDescription() + "\"",
+          explanation.getDescription().startsWith("weight(phraseWildcard(title:\"t?e b* b*\")"));
     }
 
     // Verify that if we call PhraseWildcardQuery.PhraseWildcardWeight.scorer() twice,
diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java
index 64fe4c73d974..a95f0952495c 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java
@@ -444,6 +444,7 @@ public TokenStreamComponents createComponents(String fieldName) {
     }
 
     IndexReader r = w.getReader();
+    w.close();
     IndexSearcher s = newSearcher(r);
 
     // Used to match ANY using MultiPhraseQuery:
@@ -561,7 +562,7 @@ public TokenStreamComponents createComponents(String fieldName) {
       }
     }
 
-    IOUtils.close(w, r, dir, analyzer);
+    IOUtils.close(r, dir, analyzer);
   }
 
   private Set<Integer> toDocIDs(IndexSearcher s, TopDocs hits) throws IOException {
diff --git a/lucene/site/xsl/index.template.md b/lucene/site/xsl/index.template.md
new file mode 100644
index 000000000000..22447add3425
--- /dev/null
+++ b/lucene/site/xsl/index.template.md
@@ -0,0 +1,50 @@
+![Apache Lucene Logo](lucene_green_300.gif)
+
+# Apache Lucene™ ${project.version} Documentation
+
+Lucene is a Java full-text search engine. Lucene is not a complete application,
+but rather a code library and API that can easily be used to add search capabilities
+to applications.
+
+This is the official documentation for **Apache Lucene ${project.version}**.
+Additional documentation is available in the
+[Wiki](https://cwiki.apache.org/confluence/display/lucene).
+
+## Getting Started
+
+The following section is intended as a "getting started" guide. It has three
+audiences: first-time users looking to install Apache Lucene in their
+application; developers looking to modify or base the applications they develop
+on Lucene; and developers looking to become involved in and contribute to the
+development of Lucene. The goal is to help you "get started". It does not go into great depth
+on some of the conceptual or inner details of Lucene:
+
+* [Lucene demo, its usage, and sources](demo/overview-summary.html#overview.description):
+  Tutorial and walk-through of the command-line Lucene demo.
+* [Introduction to Lucene's APIs](core/overview-summary.html#overview.description):
+  High-level summary of the different Lucene packages.
+* [Analysis overview](core/org/apache/lucene/analysis/package-summary.html#package.description):
+  Introduction to Lucene's analysis API. See also the
+  [TokenStream consumer workflow](core/org/apache/lucene/analysis/TokenStream.html).
+
+## Reference Documents
+
+* [Changes](changes/Changes.html): List of changes in this release.
+* [System Requirements](SYSTEM_REQUIREMENTS.html): Minimum and supported Java versions.
+* [Migration Guide](MIGRATE.html): What changed in Lucene ${project.majorVersion()}; how to migrate code from
+  Lucene ${project.majorVersion()-1}.x.
+* [JRE Version Migration](JRE_VERSION_MIGRATION.html): Information about upgrading between major JRE versions.
+* [File Formats](core/org/apache/lucene/codecs/${defaultCodecPackage}/package-summary.html#package.description):
+  Guide to the supported index format used by Lucene. This can be customized by using
+  [an alternate codec](core/org/apache/lucene/codecs/package-summary.html#package.description).
+* [Search and Scoring in Lucene](core/org/apache/lucene/search/package-summary.html#package.description):
+  Introduction to how Lucene scores documents.
+* [Classic Scoring Formula](core/org/apache/lucene/search/similarities/TFIDFSimilarity.html):
+  Formula of Lucene's classic [Vector Space](https://en.wikipedia.org/wiki/Vector_Space_Model) implementation
+  (look [here](core/org/apache/lucene/search/similarities/package-summary.html#package.description) for other models).
+* [Classic QueryParser Syntax](queryparser/org/apache/lucene/queryparser/classic/package-summary.html#package.description):
+  Overview of the Classic QueryParser's syntax and features.
+
+## API Javadocs
+
+${projectList}
diff --git a/lucene/site/xsl/index.xsl b/lucene/site/xsl/index.xsl
index 88c93efd2a7b..b3be85f79b50 100644
--- a/lucene/site/xsl/index.xsl
+++ b/lucene/site/xsl/index.xsl
@@ -55,7 +55,7 @@
[XSL/HTML markup in this hunk was lost in extraction. The surviving text, "This is the official documentation for Apache Lucene ... Additional documentation is available in the Wiki." followed by the "Getting Started" heading, shows the changed line updates the Wiki link, consistent with the cwiki.apache.org URL used in index.template.md above.]
@@ -77,11 +77,11 @@
[XSL/HTML markup in this hunk was lost in extraction; the surviving "API Javadocs" heading indicates the javadoc link markup under that heading was updated.]
diff --git a/lucene/spatial-extras/build.gradle b/lucene/spatial-extras/build.gradle index 3994385e8359..2766f7b34a5c 100644 --- a/lucene/spatial-extras/build.gradle +++ b/lucene/spatial-extras/build.gradle @@ -17,6 +17,8 @@ apply plugin: 'java-library' +description = 'Geospatial search' + dependencies { api project(':lucene:core') api project(':lucene:spatial3d') diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/SpatialStrategy.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/SpatialStrategy.java index bd23a36ad928..06efda93be14 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/SpatialStrategy.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/SpatialStrategy.java @@ -136,7 +136,7 @@ public DoubleValuesSource makeDistanceValueSource(Point queryPoint) { public final DoubleValuesSource makeRecipDistanceValueSource(Shape queryShape) { Rectangle bbox = queryShape.getBoundingBox(); double diagonalDist = ctx.getDistCalc().distance( - ctx.makePoint(bbox.getMinX(), bbox.getMinY()), bbox.getMaxX(), bbox.getMaxY()); + ctx.getShapeFactory().pointXY(bbox.getMinX(), bbox.getMinY()), bbox.getMaxX(), bbox.getMaxY()); double distToEdge = diagonalDist * 0.5; float c = (float)distToEdge * 0.1f;//one tenth DoubleValuesSource distance = makeDistanceValueSource(queryShape.getCenter(), 1.0); diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxValueSource.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxValueSource.java index 1a2525ed05b1..0a84879c25e7 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxValueSource.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxValueSource.java @@ -54,7 +54,7 @@ public ShapeValues getValues(LeafReaderContext readerContext) throws IOException final NumericDocValues maxY = DocValues.getNumeric(reader, strategy.field_maxY); //reused - final Rectangle rect = strategy.getSpatialContext().makeRectangle(0,0,0,0); + final Rectangle rect = strategy.getSpatialContext().getShapeFactory().rect(0,0,0,0); return new ShapeValues() { diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/HeatmapFacetCounter.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/HeatmapFacetCounter.java index 6c5253e0d34a..5402dd591403 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/HeatmapFacetCounter.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/HeatmapFacetCounter.java @@ -100,7 +100,7 @@ public static Heatmap calcFacets(PrefixTreeStrategy strategy, IndexReaderContext //First get the rect of the cell at the bottom-left at depth facetLevel final SpatialPrefixTree grid = strategy.getGrid(); final SpatialContext ctx = grid.getSpatialContext(); - final Point cornerPt = ctx.makePoint(inputRect.getMinX(), inputRect.getMinY()); + final Point cornerPt = ctx.getShapeFactory().pointXY(inputRect.getMinX(), inputRect.getMinY()); final CellIterator cellIterator = grid.getTreeCellIterator(cornerPt, facetLevel); Cell cornerCell = null; while (cellIterator.hasNext()) { @@ -141,7 +141,7 @@ public static Heatmap calcFacets(PrefixTreeStrategy strategy, IndexReaderContext heatMaxY = worldRect.getMaxY(); } - final Heatmap heatmap = new Heatmap(columns, rows, ctx.makeRectangle(heatMinX, heatMaxX, heatMinY, heatMaxY)); + final Heatmap heatmap = new Heatmap(columns, rows, ctx.getShapeFactory().rect(heatMinX, heatMaxX, heatMinY, heatMaxY)); if (topAcceptDocs 
instanceof Bits.MatchNoBits) { return heatmap; // short-circuit } diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/WithinPrefixTreeQuery.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/WithinPrefixTreeQuery.java index f595f0e4639b..538fc7d0cf98 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/WithinPrefixTreeQuery.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/WithinPrefixTreeQuery.java @@ -107,13 +107,13 @@ protected Shape bufferShape(Shape shape, double distErr) { throw new IllegalArgumentException("distErr must be > 0"); SpatialContext ctx = grid.getSpatialContext(); if (shape instanceof Point) { - return ctx.makeCircle((Point)shape, distErr); + return ctx.getShapeFactory().circle((Point)shape, distErr); } else if (shape instanceof Circle) { Circle circle = (Circle) shape; double newDist = circle.getRadius() + distErr; if (ctx.isGeo() && newDist > 180) newDist = 180; - return ctx.makeCircle(circle.getCenter(), newDist); + return ctx.getShapeFactory().circle(circle.getCenter(), newDist); } else { Rectangle bbox = shape.getBoundingBox(); double newMinX = bbox.getMinX() - distErr; @@ -139,7 +139,7 @@ protected Shape bufferShape(Shape shape, double distErr) { newMinY = Math.max(newMinY, ctx.getWorldBounds().getMinY()); newMaxY = Math.min(newMaxY, ctx.getWorldBounds().getMaxY()); } - return ctx.makeRectangle(newMinX, newMaxX, newMinY, newMaxY); + return ctx.getShapeFactory().rect(newMinX, newMaxX, newMinY, newMaxY); } } diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/PackedQuadPrefixTree.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/PackedQuadPrefixTree.java index a7174a49eef2..dbb40549097c 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/PackedQuadPrefixTree.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/PackedQuadPrefixTree.java @@ -62,7 +62,9 @@ public static class Factory extends QuadPrefixTree.Factory { @Override protected SpatialPrefixTree newSPT() { PackedQuadPrefixTree tree = new PackedQuadPrefixTree(ctx, maxLevels != null ? 
maxLevels : MAX_LEVELS_POSSIBLE); - tree.robust = getVersion().onOrAfter(Version.LUCENE_8_3_0); + @SuppressWarnings("deprecation") + Version lucene830 = Version.LUCENE_8_3_0; + tree.robust = getVersion().onOrAfter(lucene830); return tree; } } @@ -88,7 +90,7 @@ public Cell getWorldCell() { public Cell getCell(Point p, int level) { if (!robust) { // old method List cells = new ArrayList<>(1); - buildNotRobustly(xmid, ymid, 0, cells, 0x0L, ctx.makePoint(p.getX(), p.getY()), level); + buildNotRobustly(xmid, ymid, 0, cells, 0x0L, ctx.getShapeFactory().pointXY(p.getX(), p.getY()), level); if (!cells.isEmpty()) { return cells.get(0);//note cells could be longer if p on edge } @@ -152,7 +154,7 @@ protected void checkBattenbergNotRobustly(byte quad, double cx, double cy, int l double w = levelW[level] / 2; double h = levelH[level] / 2; - SpatialRelation v = shape.relate(ctx.makeRectangle(cx - w, cx + w, cy - h, cy + h)); + SpatialRelation v = shape.relate(ctx.getShapeFactory().rect(cx - w, cx + w, cy - h, cy + h)); if (SpatialRelation.DISJOINT == v) { return; diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/QuadPrefixTree.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/QuadPrefixTree.java index e060302c2a9c..d1407e1a44f7 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/QuadPrefixTree.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/QuadPrefixTree.java @@ -53,7 +53,9 @@ protected int getLevelForDistance(double degrees) { protected SpatialPrefixTree newSPT() { QuadPrefixTree tree = new QuadPrefixTree(ctx, maxLevels != null ? maxLevels : MAX_LEVELS_POSSIBLE); - tree.robust = getVersion().onOrAfter(Version.LUCENE_8_3_0); + @SuppressWarnings("deprecation") + Version LUCENE_8_3_0 = Version.LUCENE_8_3_0; + tree.robust = getVersion().onOrAfter(LUCENE_8_3_0); return tree; } } @@ -142,7 +144,7 @@ public int getLevelForDistance(double dist) { public Cell getCell(Point p, int level) { if (!robust) { // old method List cells = new ArrayList<>(1); - buildNotRobustly(xmid, ymid, 0, cells, new BytesRef(maxLevels+1), ctx.makePoint(p.getX(),p.getY()), level); + buildNotRobustly(xmid, ymid, 0, cells, new BytesRef(maxLevels+1), ctx.getShapeFactory().pointXY(p.getX(),p.getY()), level); if (!cells.isEmpty()) { return cells.get(0);//note cells could be longer if p on edge } @@ -223,7 +225,7 @@ protected void checkBattenbergNotRobustly( double h = levelH[level] / 2; int strlen = str.length; - Rectangle rectangle = ctx.makeRectangle(cx - w, cx + w, cy - h, cy + h); + Rectangle rectangle = ctx.getShapeFactory().rect(cx - w, cx + w, cy - h, cy + h); SpatialRelation v = shape.relate(rectangle); if (SpatialRelation.CONTAINS == v) { str.bytes[str.length++] = (byte)c;//append @@ -354,7 +356,7 @@ protected Rectangle makeShape() { width = gridW; height = gridH; } - return ctx.makeRectangle(xmin, xmin + width, ymin, ymin + height); + return ctx.getShapeFactory().rect(xmin, xmin + width, ymin, ymin + height); } }//QuadCell } diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/spatial4j/Geo3dShapeFactory.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/spatial4j/Geo3dShapeFactory.java index 071c77517b6f..ee30d62a2913 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/spatial4j/Geo3dShapeFactory.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/spatial4j/Geo3dShapeFactory.java @@ -197,6 +197,7 @@ public Circle circle(Point point, double 
distance) { } @Override + @SuppressWarnings("deprecation") public Shape lineString(List list, double distance) { LineStringBuilder builder = lineString(); for (Point point : list) { @@ -207,6 +208,7 @@ public Shape lineString(List list, double distance) { } @Override + @SuppressWarnings("deprecation") public ShapeCollection multiShape(List list) { throw new UnsupportedOperationException(); } diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/DistanceStrategyTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/DistanceStrategyTest.java index 989252e3c6dd..97c1583d3437 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/DistanceStrategyTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/DistanceStrategyTest.java @@ -35,6 +35,7 @@ import org.locationtech.spatial4j.context.SpatialContext; import org.locationtech.spatial4j.shape.Point; import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.ShapeFactory; public class DistanceStrategyTest extends StrategyTestCase { @ParametersFactory(argumentFormatting = "strategy=%s") @@ -81,20 +82,21 @@ public DistanceStrategyTest(String suiteName, SpatialStrategy strategy) { @Test public void testDistanceOrder() throws IOException { - adoc("100", ctx.makePoint(2, 1)); - adoc("101", ctx.makePoint(-1, 4)); + ShapeFactory shapeFactory = ctx.getShapeFactory(); + adoc("100", shapeFactory.pointXY(2, 1)); + adoc("101", shapeFactory.pointXY(-1, 4)); adoc("103", (Shape)null);//test score for nothing commit(); //FYI distances are in docid order - checkDistValueSource(ctx.makePoint(4, 3), 2.8274937f, 5.0898066f, 180f); - checkDistValueSource(ctx.makePoint(0, 4), 3.6043684f, 0.9975641f, 180f); + checkDistValueSource(shapeFactory.pointXY(4, 3), 2.8274937f, 5.0898066f, 180f); + checkDistValueSource(shapeFactory.pointXY(0, 4), 3.6043684f, 0.9975641f, 180f); } @Test public void testRecipScore() throws IOException { - Point p100 = ctx.makePoint(2.02, 0.98); + Point p100 = ctx.getShapeFactory().pointXY(2.02, 0.98); adoc("100", p100); - Point p101 = ctx.makePoint(-1.001, 4.001); + Point p101 = ctx.getShapeFactory().pointXY(-1.001, 4.001); adoc("101", p101); adoc("103", (Shape)null);//test score for nothing commit(); diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/PortedSolr3Test.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/PortedSolr3Test.java index e9cd63d25f9c..244aae748597 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/PortedSolr3Test.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/PortedSolr3Test.java @@ -36,12 +36,15 @@ import org.locationtech.spatial4j.distance.DistanceUtils; import org.locationtech.spatial4j.shape.Point; import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.ShapeFactory; /** * Based off of Solr 3's SpatialFilterTest. 
*/ public class PortedSolr3Test extends StrategyTestCase { + private ShapeFactory shapeFactory; + @ParametersFactory(argumentFormatting = "strategy=%s") public static Iterable parameters() { List ctorArgs = new ArrayList<>(); @@ -74,23 +77,24 @@ public static Iterable parameters() { public PortedSolr3Test(String suiteName, SpatialStrategy strategy) { this.ctx = strategy.getSpatialContext(); this.strategy = strategy; + shapeFactory = ctx.getShapeFactory(); } private void setupDocs() throws Exception { super.deleteAll(); - adoc("1", ctx.makePoint(-79.9289094, 32.7693246)); - adoc("2", ctx.makePoint(-80.9289094, 33.7693246)); - adoc("3", ctx.makePoint(50.9289094, -32.7693246)); - adoc("4", ctx.makePoint(60.9289094, -50.7693246)); - adoc("5", ctx.makePoint(0, 0)); - adoc("6", ctx.makePoint(0.1, 0.1)); - adoc("7", ctx.makePoint(-0.1, -0.1)); - adoc("8", ctx.makePoint(179.9, 0)); - adoc("9", ctx.makePoint(-179.9, 0)); - adoc("10", ctx.makePoint(50, 89.9)); - adoc("11", ctx.makePoint(-130, 89.9)); - adoc("12", ctx.makePoint(50, -89.9)); - adoc("13", ctx.makePoint(-130, -89.9)); + adoc("1", shapeFactory.pointXY(-79.9289094, 32.7693246)); + adoc("2", shapeFactory.pointXY(-80.9289094, 33.7693246)); + adoc("3", shapeFactory.pointXY(50.9289094, -32.7693246)); + adoc("4", shapeFactory.pointXY(60.9289094, -50.7693246)); + adoc("5", shapeFactory.pointXY(0, 0)); + adoc("6", shapeFactory.pointXY(0.1, 0.1)); + adoc("7", shapeFactory.pointXY(-0.1, -0.1)); + adoc("8", shapeFactory.pointXY(179.9, 0)); + adoc("9", shapeFactory.pointXY(-179.9, 0)); + adoc("10", shapeFactory.pointXY(50, 89.9)); + adoc("11", shapeFactory.pointXY(-130, 89.9)); + adoc("12", shapeFactory.pointXY(50, -89.9)); + adoc("13", shapeFactory.pointXY(-130, -89.9)); commit(); } @@ -100,39 +104,39 @@ public void testIntersections() throws Exception { setupDocs(); //Try some edge cases //NOTE: 2nd arg is distance in kilometers - checkHitsCircle(ctx.makePoint(1, 1), 175, 3, 5, 6, 7); - checkHitsCircle(ctx.makePoint(179.8, 0), 200, 2, 8, 9); - checkHitsCircle(ctx.makePoint(50, 89.8), 200, 2, 10, 11);//this goes over the north pole - checkHitsCircle(ctx.makePoint(50, -89.8), 200, 2, 12, 13);//this goes over the south pole + checkHitsCircle(shapeFactory.pointXY(1, 1), 175, 3, 5, 6, 7); + checkHitsCircle(shapeFactory.pointXY(179.8, 0), 200, 2, 8, 9); + checkHitsCircle(shapeFactory.pointXY(50, 89.8), 200, 2, 10, 11);//this goes over the north pole + checkHitsCircle(shapeFactory.pointXY(50, -89.8), 200, 2, 12, 13);//this goes over the south pole //try some normal cases - checkHitsCircle(ctx.makePoint(-80.0, 33.0), 300, 2); + checkHitsCircle(shapeFactory.pointXY(-80.0, 33.0), 300, 2); //large distance - checkHitsCircle(ctx.makePoint(1, 1), 5000, 3, 5, 6, 7); + checkHitsCircle(shapeFactory.pointXY(1, 1), 5000, 3, 5, 6, 7); //Because we are generating a box based on the west/east longitudes and the south/north latitudes, which then //translates to a range query, which is slightly more inclusive. Thus, even though 0.0 is 15.725 kms away, //it will be included, b/zScaling of the box calculation. 
- checkHitsBBox(ctx.makePoint(0.1, 0.1), 15, 2, 5, 6); + checkHitsBBox(shapeFactory.pointXY(0.1, 0.1), 15, 2, 5, 6); //try some more deleteAll(); - adoc("14", ctx.makePoint(5, 0)); - adoc("15", ctx.makePoint(15, 0)); + adoc("14", shapeFactory.pointXY(5, 0)); + adoc("15", shapeFactory.pointXY(15, 0)); //3000KM from 0,0, see http://www.movable-type.co.uk/scripts/latlong.html - adoc("16", ctx.makePoint(19.79750, 18.71111)); - adoc("17", ctx.makePoint(-95.436643, 44.043900)); + adoc("16", shapeFactory.pointXY(19.79750, 18.71111)); + adoc("17", shapeFactory.pointXY(-95.436643, 44.043900)); commit(); - checkHitsCircle(ctx.makePoint(0, 0), 1000, 1, 14); - checkHitsCircle(ctx.makePoint(0, 0), 2000, 2, 14, 15); - checkHitsBBox(ctx.makePoint(0, 0), 3000, 3, 14, 15, 16); - checkHitsCircle(ctx.makePoint(0, 0), 3001, 3, 14, 15, 16); - checkHitsCircle(ctx.makePoint(0, 0), 3000.1, 3, 14, 15, 16); + checkHitsCircle(shapeFactory.pointXY(0, 0), 1000, 1, 14); + checkHitsCircle(shapeFactory.pointXY(0, 0), 2000, 2, 14, 15); + checkHitsBBox(shapeFactory.pointXY(0, 0), 3000, 3, 14, 15, 16); + checkHitsCircle(shapeFactory.pointXY(0, 0), 3001, 3, 14, 15, 16); + checkHitsCircle(shapeFactory.pointXY(0, 0), 3000.1, 3, 14, 15, 16); //really fine grained distance and reflects some of the vagaries of how we are calculating the box - checkHitsCircle(ctx.makePoint(-96.789603, 43.517030), 109, 0); + checkHitsCircle(shapeFactory.pointXY(-96.789603, 43.517030), 109, 0); // falls outside of the real distance, but inside the bounding box - checkHitsCircle(ctx.makePoint(-96.789603, 43.517030), 110, 0); - checkHitsBBox(ctx.makePoint(-96.789603, 43.517030), 110, 1, 17); + checkHitsCircle(shapeFactory.pointXY(-96.789603, 43.517030), 110, 0); + checkHitsBBox(shapeFactory.pointXY(-96.789603, 43.517030), 110, 1, 17); } //---- these are similar to Solr test methods @@ -147,7 +151,7 @@ private void checkHitsBBox(Point pt, double distKM, int assertNumFound, int... a private void _checkHits(boolean bbox, Point pt, double distKM, int assertNumFound, int... 
assertIds) { SpatialOperation op = SpatialOperation.Intersects; double distDEG = DistanceUtils.dist2Degrees(distKM, DistanceUtils.EARTH_MEAN_RADIUS_KM); - Shape shape = ctx.makeCircle(pt, distDEG); + Shape shape = shapeFactory.circle(pt, distDEG); if (bbox) shape = shape.getBoundingBox(); diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/QueryEqualsHashCodeTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/QueryEqualsHashCodeTest.java index f52ef2b444d6..53da280c12cb 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/QueryEqualsHashCodeTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/QueryEqualsHashCodeTest.java @@ -103,12 +103,12 @@ private void testEqualsHashcode(SpatialArgs args1, SpatialArgs args2, ObjGenerat } private SpatialArgs makeArgs1() { - final Shape shape1 = ctx.makeRectangle(0, 0, 10, 10); + final Shape shape1 = ctx.getShapeFactory().rect(0, 0, 10, 10); return new SpatialArgs(predicate, shape1); } private SpatialArgs makeArgs2() { - final Shape shape2 = ctx.makeRectangle(0, 0, 20, 20); + final Shape shape2 = ctx.getShapeFactory().rect(0, 0, 20, 20); return new SpatialArgs(predicate, shape2); } diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialExample.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialExample.java index f810ab74438a..f01c8199a398 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialExample.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialExample.java @@ -108,14 +108,14 @@ private void indexPoints() throws Exception { //Spatial4j is x-y order for arguments indexWriter.addDocument(newSampleDocument( - 2, ctx.makePoint(-80.93, 33.77))); + 2, ctx.getShapeFactory().pointXY(-80.93, 33.77))); //Spatial4j has a WKT parser which is also "x y" order indexWriter.addDocument(newSampleDocument( 4, ctx.readShapeFromWkt("POINT(60.9289094 -50.7693246)"))); indexWriter.addDocument(newSampleDocument( - 20, ctx.makePoint(0.1,0.1), ctx.makePoint(0, 0))); + 20, ctx.getShapeFactory().pointXY(0.1,0.1), ctx.getShapeFactory().pointXY(0, 0))); indexWriter.close(); } @@ -149,7 +149,7 @@ private void search() throws Exception { //Search with circle //note: SpatialArgs can be parsed from a string SpatialArgs args = new SpatialArgs(SpatialOperation.Intersects, - ctx.makeCircle(-80.0, 33.0, DistanceUtils.dist2Degrees(200, DistanceUtils.EARTH_MEAN_RADIUS_KM))); + ctx.getShapeFactory().circle(-80.0, 33.0, DistanceUtils.dist2Degrees(200, DistanceUtils.EARTH_MEAN_RADIUS_KM))); Query query = strategy.makeQuery(args); TopDocs docs = indexSearcher.search(query, 10, idSort); assertDocMatchedIds(indexSearcher, docs, 2); @@ -168,7 +168,7 @@ private void search() throws Exception { } //--Match all, order by distance ascending { - Point pt = ctx.makePoint(60, -50); + Point pt = ctx.getShapeFactory().pointXY(60, -50); DoubleValuesSource valueSource = strategy.makeDistanceValueSource(pt, DistanceUtils.DEG_TO_KM);//the distance (in km) Sort distSort = new Sort(valueSource.getSortField(false)).rewrite(indexSearcher);//false=asc dist TopDocs docs = indexSearcher.search(new MatchAllDocsQuery(), 10, distSort); @@ -183,7 +183,7 @@ private void search() throws Exception { //demo arg parsing { SpatialArgs args = new SpatialArgs(SpatialOperation.Intersects, - ctx.makeCircle(-80.0, 33.0, 1)); + ctx.getShapeFactory().circle(-80.0, 33.0, 1)); SpatialArgs args2 = new SpatialArgsParser().parse("Intersects(BUFFER(POINT(-80 33),1))", ctx); 
assertEquals(args.toString(),args2.toString()); } diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java index 55ad1d7e4660..e875ee2f9d7a 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java @@ -41,7 +41,6 @@ import static com.carrotsearch.randomizedtesting.RandomizedTest.randomDouble; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomGaussian; -import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween; /** A base test class for spatial lucene. It's mostly Lucene generic. */ @@ -121,7 +120,7 @@ protected SearchResults executeQuery(Query query, int numDocs) { protected Point randomPoint() { final Rectangle WB = ctx.getWorldBounds(); - return ctx.makePoint( + return ctx.getShapeFactory().pointXY( randomIntBetween((int) WB.getMinX(), (int) WB.getMaxX()), randomIntBetween((int) WB.getMinY(), (int) WB.getMaxY())); } @@ -143,7 +142,7 @@ protected Rectangle randomRectangle(Rectangle bounds) { double yMin = yNewStartAndHeight[0]; double yMax = yMin + yNewStartAndHeight[1]; - return ctx.makeRectangle(xMin, xMax, yMin, yMax); + return ctx.getShapeFactory().rect(xMin, xMax, yMin, yMax); } /** Returns new minStart and new length that is inside the range specified by the arguments. */ @@ -154,7 +153,7 @@ protected double[] randomSubRange(double boundStart, double boundLen) { int intBoundEnd = (int) (boundStart + boundLen); int intBoundLen = intBoundEnd - intBoundStart; int newLen = (int) randomGaussianMeanMax(intBoundLen / 16.0, intBoundLen); - int newStart = intBoundStart + randomInt(intBoundLen - newLen); + int newStart = intBoundStart + randomIntBetween(0, intBoundLen - newLen); return new double[]{newStart, newLen}; } else { // (no int rounding) double newLen = randomGaussianMeanMax(boundLen / 16, boundLen); diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/StrategyTestCase.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/StrategyTestCase.java index 199ebcf01a74..2fc68b442421 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/StrategyTestCase.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/StrategyTestCase.java @@ -67,7 +67,7 @@ public abstract class StrategyTestCase extends SpatialTestCase { protected boolean storeShape = true; protected void executeQueries(SpatialMatchConcern concern, String... 
testQueryFile) throws IOException { - log.info("testing queried for strategy "+strategy); + log.info("testing queried for strategy "+strategy); // logOk for( String path : testQueryFile ) { Iterator testQueryIterator = getTestQueries(path, ctx); runTestQueries(testQueryIterator, concern); diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java index 3a94bf006156..bca89b515918 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java @@ -52,12 +52,12 @@ protected Shape randomIndexedShape() { if (ctx.isGeo() && (deltaLeft != 0 || deltaRight != 0)) { //if geo & doesn't world-wrap, we shift randomly to potentially cross dateline int shift = nextIntInclusive(360); - return ctx.makeRectangle( + return ctx.getShapeFactory().rect( DistanceUtils.normLonDEG(world.getMinX() + deltaLeft + shift), DistanceUtils.normLonDEG(world.getMaxX() - deltaRight + shift), world.getMinY() + deltaBottom, world.getMaxY() - deltaTop); } else { - return ctx.makeRectangle( + return ctx.getShapeFactory().rect( world.getMinX() + deltaLeft, world.getMaxX() - deltaRight, world.getMinY() + deltaBottom, world.getMaxY() - deltaTop); } @@ -110,45 +110,45 @@ public void testOperations() throws IOException { public void testIntersectsBugDatelineEdge() throws IOException { setupGeo(); testOperation( - ctx.makeRectangle(160, 180, -10, 10), + ctx.getShapeFactory().rect(160, 180, -10, 10), SpatialOperation.Intersects, - ctx.makeRectangle(-180, -160, -10, 10), true); + ctx.getShapeFactory().rect(-180, -160, -10, 10), true); } @Test public void testIntersectsWorldDatelineEdge() throws IOException { setupGeo(); testOperation( - ctx.makeRectangle(-180, 180, -10, 10), + ctx.getShapeFactory().rect(-180, 180, -10, 10), SpatialOperation.Intersects, - ctx.makeRectangle(180, 180, -10, 10), true); + ctx.getShapeFactory().rect(180, 180, -10, 10), true); } @Test public void testWithinBugDatelineEdge() throws IOException { setupGeo(); testOperation( - ctx.makeRectangle(180, 180, -10, 10), + ctx.getShapeFactory().rect(180, 180, -10, 10), SpatialOperation.IsWithin, - ctx.makeRectangle(-180, -100, -10, 10), true); + ctx.getShapeFactory().rect(-180, -100, -10, 10), true); } @Test public void testContainsBugDatelineEdge() throws IOException { setupGeo(); testOperation( - ctx.makeRectangle(-180, -150, -10, 10), + ctx.getShapeFactory().rect(-180, -150, -10, 10), SpatialOperation.Contains, - ctx.makeRectangle(180, 180, -10, 10), true); + ctx.getShapeFactory().rect(180, 180, -10, 10), true); } @Test public void testWorldContainsXDL() throws IOException { setupGeo(); testOperation( - ctx.makeRectangle(-180, 180, -10, 10), + ctx.getShapeFactory().rect(-180, 180, -10, 10), SpatialOperation.Contains, - ctx.makeRectangle(170, -170, -10, 10), true); + ctx.getShapeFactory().rect(170, -170, -10, 10), true); } /** See https://github.com/spatial4j/spatial4j/issues/85 */ @@ -161,8 +161,8 @@ public void testAlongDatelineOppositeSign() throws IOException { //both on dateline but expressed using opposite signs setupGeo(); - final Rectangle indexedShape = ctx.makeRectangle(180, 180, -10, 10); - final Rectangle queryShape = ctx.makeRectangle(-180, -180, -20, 20); + final Rectangle indexedShape = ctx.getShapeFactory().rect(180, 180, -10, 10); + final Rectangle queryShape = ctx.getShapeFactory().rect(-180, -180, -20, 20); 
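The TestBBoxStrategy changes above keep the existing dateline handling: shifted boxes are renormalized with DistanceUtils.normLonDEG, and a geo rectangle whose minX ends up greater than its maxX is read as crossing the dateline (testWorldContainsXDL builds rect(170, -170, ...) exactly this way). A small sketch of that wrap-around, under the same spatial4j assumptions:

    import org.locationtech.spatial4j.context.SpatialContext;
    import org.locationtech.spatial4j.distance.DistanceUtils;
    import org.locationtech.spatial4j.shape.Rectangle;
    import org.locationtech.spatial4j.shape.ShapeFactory;

    public class DatelineRectSketch {
      public static void main(String[] args) {
        ShapeFactory shapeFactory = SpatialContext.GEO.getShapeFactory();
        // normLonDEG wraps longitudes back into [-180, 180], so a shifted box
        // can come out with minX > maxX, which spatial4j reads as dateline-crossing.
        Rectangle rect = shapeFactory.rect(
            DistanceUtils.normLonDEG(170),  // stays 170
            DistanceUtils.normLonDEG(190),  // wraps to -170
            -10, 10);
        System.out.println(rect.getCrossesDateLine()); // true: the box wraps around
      }
    }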
final SpatialOperation operation = SpatialOperation.IsWithin; final boolean match = true;//yes it is within @@ -277,7 +277,7 @@ public void testOverlapRatio() throws IOException { } private Rectangle shiftedRect(double minX, double maxX, double minY, double maxY, int xShift) { - return ctx.makeRectangle( + return ctx.getShapeFactory().rect( DistanceUtils.normLonDEG(minX + xShift), DistanceUtils.normLonDEG(maxX + xShift), minY, maxY); @@ -286,7 +286,7 @@ private Rectangle shiftedRect(double minX, double maxX, double minY, double maxY public void testAreaValueSource() throws IOException { BBoxStrategy bboxStrategy = setupNeedsDocValuesOnly(); - adoc("100", ctx.makeRectangle(0, 20, 40, 80)); + adoc("100", ctx.getShapeFactory().rect(0, 20, 40, 80)); adoc("999", (Shape) null); commit(); checkValueSource(new ShapeAreaValueSource(bboxStrategy.makeShapeValueSource(), ctx, false, 1.0), diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/DateNRStrategyTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/DateNRStrategyTest.java index 54296da040c1..2571afd6c99f 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/DateNRStrategyTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/DateNRStrategyTest.java @@ -41,7 +41,7 @@ public class DateNRStrategyTest extends RandomSpatialOpStrategyTestCase { @Before public void setUp() throws Exception { super.setUp(); - tree = DateRangePrefixTree.INSTANCE; + tree = new DateRangePrefixTree(DateRangePrefixTree.DEFAULT_CAL); strategy = new NumberRangePrefixTreeStrategy(tree, "dateRange"); ((NumberRangePrefixTreeStrategy)strategy).setPointsOnly(randomInt() % 5 == 0); Calendar tmpCal = tree.newCal(); diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java index a38f5b6e7a78..357a48774cab 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java @@ -37,6 +37,7 @@ import org.locationtech.spatial4j.shape.Point; import org.locationtech.spatial4j.shape.Rectangle; import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.ShapeFactory; import org.locationtech.spatial4j.shape.SpatialRelation; import org.locationtech.spatial4j.shape.impl.RectangleImpl; @@ -46,6 +47,7 @@ public class HeatmapFacetCounterTest extends StrategyTestCase { SpatialPrefixTree grid; + ShapeFactory shapeFactory; int cellsValidated; int cellValidatedNonZero; @@ -55,6 +57,7 @@ public void setUp() throws Exception { super.setUp(); cellsValidated = cellValidatedNonZero = 0; ctx = SpatialContext.GEO; + shapeFactory = ctx.getShapeFactory(); grid = new QuadPrefixTree(ctx, randomIntBetween(1, 8)); strategy = new RecursivePrefixTreeStrategy(grid, getTestClass().getSimpleName()); if (rarely()) { @@ -64,20 +67,20 @@ public void setUp() throws Exception { @After public void after() { - log.info("Validated " + cellsValidated + " cells, " + cellValidatedNonZero + " non-zero"); + log.info("Validated " + cellsValidated + " cells, " + cellValidatedNonZero + " non-zero"); // logOK } @Test public void testStatic() throws IOException { //Some specific tests (static, not random). 
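DateNRStrategyTest above (and NumberRangeFacetsTest below) stop using the deprecated DateRangePrefixTree.INSTANCE singleton and construct the tree explicitly. A sketch of the constructor-based setup; everything here appears in the hunks except the wrapper class and the sample calendar values, which are illustrative:

    import java.util.Calendar;

    import org.apache.lucene.spatial.prefix.NumberRangePrefixTreeStrategy;
    import org.apache.lucene.spatial.prefix.tree.DateRangePrefixTree;

    public class DateTreeSketch {
      public static void main(String[] args) {
        // was: tree = DateRangePrefixTree.INSTANCE;
        DateRangePrefixTree tree = new DateRangePrefixTree(DateRangePrefixTree.DEFAULT_CAL);
        NumberRangePrefixTreeStrategy strategy = new NumberRangePrefixTreeStrategy(tree, "dateRange");
        Calendar cal = tree.newCal(); // a cleared Calendar templated on the tree's calendar
        cal.set(Calendar.YEAR, 2020);
        cal.set(Calendar.MONTH, Calendar.MAY);
        System.out.println(strategy.getFieldName() + ": " + tree.toShape(cal));
      }
    }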
- adoc("0", ctx.makeRectangle(179.8, -170, -90, -80));//barely crosses equator - adoc("1", ctx.makePoint(-180, -85));//a pt within the above rect - adoc("2", ctx.makePoint(172, -85));//a pt to left of rect + adoc("0", shapeFactory.rect(179.8, -170, -90, -80));//barely crosses equator + adoc("1", shapeFactory.pointXY(-180, -85));//a pt within the above rect + adoc("2", shapeFactory.pointXY(172, -85));//a pt to left of rect commit(); - validateHeatmapResultLoop(ctx.makeRectangle(+170, +180, -90, -85), 1, 100); - validateHeatmapResultLoop(ctx.makeRectangle(-180, -160, -89, -50), 1, 100); - validateHeatmapResultLoop(ctx.makeRectangle(179, 179, -89, -50), 1, 100);//line + validateHeatmapResultLoop(shapeFactory.rect(+170, +180, -90, -85), 1, 100); + validateHeatmapResultLoop(shapeFactory.rect(-180, -160, -89, -50), 1, 100); + validateHeatmapResultLoop(shapeFactory.rect(179, 179, -89, -50), 1, 100);//line // We could test anything and everything at this point... I prefer we leave that to random testing and then // add specific tests if we find a bug. } @@ -86,9 +89,9 @@ public void testStatic() throws IOException { public void testLucene7291Dateline() throws IOException { grid = new QuadPrefixTree(ctx, 2); // only 2, and we wind up with some big leaf cells strategy = new RecursivePrefixTreeStrategy(grid, getTestClass().getSimpleName()); - adoc("0", ctx.makeRectangle(-102, -83, 43, 52)); + adoc("0", shapeFactory.rect(-102, -83, 43, 52)); commit(); - validateHeatmapResultLoop(ctx.makeRectangle(179, -179, 62, 63), 2, 100);// HM crosses dateline + validateHeatmapResultLoop(shapeFactory.rect(179, -179, 62, 63), 2, 100);// HM crosses dateline } @Test @@ -98,12 +101,13 @@ public void testQueryCircle() throws IOException { spatialContextFactory.geo = false; spatialContextFactory.worldBounds = new RectangleImpl(-90, 90, -90, 90, null); ctx = spatialContextFactory.newSpatialContext(); + shapeFactory = ctx.getShapeFactory(); final int LEVEL = 4; grid = new QuadPrefixTree(ctx, LEVEL); strategy = new RecursivePrefixTreeStrategy(grid, getTestClass().getSimpleName()); - Circle circle = ctx.makeCircle(0, 0, 89); - adoc("0", ctx.makePoint(88, 88));//top-right, inside bbox of circle but not the circle - adoc("1", ctx.makePoint(0, 0));//clearly inside; dead center in fact + Circle circle = shapeFactory.circle(0, 0, 89); + adoc("0", shapeFactory.pointXY(88, 88));//top-right, inside bbox of circle but not the circle + adoc("1", shapeFactory.pointXY(0, 0));//clearly inside; dead center in fact commit(); final HeatmapFacetCounter.Heatmap heatmap = HeatmapFacetCounter.calcFacets( (PrefixTreeStrategy) strategy, indexSearcher.getTopReaderContext(), null, @@ -178,7 +182,7 @@ public void testRandom() throws IOException { // and once with dateline wrap if (rect.getWidth() > 0) { double shift = random().nextDouble() % rect.getWidth(); - queryHeatmapRecursive(ctx.makeRectangle( + queryHeatmapRecursive(shapeFactory.rect( DistanceUtils.normLonDEG(rect.getMinX() - shift), DistanceUtils.normLonDEG(rect.getMaxX() - shift), rect.getMinY(), rect.getMaxY()), @@ -231,7 +235,7 @@ private void validateHeatmapResult(Rectangle inputRange, int facetLevel, Heatmap final int facetCount = heatmap.getCount(c, r); double x = DistanceUtils.normLonDEG(heatRect.getMinX() + c * cellWidth + cellWidth / 2); double y = DistanceUtils.normLatDEG(heatRect.getMinY() + r * cellHeight + cellHeight / 2); - Point pt = ctx.makePoint(x, y); + Point pt = shapeFactory.pointXY(x, y); assertEquals(countMatchingDocsAtLevel(pt, facetLevel), facetCount); } } @@ -258,4 
+262,4 @@ private Shape randomIndexedShape() { return randomRectangle(); } } -} \ No newline at end of file +} diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/JtsPolygonTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/JtsPolygonTest.java index 7d029f10e974..47a92e678a21 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/JtsPolygonTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/JtsPolygonTest.java @@ -99,10 +99,10 @@ public void testBadPrefixTreePrune() throws Exception { } addDocument(doc); - Point upperleft = ctx.makePoint(-122.88, 48.54); - Point lowerright = ctx.makePoint(-122.82, 48.62); + Point upperleft = ctx.getShapeFactory().pointXY(-122.88, 48.54); + Point lowerright = ctx.getShapeFactory().pointXY(-122.82, 48.62); - Query query = strategy.makeQuery(new SpatialArgs(SpatialOperation.Intersects, ctx.makeRectangle(upperleft, lowerright))); + Query query = strategy.makeQuery(new SpatialArgs(SpatialOperation.Intersects, ctx.getShapeFactory().rect(upperleft, lowerright))); commit(); TopDocs search = indexSearcher.search(query, 10); diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/NumberRangeFacetsTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/NumberRangeFacetsTest.java index 9c128836f92a..3ddf0c78ba2c 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/NumberRangeFacetsTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/NumberRangeFacetsTest.java @@ -42,7 +42,6 @@ import org.junit.Test; import org.locationtech.spatial4j.shape.Shape; -import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween; public class NumberRangeFacetsTest extends StrategyTestCase { @@ -55,7 +54,7 @@ public class NumberRangeFacetsTest extends StrategyTestCase { @Before public void setUp() throws Exception { super.setUp(); - tree = DateRangePrefixTree.INSTANCE; + tree = new DateRangePrefixTree(DateRangePrefixTree.DEFAULT_CAL); strategy = new NumberRangePrefixTreeStrategy(tree, "dateRange"); Calendar tmpCal = tree.newCal(); randomCalWindowField = randomIntBetween(1, Calendar.ZONE_OFFSET - 1);//we're not allowed to add zone offset @@ -104,9 +103,9 @@ public void test() throws IOException { calFieldFacet--; } final Calendar leftCal = randomCalendar(); - leftCal.add(calFieldFacet, -1 * randomInt(1000)); + leftCal.add(calFieldFacet, -1 * randomIntBetween(0, 1000)); Calendar rightCal = (Calendar) leftCal.clone(); - rightCal.add(calFieldFacet, randomInt(2000)); + rightCal.add(calFieldFacet, randomIntBetween(0, 2000)); // Pick facet detail level based on cal field. 
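The randomInt changes in this file and in the two strategy test cases nearby all follow one pattern: the static import of RandomizedTest.randomInt(n) is removed and each call is replaced by randomIntBetween(0, n), which draws uniformly from the same inclusive range [0, n]. A sketch of the replacement inside a randomized test (class and method names are illustrative):

    import org.apache.lucene.util.LuceneTestCase;

    import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween;

    public class RandomIntMigrationSketch extends LuceneTestCase {
      public void testInclusiveRange() {
        int maxLevels = 10;
        // was: int scanLevel = randomInt(maxLevels);
        int scanLevel = randomIntBetween(0, maxLevels); // inclusive on both ends
        assertTrue(scanLevel >= 0 && scanLevel <= maxLevels);
      }
    }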
int detailLevel = tree.getTreeLevelForCalendarField(calFieldFacet); if (detailLevel < 0) {//no exact match @@ -126,7 +125,7 @@ public void test() throws IOException { acceptFieldIds.add(i); } Collections.shuffle(acceptFieldIds, random()); - acceptFieldIds = acceptFieldIds.subList(0, randomInt(acceptFieldIds.size())); + acceptFieldIds = acceptFieldIds.subList(0, randomIntBetween(0, acceptFieldIds.size())); if (!acceptFieldIds.isEmpty()) { List terms = new ArrayList<>(); for (Integer acceptDocId : acceptFieldIds) { @@ -237,7 +236,7 @@ public ScoreMode scoreMode() { private void preQueryHavoc() { if (strategy instanceof RecursivePrefixTreeStrategy) { RecursivePrefixTreeStrategy rpts = (RecursivePrefixTreeStrategy) strategy; - int scanLevel = randomInt(rpts.getGrid().getMaxLevels()); + int scanLevel = randomIntBetween(0, rpts.getGrid().getMaxLevels()); rpts.setPrefixGridScanLevel(scanLevel); } } diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpFuzzyPrefixTreeTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpFuzzyPrefixTreeTest.java index c5b145f97c92..97f3b382c382 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpFuzzyPrefixTreeTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpFuzzyPrefixTreeTest.java @@ -87,7 +87,7 @@ public void setupGrid(int maxLevels) throws IOException { ((PrefixTreeStrategy) strategy).setPointsOnly(true); } - log.info("Strategy: " + strategy.toString()); + log.info("Strategy: " + strategy.toString()); // logOk } private void setupCtx2D(SpatialContext ctx) { diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpStrategyTestCase.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpStrategyTestCase.java index 22c58393f5f8..8aebafe877a9 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpStrategyTestCase.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpStrategyTestCase.java @@ -28,7 +28,6 @@ import org.apache.lucene.spatial.query.SpatialArgs; import org.apache.lucene.spatial.query.SpatialOperation; -import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween; /** Base test harness, ideally for SpatialStrategy impls that have exact results @@ -130,7 +129,7 @@ private void fail(String label, String id, List indexedShapes, Shape quer protected void preQueryHavoc() { if (strategy instanceof RecursivePrefixTreeStrategy) { RecursivePrefixTreeStrategy rpts = (RecursivePrefixTreeStrategy) strategy; - int scanLevel = randomInt(rpts.getGrid().getMaxLevels()); + int scanLevel = randomIntBetween(0, rpts.getGrid().getMaxLevels()); rpts.setPrefixGridScanLevel(scanLevel); } } diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestRecursivePrefixTreeStrategy.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestRecursivePrefixTreeStrategy.java index 18cc536e3399..14e2140e3521 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestRecursivePrefixTreeStrategy.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestRecursivePrefixTreeStrategy.java @@ -70,11 +70,11 @@ public void testOneMeterPrecision() { public void testPrecision() throws IOException{ 
init(GeohashPrefixTree.getMaxLevelsPossible()); - Point iPt = ctx.makePoint(2.8028712999999925, 48.3708044);//lon, lat + Point iPt = ctx.getShapeFactory().pointXY(2.8028712999999925, 48.3708044);//lon, lat addDocument(newDoc("iPt", iPt)); commit(); - Point qPt = ctx.makePoint(2.4632387000000335, 48.6003516); + Point qPt = ctx.getShapeFactory().pointXY(2.4632387000000335, 48.6003516); final double KM2DEG = DistanceUtils.dist2Degrees(1, DistanceUtils.EARTH_MEAN_RADIUS_KM); final double DEG2KM = 1 / KM2DEG; @@ -101,7 +101,7 @@ public void testPrecision() throws IOException{ } private SpatialArgs q(Point pt, double distDEG, double distErrPct) { - Shape shape = ctx.makeCircle(pt, distDEG); + Shape shape = ctx.getShapeFactory().circle(pt, distDEG); SpatialArgs args = new SpatialArgs(SpatialOperation.Intersects,shape); args.setDistErrPct(distErrPct); return args; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeSphereModelRectRelationTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeSphereModelRectRelationTest.java index fb4f67890e17..3e61661f1b1d 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeSphereModelRectRelationTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeSphereModelRectRelationTest.java @@ -71,7 +71,7 @@ public void testFailure2_LUCENE6475() { GeoCircle geo3dCircle = GeoCircleFactory.makeGeoCircle(planetModel, 1.6282053147165243E-4 * RADIANS_PER_DEGREE, -70.1600629789353 * RADIANS_PER_DEGREE, 86 * RADIANS_PER_DEGREE); Geo3dShape geo3dShape = new Geo3dShape<>(geo3dCircle, ctx); - Rectangle rect = ctx.makeRectangle(-118, -114, -2.0, 32.0); + Rectangle rect = ctx.getShapeFactory().rect(-118, -114, -2.0, 32.0); assertTrue(geo3dShape.relate(rect).intersects()); // thus the bounding box must intersect too assertTrue(geo3dShape.getBoundingBox().relate(rect).intersects()); diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/RandomizedShapeTestCase.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/RandomizedShapeTestCase.java index 0c18f5d7ddea..eb3563bad1fb 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/RandomizedShapeTestCase.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/RandomizedShapeTestCase.java @@ -17,40 +17,17 @@ package org.apache.lucene.spatial.spatial4j; import org.locationtech.spatial4j.context.SpatialContext; -import org.locationtech.spatial4j.distance.DistanceUtils; -import org.locationtech.spatial4j.shape.Circle; -import org.locationtech.spatial4j.shape.Point; -import org.locationtech.spatial4j.shape.Rectangle; -import org.locationtech.spatial4j.shape.Shape; -import org.locationtech.spatial4j.shape.SpatialRelation; -import org.locationtech.spatial4j.shape.impl.Range; - -import static org.locationtech.spatial4j.shape.SpatialRelation.CONTAINS; -import static org.locationtech.spatial4j.shape.SpatialRelation.WITHIN; import org.apache.lucene.util.LuceneTestCase; -import static com.carrotsearch.randomizedtesting.RandomizedTest.*; - /** * A base test class with utility methods to help test shapes. * Extends from RandomizedTest. 
*/ public abstract class RandomizedShapeTestCase extends LuceneTestCase { - protected static final double EPS = 10e-9; - protected SpatialContext ctx;//needs to be set ASAP - /** Used to reduce the space of numbers to increase the likelihood that - * random numbers become equivalent, and thus trigger different code paths. - * Also makes some random shapes easier to manually examine. - */ - protected final double DIVISIBLE = 2;// even coordinates; (not always used) - - protected RandomizedShapeTestCase() { - } - public RandomizedShapeTestCase(SpatialContext ctx) { this.ctx = ctx; } @@ -71,218 +48,8 @@ public static void checkShapesImplementEquals( Class[] classes ) { } } - //These few norm methods normalize the arguments for creating a shape to - // account for the dateline. Some tests loop past the dateline or have offsets - // that go past it and it's easier to have them coded that way and correct for - // it here. These norm methods should be used when needed, not frivolously. - - protected double normX(double x) { - return ctx.isGeo() ? DistanceUtils.normLonDEG(x) : x; - } - - protected double normY(double y) { - return ctx.isGeo() ? DistanceUtils.normLatDEG(y) : y; - } - - protected Rectangle makeNormRect(double minX, double maxX, double minY, double maxY) { - if (ctx.isGeo()) { - if (Math.abs(maxX - minX) >= 360) { - minX = -180; - maxX = 180; - } else { - minX = DistanceUtils.normLonDEG(minX); - maxX = DistanceUtils.normLonDEG(maxX); - } - - } else { - if (maxX < minX) { - double t = minX; - minX = maxX; - maxX = t; - } - minX = boundX(minX, ctx.getWorldBounds()); - maxX = boundX(maxX, ctx.getWorldBounds()); - } - if (maxY < minY) { - double t = minY; - minY = maxY; - maxY = t; - } - minY = boundY(minY, ctx.getWorldBounds()); - maxY = boundY(maxY, ctx.getWorldBounds()); - return ctx.makeRectangle(minX, maxX, minY, maxY); - } - public static double divisible(double v, double divisible) { return (int) (Math.round(v / divisible) * divisible); } - protected double divisible(double v) { - return divisible(v, DIVISIBLE); - } - - /** reset()'s p, and confines to world bounds. Might not be divisible if - * the world bound isn't divisible too. - */ - protected Point divisible(Point p) { - Rectangle bounds = ctx.getWorldBounds(); - double newX = boundX( divisible(p.getX()), bounds ); - double newY = boundY( divisible(p.getY()), bounds ); - p.reset(newX, newY); - return p; - } - - static double boundX(double i, Rectangle bounds) { - return bound(i, bounds.getMinX(), bounds.getMaxX()); - } - - static double boundY(double i, Rectangle bounds) { - return bound(i, bounds.getMinY(), bounds.getMaxY()); - } - - static double bound(double i, double min, double max) { - if (i < min) return min; - if (i > max) return max; - return i; - } - - protected void assertRelation(SpatialRelation expected, Shape a, Shape b) { - assertRelation(null, expected, a, b); - } - - protected void assertRelation(String msg, SpatialRelation expected, Shape a, Shape b) { - _assertIntersect(msg, expected, a, b); - //check flipped a & b w/ transpose(), while we're at it - _assertIntersect(msg, expected.transpose(), b, a); - } - - private void _assertIntersect(String msg, SpatialRelation expected, Shape a, Shape b) { - SpatialRelation sect = a.relate(b); - if (sect == expected) - return; - msg = ((msg == null) ? 
"" : msg+"\r") + a +" intersect "+b; - if (expected == WITHIN || expected == CONTAINS) { - if (a.getClass().equals(b.getClass())) // they are the same shape type - assertEquals(msg,a,b); - else { - //they are effectively points or lines that are the same location - assertTrue(msg,!a.hasArea()); - assertTrue(msg,!b.hasArea()); - - Rectangle aBBox = a.getBoundingBox(); - Rectangle bBBox = b.getBoundingBox(); - if (aBBox.getHeight() == 0 && bBBox.getHeight() == 0 - && (aBBox.getMaxY() == 90 && bBBox.getMaxY() == 90 - || aBBox.getMinY() == -90 && bBBox.getMinY() == -90)) - ;//== a point at the pole - else - assertEquals(msg, aBBox, bBBox); - } - } else { - assertEquals(msg,expected,sect);//always fails - } - } - - protected void assertEqualsRatio(String msg, double expected, double actual) { - double delta = Math.abs(actual - expected); - double base = Math.min(actual, expected); - double deltaRatio = base==0 ? delta : Math.min(delta,delta / base); - assertEquals(msg,0,deltaRatio, EPS); - } - - protected int randomIntBetweenDivisible(int start, int end) { - return randomIntBetweenDivisible(start, end, (int)DIVISIBLE); - } - /** Returns a random integer between [start, end]. Integers between must be divisible by the 3rd argument. */ - protected int randomIntBetweenDivisible(int start, int end, int divisible) { - // DWS: I tested this - int divisStart = (int) Math.ceil( (start+1) / (double)divisible ); - int divisEnd = (int) Math.floor( (end-1) / (double)divisible ); - int divisRange = Math.max(0,divisEnd - divisStart + 1); - int r = randomInt(1 + divisRange);//remember that '0' is counted - if (r == 0) - return start; - if (r == 1) - return end; - return (r-2 + divisStart)*divisible; - } - - protected Rectangle randomRectangle(Point nearP) { - Rectangle bounds = ctx.getWorldBounds(); - if (nearP == null) - nearP = randomPointIn(bounds); - - Range xRange = randomRange(rarely() ? 0 : nearP.getX(), Range.xRange(bounds, ctx)); - Range yRange = randomRange(rarely() ? 
0 : nearP.getY(), Range.yRange(bounds, ctx)); - - return makeNormRect( - divisible(xRange.getMin()), - divisible(xRange.getMax()), - divisible(yRange.getMin()), - divisible(yRange.getMax()) ); - } - - private Range randomRange(double near, Range bounds) { - double mid = near + randomGaussian() * bounds.getWidth() / 6; - double width = Math.abs(randomGaussian()) * bounds.getWidth() / 6;//1/3rd - return new Range(mid - width / 2, mid + width / 2); - } - - private double randomGaussianZeroTo(double max) { - if (max == 0) - return max; - assert max > 0; - double r; - do { - r = Math.abs(randomGaussian()) * (max * 0.50); - } while (r > max); - return r; - } - - protected Rectangle randomRectangle(int divisible) { - double rX = randomIntBetweenDivisible(-180, 180, divisible); - double rW = randomIntBetweenDivisible(0, 360, divisible); - double rY1 = randomIntBetweenDivisible(-90, 90, divisible); - double rY2 = randomIntBetweenDivisible(-90, 90, divisible); - double rYmin = Math.min(rY1,rY2); - double rYmax = Math.max(rY1,rY2); - if (rW > 0 && rX == 180) - rX = -180; - return makeNormRect(rX, rX + rW, rYmin, rYmax); - } - - protected Point randomPoint() { - return randomPointIn(ctx.getWorldBounds()); - } - - protected Point randomPointIn(Circle c) { - double d = c.getRadius() * randomDouble(); - double angleDEG = 360 * randomDouble(); - Point p = ctx.getDistCalc().pointOnBearing(c.getCenter(), d, angleDEG, ctx, null); - assertEquals(CONTAINS,c.relate(p)); - return p; - } - - protected Point randomPointIn(Rectangle r) { - double x = r.getMinX() + randomDouble()*r.getWidth(); - double y = r.getMinY() + randomDouble()*r.getHeight(); - x = normX(x); - y = normY(y); - Point p = ctx.makePoint(x,y); - assertEquals(CONTAINS,r.relate(p)); - return p; - } - - protected Point randomPointInOrNull(Shape shape) { - if (!shape.hasArea())// or try the center? 
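The randomPointInOrNull helper being trimmed out of RandomizedShapeTestCase here works by rejection sampling: draw uniformly from the shape's bounding box until the shape accepts a point, giving up after a fixed number of tries. A standalone restatement of that logic; only the explicit Random, the ShapeFactory call, and the class name are new relative to the deleted code:

    import java.util.Random;

    import org.locationtech.spatial4j.context.SpatialContext;
    import org.locationtech.spatial4j.distance.DistanceUtils;
    import org.locationtech.spatial4j.shape.Point;
    import org.locationtech.spatial4j.shape.Rectangle;
    import org.locationtech.spatial4j.shape.Shape;

    public class RandomPointSketch {
      /** Returns a point inside {@code shape}, or null if 1000 samples all missed. */
      public static Point randomPointInOrNull(Shape shape, SpatialContext ctx, Random rnd) {
        if (!shape.hasArea()) {
          throw new UnsupportedOperationException("Need area to define shape!");
        }
        Rectangle bbox = shape.getBoundingBox();
        for (int i = 0; i < 1000; i++) {
          double x = bbox.getMinX() + rnd.nextDouble() * bbox.getWidth();
          double y = bbox.getMinY() + rnd.nextDouble() * bbox.getHeight();
          if (ctx.isGeo()) { // mirror the deleted normX/normY helpers
            x = DistanceUtils.normLonDEG(x);
            y = DistanceUtils.normLatDEG(y);
          }
          Point p = ctx.getShapeFactory().pointXY(x, y);
          if (shape.relate(p).intersects()) {
            return p; // accepted
          }
        }
        return null; // tried too many times and failed
      }
    }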
- throw new UnsupportedOperationException("Need area to define shape!"); - Rectangle bbox = shape.getBoundingBox(); - for (int i = 0; i < 1000; i++) { - Point p = randomPointIn(bbox); - if (shape.relate(p).intersects()) { - return p; - } - } - return null;//tried too many times and failed - } } diff --git a/lucene/spatial3d/build.gradle b/lucene/spatial3d/build.gradle index eab862245879..415981b53603 100644 --- a/lucene/spatial3d/build.gradle +++ b/lucene/spatial3d/build.gradle @@ -17,6 +17,8 @@ apply plugin: 'java-library' +description = '3D spatial planar geometry APIs' + dependencies { api project(':lucene:core') testImplementation project(':lucene:test-framework') diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java index c2389595752b..b3ded508d036 100644 --- a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java @@ -32,8 +32,8 @@ import org.apache.lucene.codecs.PointsFormat; import org.apache.lucene.codecs.PointsReader; import org.apache.lucene.codecs.PointsWriter; -import org.apache.lucene.codecs.lucene60.Lucene60PointsReader; -import org.apache.lucene.codecs.lucene60.Lucene60PointsWriter; +import org.apache.lucene.codecs.lucene86.Lucene86PointsReader; +import org.apache.lucene.codecs.lucene86.Lucene86PointsWriter; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; @@ -104,12 +104,12 @@ public PointsFormat pointsFormat() { return new PointsFormat() { @Override public PointsWriter fieldsWriter(SegmentWriteState writeState) throws IOException { - return new Lucene60PointsWriter(writeState, maxPointsInLeafNode, maxMBSortInHeap); + return new Lucene86PointsWriter(writeState, maxPointsInLeafNode, maxMBSortInHeap); } @Override public PointsReader fieldsReader(SegmentReadState readState) throws IOException { - return new Lucene60PointsReader(readState); + return new Lucene86PointsReader(readState); } }; } diff --git a/lucene/suggest/build.gradle b/lucene/suggest/build.gradle index 74ed1c429200..54a559624dab 100644 --- a/lucene/suggest/build.gradle +++ b/lucene/suggest/build.gradle @@ -17,6 +17,8 @@ apply plugin: 'java-library' +description = 'Auto-suggest and Spellchecking support' + dependencies { api project(':lucene:core') api project(':lucene:analysis:common') diff --git a/lucene/suggest/src/java/org/apache/lucene/search/spell/LuceneLevenshteinDistance.java b/lucene/suggest/src/java/org/apache/lucene/search/spell/LuceneLevenshteinDistance.java index 21ca535c1e5f..16ff24dfb01a 100644 --- a/lucene/suggest/src/java/org/apache/lucene/search/spell/LuceneLevenshteinDistance.java +++ b/lucene/suggest/src/java/org/apache/lucene/search/spell/LuceneLevenshteinDistance.java @@ -37,7 +37,7 @@ * for merging results from multiple DirectSpellCheckers. */ public final class LuceneLevenshteinDistance implements StringDistance { - + /** * Creates a new comparator, mimicing the behavior of Lucene's internal * edit distance. @@ -50,7 +50,7 @@ public float getDistance(String target, String other) { IntsRef otherPoints; int n; int d[][]; // cost array - + // NOTE: if we cared, we could 3*m space instead of m*n space, similar to // what LevenshteinDistance does, except cycling thru a ring of three // horizontal cost arrays... 
but this comparator is never actually used by @@ -63,7 +63,7 @@ public float getDistance(String target, String other) { n = targetPoints.length; final int m = otherPoints.length; d = new int[n+1][m+1]; - + if (n == 0 || m == 0) { if (n == m) { return 0; @@ -71,7 +71,7 @@ public float getDistance(String target, String other) { else { return Math.max(n, m); } - } + } // indexes into strings s and t int i; // iterates through s @@ -84,7 +84,7 @@ public float getDistance(String target, String other) { for (i = 0; i<=n; i++) { d[i][0] = i; } - + for (j = 0; j<=m; j++) { d[0][j] = j; } @@ -102,10 +102,10 @@ public float getDistance(String target, String other) { } } } - + return 1.0f - ((float) d[n][m] / Math.min(m, n)); } - + private static IntsRef toIntsRef(String s) { IntsRef ref = new IntsRef(s.length()); // worst case int utf16Len = s.length(); @@ -114,12 +114,18 @@ private static IntsRef toIntsRef(String s) { } return ref; } - + @Override public boolean equals(Object obj) { if (this == obj) return true; if (null == obj) return false; return (getClass() == obj.getClass()); } - + + @Override + public int hashCode() { + // constant hashCode since all instances of this class are equal() + return 6; + } + } diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggester.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggester.java index a03ae229f92b..f2345aeea3b4 100644 --- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggester.java +++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggester.java @@ -598,7 +598,7 @@ public boolean store(DataOutput output) throws IOException { return false; } - fst.save(output); + fst.save(output, output); output.writeVInt(maxAnalyzedPathsForOneInput); output.writeByte((byte) (hasPayloads ? 
1 : 0)); return true; @@ -607,7 +607,7 @@ public boolean store(DataOutput output) throws IOException { @Override public boolean load(DataInput input) throws IOException { count = input.readVLong(); - this.fst = new FST<>(input, new PairOutputs<>(PositiveIntOutputs.getSingleton(), ByteSequenceOutputs.getSingleton())); + this.fst = new FST<>(input, input, new PairOutputs<>(PositiveIntOutputs.getSingleton(), ByteSequenceOutputs.getSingleton())); maxAnalyzedPathsForOneInput = input.readVInt(); hasPayloads = input.readByte() == 1; return true; diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/FreeTextSuggester.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/FreeTextSuggester.java index 81c079e85196..a10f989357a1 100644 --- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/FreeTextSuggester.java +++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/FreeTextSuggester.java @@ -360,7 +360,7 @@ public boolean store(DataOutput output) throws IOException { output.writeByte(separator); output.writeVInt(grams); output.writeVLong(totTokens); - fst.save(output); + fst.save(output, output); return true; } @@ -378,7 +378,7 @@ public boolean load(DataInput input) throws IOException { } totTokens = input.readVLong(); - fst = new FST<>(input, PositiveIntOutputs.getSingleton()); + fst = new FST<>(input, input, PositiveIntOutputs.getSingleton()); return true; } diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/NRTSuggester.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/NRTSuggester.java index 3256ead930e0..b62d9b900cd1 100644 --- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/NRTSuggester.java +++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/NRTSuggester.java @@ -324,11 +324,11 @@ public static NRTSuggester load(IndexInput input, FSTLoadMode fstLoadMode) throw OffHeapFSTStore store = new OffHeapFSTStore(); IndexInput clone = input.clone(); clone.seek(input.getFilePointer()); - fst = new FST<>(clone, new PairOutputs<>( + fst = new FST<>(clone, clone, new PairOutputs<>( PositiveIntOutputs.getSingleton(), ByteSequenceOutputs.getSingleton()), store); input.seek(clone.getFilePointer() + store.size()); } else { - fst = new FST<>(input, new PairOutputs<>( + fst = new FST<>(input, input, new PairOutputs<>( PositiveIntOutputs.getSingleton(), ByteSequenceOutputs.getSingleton())); } diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/NRTSuggesterBuilder.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/NRTSuggesterBuilder.java index 34b1508a552f..ca2934439c3b 100644 --- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/NRTSuggesterBuilder.java +++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/NRTSuggesterBuilder.java @@ -123,7 +123,7 @@ public boolean store(DataOutput output) throws IOException { if (fst == null) { return false; } - fst.save(output); + fst.save(output, output); /* write some more meta-info */ assert maxAnalyzedPathsPerOutput > 0; diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/fst/FSTCompletionLookup.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/fst/FSTCompletionLookup.java index 7db97a8a1855..547b3268acb3 100644 --- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/fst/FSTCompletionLookup.java +++ 
b/lucene/suggest/src/java/org/apache/lucene/search/suggest/fst/FSTCompletionLookup.java @@ -298,7 +298,7 @@ public synchronized boolean store(DataOutput output) throws IOException { if (normalCompletion == null || normalCompletion.getFST() == null) { return false; } - normalCompletion.getFST().save(output); + normalCompletion.getFST().save(output, output); return true; } @@ -306,7 +306,7 @@ public synchronized boolean store(DataOutput output) throws IOException { public synchronized boolean load(DataInput input) throws IOException { count = input.readVLong(); this.higherWeightsCompletion = new FSTCompletion(new FST<>( - input, NoOutputs.getSingleton())); + input, input, NoOutputs.getSingleton())); this.normalCompletion = new FSTCompletion( higherWeightsCompletion.getFST(), false, exactMatchFirst); return true; diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/fst/WFSTCompletionLookup.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/fst/WFSTCompletionLookup.java index 9c8ebb8cda10..d621e24de973 100644 --- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/fst/WFSTCompletionLookup.java +++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/fst/WFSTCompletionLookup.java @@ -141,14 +141,14 @@ public boolean store(DataOutput output) throws IOException { if (fst == null) { return false; } - fst.save(output); + fst.save(output, output); return true; } @Override public boolean load(DataInput input) throws IOException { count = input.readVLong(); - this.fst = new FST<>(input, PositiveIntOutputs.getSingleton()); + this.fst = new FST<>(input, input, PositiveIntOutputs.getSingleton()); return true; } diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/jaspell/JaspellLookup.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/jaspell/JaspellLookup.java index 948ebeb01519..5dce144c201e 100644 --- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/jaspell/JaspellLookup.java +++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/jaspell/JaspellLookup.java @@ -73,7 +73,7 @@ public void build(InputIterator iterator) throws IOException { continue; } charsSpare.copyUTF8Bytes(spare); - trie.put(charsSpare.toString(), Long.valueOf(weight)); + trie.put(charsSpare.toString(), weight); count++; } } @@ -144,7 +144,7 @@ private void readRecursively(DataInput in, TSTNode node) throws IOException { node.splitchar = in.readString().charAt(0); byte mask = in.readByte(); if ((mask & HAS_VALUE) != 0) { - node.data = Long.valueOf(in.readLong()); + node.data = in.readLong(); } if ((mask & LO_KID) != 0) { TSTNode kid = new TSTNode('\0', node); diff --git a/lucene/suggest/src/test/org/apache/lucene/search/spell/TestWordBreakSpellChecker.java b/lucene/suggest/src/test/org/apache/lucene/search/spell/TestWordBreakSpellChecker.java index 35e8245ac8b6..f27543467ead 100644 --- a/lucene/suggest/src/test/org/apache/lucene/search/spell/TestWordBreakSpellChecker.java +++ b/lucene/suggest/src/test/org/apache/lucene/search/spell/TestWordBreakSpellChecker.java @@ -20,8 +20,6 @@ import java.util.List; import java.util.regex.Pattern; -import junit.framework.Assert; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.analysis.MockTokenizer; @@ -37,6 +35,7 @@ import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; +import org.junit.Assert; public class TestWordBreakSpellChecker extends LuceneTestCase { private 
Directory dir; diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/LookupBenchmarkTest.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/LookupBenchmarkTest.java index 9f9190a57733..5e5a4d552fc0 100644 --- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/LookupBenchmarkTest.java +++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/LookupBenchmarkTest.java @@ -53,7 +53,7 @@ */ @Ignore("COMMENT ME TO RUN BENCHMARKS!") public class LookupBenchmarkTest extends LuceneTestCase { - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked","deprecation"}) private final List> benchmarkClasses = Arrays.asList( FuzzySuggester.class, AnalyzingSuggester.class, diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/PersistenceTest.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/PersistenceTest.java index fd5b558de4ec..9f4c6877e64f 100644 --- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/PersistenceTest.java +++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/PersistenceTest.java @@ -50,7 +50,8 @@ public class PersistenceTest extends LuceneTestCase { public void testTSTPersistence() throws Exception { runTest(TSTLookup.class, true); } - + + @Deprecated public void testJaspellPersistence() throws Exception { runTest(JaspellLookup.class, true); } diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestFreeTextSuggester.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestFreeTextSuggester.java index 3e89275af80f..530a4c3ce3ef 100644 --- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestFreeTextSuggester.java +++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestFreeTextSuggester.java @@ -192,6 +192,7 @@ public boolean hasContexts() { } } analyzer.close(); + lfd.close(); } // Make sure you can suggest based only on unigram model: diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestSuggestStopFilter.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestSuggestStopFilter.java index 5ed84e0170e7..4dbccdec3bf2 100644 --- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestSuggestStopFilter.java +++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestSuggestStopFilter.java @@ -50,7 +50,6 @@ public void testEndIsStopWord() throws Exception { Tokenizer stream = new MockTokenizer(); stream.setReader(new StringReader("go to ")); TokenStream filter = new SuggestStopFilter(stream, stopWords); - filter = new SuggestStopFilter(stream, stopWords); assertTokenStreamContents(filter, new String[] {"go"}, new int[] {0}, @@ -69,8 +68,6 @@ public void testMidStopWord() throws Exception { Tokenizer stream = new MockTokenizer(); stream.setReader(new StringReader("go to school")); TokenStream filter = new SuggestStopFilter(stream, stopWords); - - filter = new SuggestStopFilter(stream, stopWords); assertTokenStreamContents(filter, new String[] {"go", "school"}, new int[] {0, 6}, @@ -89,8 +86,6 @@ public void testMultipleStopWords() throws Exception { Tokenizer stream = new MockTokenizer(); stream.setReader(new StringReader("go to a the school")); TokenStream filter = new SuggestStopFilter(stream, stopWords); - - filter = new SuggestStopFilter(stream, stopWords); assertTokenStreamContents(filter, new String[] { "go", "school" }, new int[] {0, 12}, @@ -109,8 +104,6 @@ public void testMultipleStopWordsEnd() throws Exception { 
Tokenizer stream = new MockTokenizer(); stream.setReader(new StringReader("go to a the")); TokenStream filter = new SuggestStopFilter(stream, stopWords); - - filter = new SuggestStopFilter(stream, stopWords); assertTokenStreamContents(filter, new String[] { "go", "the"}, new int[] {0, 8}, @@ -129,8 +122,6 @@ public void testMultipleStopWordsEnd2() throws Exception { Tokenizer stream = new MockTokenizer(); stream.setReader(new StringReader("go to a the ")); TokenStream filter = new SuggestStopFilter(stream, stopWords); - - filter = new SuggestStopFilter(stream, stopWords); assertTokenStreamContents(filter, new String[] { "go"}, new int[] {0}, diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java index 1dbadc1224e7..12c8902f4ad1 100644 --- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java +++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java @@ -16,9 +16,6 @@ */ package org.apache.lucene.search.suggest.document; -import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; -import static org.hamcrest.core.IsEqual.equalTo; - import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.ArrayList; @@ -32,6 +29,7 @@ import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CyclicBarrier; +import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.analysis.TokenFilter; @@ -41,7 +39,7 @@ import org.apache.lucene.analysis.tokenattributes.TypeAttribute; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene84.Lucene84Codec; +import org.apache.lucene.codecs.lucene86.Lucene86Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.IntPoint; @@ -69,7 +67,8 @@ import org.junit.Before; import org.junit.Test; -import com.carrotsearch.randomizedtesting.generators.RandomPicks; +import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; +import static org.hamcrest.core.IsEqual.equalTo; public class TestSuggestField extends LuceneTestCase { @@ -761,7 +760,7 @@ public void testRealisticKeys() throws Exception { } assertTrue("at least one of the entries should have the score", matched); } - + lineFileDocs.close(); reader.close(); iw.close(); } @@ -888,7 +887,7 @@ static IndexWriterConfig iwcWithSuggestField(Analyzer analyzer, String... 
sugges static IndexWriterConfig iwcWithSuggestField(Analyzer analyzer, final Set suggestFields) { IndexWriterConfig iwc = newIndexWriterConfig(random(), analyzer); iwc.setMergePolicy(newLogMergePolicy()); - Codec filterCodec = new Lucene84Codec() { + Codec filterCodec = new Lucene86Codec() { CompletionPostingsFormat.FSTLoadMode fstLoadMode = RandomPicks.randomFrom(random(), CompletionPostingsFormat.FSTLoadMode.values()); PostingsFormat postingsFormat = new Completion84PostingsFormat(fstLoadMode); diff --git a/lucene/test-framework/build.gradle b/lucene/test-framework/build.gradle index 5abf89d9c773..4b0cadee4bfb 100644 --- a/lucene/test-framework/build.gradle +++ b/lucene/test-framework/build.gradle @@ -17,6 +17,8 @@ apply plugin: 'java-library' +description = 'Framework for testing Lucene-based applications' + dependencies { api project(':lucene:core') diff --git a/lucene/test-framework/src/java/org/apache/lucene/codecs/uniformsplit/UniformSplitRot13PostingsFormat.java b/lucene/test-framework/src/java/org/apache/lucene/codecs/uniformsplit/UniformSplitRot13PostingsFormat.java index 4b3a68034d81..26d14adb2902 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/codecs/uniformsplit/UniformSplitRot13PostingsFormat.java +++ b/lucene/test-framework/src/java/org/apache/lucene/codecs/uniformsplit/UniformSplitRot13PostingsFormat.java @@ -28,6 +28,7 @@ import org.apache.lucene.codecs.lucene84.Lucene84PostingsWriter; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.store.ByteBuffersDataOutput; import org.apache.lucene.store.DataOutput; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; @@ -40,6 +41,7 @@ public class UniformSplitRot13PostingsFormat extends PostingsFormat { public static volatile boolean encoderCalled; public static volatile boolean decoderCalled; public static volatile boolean blocksEncoded; + public static volatile boolean fieldsMetadataEncoded; public static volatile boolean dictionaryEncoded; protected final boolean dictionaryOnHeap; @@ -56,6 +58,7 @@ public static void resetEncodingFlags() { encoderCalled = false; decoderCalled = false; blocksEncoded = false; + fieldsMetadataEncoded = false; dictionaryEncoded = false; } @@ -86,6 +89,11 @@ protected void writeDictionary(IndexDictionary.Builder dictionaryBuilder) throws super.writeDictionary(dictionaryBuilder); recordDictionaryEncodingCall(); } + @Override + protected void writeEncodedFieldsMetadata(ByteBuffersDataOutput fieldsOutput) throws IOException { + super.writeEncodedFieldsMetadata(fieldsOutput); + recordFieldsMetadataEncodingCall(); + } }; } @@ -96,6 +104,13 @@ protected void recordBlockEncodingCall() { } } + protected void recordFieldsMetadataEncodingCall() { + if (encoderCalled) { + fieldsMetadataEncoded = true; + encoderCalled = false; + } + } + protected void recordDictionaryEncodingCall() { if (encoderCalled) { dictionaryEncoded = true; diff --git a/lucene/test-framework/src/java/org/apache/lucene/codecs/uniformsplit/sharedterms/STUniformSplitRot13PostingsFormat.java b/lucene/test-framework/src/java/org/apache/lucene/codecs/uniformsplit/sharedterms/STUniformSplitRot13PostingsFormat.java index a300e3642371..04f3964e337b 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/codecs/uniformsplit/sharedterms/STUniformSplitRot13PostingsFormat.java +++ b/lucene/test-framework/src/java/org/apache/lucene/codecs/uniformsplit/sharedterms/STUniformSplitRot13PostingsFormat.java @@ -28,6 +28,7 @@ import 
org.apache.lucene.codecs.uniformsplit.UniformSplitTermsWriter; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.store.ByteBuffersDataOutput; /** * {@link STUniformSplitPostingsFormat} with block encoding using ROT13 cypher. @@ -50,6 +51,12 @@ protected void writeDictionary(IndexDictionary.Builder dictionaryBuilder) throws super.writeDictionary(dictionaryBuilder); recordDictionaryEncodingCall(); } + @Override + protected void writeEncodedFieldsMetadata(ByteBuffersDataOutput fieldsOutput) throws IOException { + recordBlockEncodingCall(); + super.writeEncodedFieldsMetadata(fieldsOutput); + recordFieldsMetadataEncodingCall(); + } }; } diff --git a/lucene/test-framework/src/java/org/apache/lucene/geo/BaseGeoPointTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/geo/BaseGeoPointTestCase.java index e67e2a72323f..f556c0d55ccc 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/geo/BaseGeoPointTestCase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/geo/BaseGeoPointTestCase.java @@ -30,8 +30,8 @@ import org.apache.lucene.codecs.PointsFormat; import org.apache.lucene.codecs.PointsReader; import org.apache.lucene.codecs.PointsWriter; -import org.apache.lucene.codecs.lucene60.Lucene60PointsReader; -import org.apache.lucene.codecs.lucene60.Lucene60PointsWriter; +import org.apache.lucene.codecs.lucene86.Lucene86PointsReader; +import org.apache.lucene.codecs.lucene86.Lucene86PointsWriter; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; @@ -1276,18 +1276,18 @@ private void doRandomDistanceTest(int numDocs, int numQueries) throws IOExceptio // Else seeds may not reproduce: iwc.setMergeScheduler(new SerialMergeScheduler()); int pointsInLeaf = 2 + random().nextInt(4); - iwc.setCodec(new FilterCodec("Lucene84", TestUtil.getDefaultCodec()) { + iwc.setCodec(new FilterCodec("Lucene86", TestUtil.getDefaultCodec()) { @Override public PointsFormat pointsFormat() { return new PointsFormat() { @Override public PointsWriter fieldsWriter(SegmentWriteState writeState) throws IOException { - return new Lucene60PointsWriter(writeState, pointsInLeaf, BKDWriter.DEFAULT_MAX_MB_SORT_IN_HEAP); + return new Lucene86PointsWriter(writeState, pointsInLeaf, BKDWriter.DEFAULT_MAX_MB_SORT_IN_HEAP); } @Override public PointsReader fieldsReader(SegmentReadState readState) throws IOException { - return new Lucene60PointsReader(readState); + return new Lucene86PointsReader(readState); } }; } diff --git a/lucene/test-framework/src/java/org/apache/lucene/geo/BaseXYPointTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/geo/BaseXYPointTestCase.java index a597ca5a0425..f60bd4c07d3f 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/geo/BaseXYPointTestCase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/geo/BaseXYPointTestCase.java @@ -30,8 +30,8 @@ import org.apache.lucene.codecs.PointsFormat; import org.apache.lucene.codecs.PointsReader; import org.apache.lucene.codecs.PointsWriter; -import org.apache.lucene.codecs.lucene60.Lucene60PointsReader; -import org.apache.lucene.codecs.lucene60.Lucene60PointsWriter; +import org.apache.lucene.codecs.lucene86.Lucene86PointsReader; +import org.apache.lucene.codecs.lucene86.Lucene86PointsWriter; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; @@ -1190,18 +1190,18 @@ 
private void doRandomDistanceTest(int numDocs, int numQueries) throws IOExceptio // Else seeds may not reproduce: iwc.setMergeScheduler(new SerialMergeScheduler()); int pointsInLeaf = 2 + random().nextInt(4); - iwc.setCodec(new FilterCodec("Lucene84", TestUtil.getDefaultCodec()) { + iwc.setCodec(new FilterCodec("Lucene86", TestUtil.getDefaultCodec()) { @Override public PointsFormat pointsFormat() { return new PointsFormat() { @Override public PointsWriter fieldsWriter(SegmentWriteState writeState) throws IOException { - return new Lucene60PointsWriter(writeState, pointsInLeaf, BKDWriter.DEFAULT_MAX_MB_SORT_IN_HEAP); + return new Lucene86PointsWriter(writeState, pointsInLeaf, BKDWriter.DEFAULT_MAX_MB_SORT_IN_HEAP); } @Override public PointsReader fieldsReader(SegmentReadState readState) throws IOException { - return new Lucene60PointsReader(readState); + return new Lucene86PointsReader(readState); } }; } diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java b/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java index beb4dad03578..92ffc732a295 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java +++ b/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java @@ -128,6 +128,38 @@ public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos, Mer return findMerges(null, segmentInfos, mergeContext); } + @Override + public MergeSpecification findFullFlushMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos, MergeContext mergeContext) throws IOException { + MergeSpecification mergeSpecification = findMerges(null, segmentInfos, mergeContext); + if (mergeSpecification == null) { + return null; + } + // Do not return any merges involving already-merging segments. 
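The findFullFlushMerges override that begins here never hands back a merge involving a segment that is already merging: fully free merges pass through, partially conflicting ones are shrunk to their non-merging segments, and an empty result becomes null. A standalone restatement of just that filtering step (the helper and class names are illustrative, not from the patch):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Set;

    import org.apache.lucene.index.MergePolicy.MergeSpecification;
    import org.apache.lucene.index.MergePolicy.OneMerge;
    import org.apache.lucene.index.SegmentCommitInfo;

    public class MergeFilterSketch {
      /** Drops already-merging segments from each OneMerge; returns null if nothing survives. */
      static MergeSpecification filter(MergeSpecification spec, Set<SegmentCommitInfo> merging) {
        MergeSpecification filtered = new MergeSpecification();
        for (OneMerge merge : spec.merges) {
          List<SegmentCommitInfo> free = new ArrayList<>();
          for (SegmentCommitInfo sci : merge.segments) {
            if (merging.contains(sci) == false) { // Lucene style: explicit == false
              free.add(sci);
            }
          }
          if (free.size() == merge.segments.size()) {
            filtered.add(merge);              // untouched: keep the original merge
          } else if (free.size() > 0) {
            filtered.add(new OneMerge(free)); // shrunk to the non-merging remainder
          }
        }
        return filtered.merges.size() > 0 ? filtered : null;
      }
    }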
+ MergeSpecification filteredMergeSpecification = new MergeSpecification(); + for (OneMerge oneMerge : mergeSpecification.merges) { + boolean filtered = false; + List nonMergingSegments = new ArrayList<>(); + for (SegmentCommitInfo sci : oneMerge.segments) { + if (mergeContext.getMergingSegments().contains(sci) == false) { + nonMergingSegments.add(sci); + } else { + filtered = true; + } + } + if (filtered == true) { + if (nonMergingSegments.size() > 0) { + filteredMergeSpecification.add(new OneMerge(nonMergingSegments)); + } + } else { + filteredMergeSpecification.add(oneMerge); + } + } + if (filteredMergeSpecification.merges.size() > 0) { + return filteredMergeSpecification; + } + return null; + } + @Override public boolean useCompoundFile(SegmentInfos infos, SegmentCommitInfo mergedInfo, MergeContext mergeContext) throws IOException { // 80% of the time we create CFS: diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java b/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java index 7c158a2f28d4..0e3f7f3045ac 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java +++ b/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java @@ -40,8 +40,8 @@ import org.apache.lucene.codecs.blockterms.LuceneVarGapFixedInterval; import org.apache.lucene.codecs.blocktreeords.BlockTreeOrdsPostingsFormat; import org.apache.lucene.codecs.bloom.TestBloomFilteredLucenePostings; -import org.apache.lucene.codecs.lucene60.Lucene60PointsReader; -import org.apache.lucene.codecs.lucene60.Lucene60PointsWriter; +import org.apache.lucene.codecs.lucene86.Lucene86PointsReader; +import org.apache.lucene.codecs.lucene86.Lucene86PointsWriter; import org.apache.lucene.codecs.memory.DirectPostingsFormat; import org.apache.lucene.codecs.memory.FSTPostingsFormat; import org.apache.lucene.codecs.mockrandom.MockRandomPostingsFormat; @@ -97,9 +97,9 @@ public PointsFormat pointsFormat() { @Override public PointsWriter fieldsWriter(SegmentWriteState writeState) throws IOException { - // Randomize how BKDWriter chooses its splis: + // Randomize how BKDWriter chooses its splits: - return new Lucene60PointsWriter(writeState, maxPointsInLeafNode, maxMBSortInHeap) { + return new Lucene86PointsWriter(writeState, maxPointsInLeafNode, maxMBSortInHeap) { @Override public void writeField(FieldInfo fieldInfo, PointsReader reader) throws IOException { @@ -132,8 +132,10 @@ public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue }); // We could have 0 points on merge since all docs with dimensional fields may be deleted: - if (writer.getPointCount() > 0) { - indexFPs.put(fieldInfo.name, writer.finish(dataOut)); + Runnable finalizer = writer.finish(metaOut, indexOut, dataOut); + if (finalizer != null) { + metaOut.writeInt(fieldInfo.number); + finalizer.run(); } } } @@ -142,7 +144,7 @@ public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue @Override public PointsReader fieldsReader(SegmentReadState readState) throws IOException { - return new Lucene60PointsReader(readState); + return new Lucene86PointsReader(readState); } }); } diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/CheckHits.java b/lucene/test-framework/src/java/org/apache/lucene/search/CheckHits.java index 3cb7f0a2be60..5583f5f09b3a 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/search/CheckHits.java +++ b/lucene/test-framework/src/java/org/apache/lucene/search/CheckHits.java @@ -23,15 +23,16 @@ import 
java.util.TreeSet; import java.util.regex.Pattern; -import junit.framework.Assert; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.util.LuceneTestCase; -import static junit.framework.Assert.assertNotNull; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.fail; /** * Utility class for asserting expected hits in tests. @@ -60,7 +61,7 @@ public static void checkNoMatchExplanations(Query q, String defaultFieldName, Explanation exp = searcher.explain(q, doc); assertNotNull("Explanation of [["+d+"]] for #"+doc+" is null", exp); - Assert.assertFalse("Explanation of [["+d+"]] for #"+doc+ + assertFalse("Explanation of [["+d+"]] for #"+doc+ " doesn't indicate non-match: " + exp.toString(), exp.isMatch()); } @@ -95,7 +96,7 @@ public static void checkHitCollector(Random random, Query query, String defaultF final Collector c = new SetCollector(actual); searcher.search(query, c); - Assert.assertEquals("Simple: " + query.toString(defaultFieldName), + assertEquals("Simple: " + query.toString(defaultFieldName), correct, actual); for (int i = -1; i < 2; i++) { @@ -103,7 +104,7 @@ public static void checkHitCollector(Random random, Query query, String defaultF IndexSearcher s = QueryUtils.wrapUnderlyingReader (random, searcher, i); s.search(query, c); - Assert.assertEquals("Wrap Reader " + i + ": " + + assertEquals("Wrap Reader " + i + ": " + query.toString(defaultFieldName), correct, actual); } @@ -168,16 +169,16 @@ public static void checkHits( actual.add(Integer.valueOf(hits[i].doc)); } - Assert.assertEquals(query.toString(defaultFieldName), correct, actual); + assertEquals(query.toString(defaultFieldName), correct, actual); QueryUtils.check(random, query,searcher, LuceneTestCase.rarely(random)); } /** Tests that a Hits has an expected order of documents */ public static void checkDocIds(String mes, int[] results, ScoreDoc[] hits) { - Assert.assertEquals(mes + " nr of hits", hits.length, results.length); + assertEquals(mes + " nr of hits", hits.length, results.length); for (int i = 0; i < results.length; i++) { - Assert.assertEquals(mes + " doc nrs for hit " + i, results[i], hits[i].doc); + assertEquals(mes + " doc nrs for hit " + i, results[i], hits[i].doc); } } @@ -198,11 +199,11 @@ public static void checkHitsQuery( public static void checkEqual(Query query, ScoreDoc[] hits1, ScoreDoc[] hits2) { final float scoreTolerance = 1.0e-6f; if (hits1.length != hits2.length) { - Assert.fail("Unequal lengths: hits1="+hits1.length+",hits2="+hits2.length); + fail("Unequal lengths: hits1="+hits1.length+",hits2="+hits2.length); } for (int i = 0; i < hits1.length; i++) { if (hits1[i].doc != hits2[i].doc) { - Assert.fail("Hit " + i + " docnumbers don't match\n" + fail("Hit " + i + " docnumbers don't match\n" + hits2str(hits1, hits2,0,0) + "for query:" + query.toString()); } @@ -210,7 +211,7 @@ public static void checkEqual(Query query, ScoreDoc[] hits1, ScoreDoc[] hits2) { if ((hits1[i].doc != hits2[i].doc) || Math.abs(hits1[i].score - hits2[i].score) > scoreTolerance) { - Assert.fail("Hit " + i + ", doc nrs " + hits1[i].doc + " and " + hits2[i].doc + fail("Hit " + i + ", doc nrs " + hits1[i].doc + " and " + hits2[i].doc + "\nunequal : " + hits1[i].score + "\n and: " + hits2[i].score + "\nfor query:" + query.toString()); @@ -335,9 +336,9 @@ public 
static void verifyExplanation(String q, float value = expl.getValue().floatValue(); // TODO: clean this up if we use junit 5 (the assert message is costly) try { - Assert.assertEquals(score, value, 0d); + assertEquals(score, value, 0d); } catch (Exception e) { - Assert.fail(q+": score(doc="+doc+")="+score+" != explanationScore="+value+" Explanation: "+expl); + fail(q+": score(doc="+doc+")="+score+" != explanationScore="+value+" Explanation: "+expl); } if (!deep) return; @@ -349,7 +350,7 @@ public static void verifyExplanation(String q, } String descr = expl.getDescription().toLowerCase(Locale.ROOT); if (descr.startsWith("score based on ") && descr.contains("child docs in range")) { - Assert.assertTrue("Child doc explanations are missing", detail.length > 0); + assertTrue("Child doc explanations are missing", detail.length > 0); } if (detail.length > 0) { if (detail.length==1 && COMPUTED_FROM_PATTERN.matcher(descr).matches() == false) { @@ -388,7 +389,7 @@ public static void verifyExplanation(String q, } // TODO: this is a TERRIBLE assertion!!!! if (false == (productOf || sumOf || maxOf || computedOf || maxTimesOthers)) { - Assert.fail( + fail( q+": multi valued explanation description=\""+descr +"\" must be 'max of plus x times others', 'computed as x from:' or end with 'product of'" +" or 'sum of:' or 'max of:' - "+expl); @@ -423,14 +424,14 @@ public static void verifyExplanation(String q, } else if (maxTimesOthers) { combined = (float) (max + x * (sum - max)); } else { - Assert.assertTrue("should never get here!", computedOf); + assertTrue("should never get here!", computedOf); combined = value; } // TODO: clean this up if we use junit 5 (the assert message is costly) try { - Assert.assertEquals(combined, value, maxError); + assertEquals(combined, value, maxError); } catch (Exception e) { - Assert.fail(q+": actual subDetails combined=="+combined+ + fail(q+": actual subDetails combined=="+combined+ " != value="+value+" Explanation: "+expl); } } @@ -444,14 +445,17 @@ public static void verifyExplanation(String q, * @see ExplanationAsserter */ public static class ExplanationAssertingSearcher extends IndexSearcher { + public ExplanationAssertingSearcher(IndexReader r) { super(r); } + protected void checkExplanations(Query q) throws IOException { super.search(q, new ExplanationAsserter (q, null, this)); } + @Override public TopFieldDocs search(Query query, int n, @@ -460,17 +464,20 @@ public TopFieldDocs search(Query query, checkExplanations(query); return super.search(query,n,sort); } + @Override public void search(Query query, Collector results) throws IOException { checkExplanations(query); super.search(query, results); } + @Override public TopDocs search(Query query, int n) throws IOException { checkExplanations(query); return super.search(query, n); } + } /** @@ -521,9 +528,9 @@ public void collect(int doc) throws IOException { assertNotNull("Explanation of [["+d+"]] for #"+doc+" is null", exp); verifyExplanation(d,doc,scorer.score(),deep,exp); - Assert.assertTrue("Explanation of [["+d+"]] for #"+ doc + - " does not indicate match: " + exp.toString(), - exp.isMatch()); + assertTrue("Explanation of [["+d+"]] for #"+ doc + + " does not indicate match: " + exp.toString(), + exp.isMatch()); } @Override protected void doSetNextReader(LeafReaderContext context) throws IOException { @@ -602,7 +609,7 @@ private static void doCheckMaxScores(Random random, Query query, IndexSearcher s Scorer s1 = w1.scorer(ctx); Scorer s2 = w2.scorer(ctx); if (s1 == null) { - Assert.assertTrue(s2 == null || 
s2.iterator().nextDoc() == DocIdSetIterator.NO_MORE_DOCS); + assertTrue(s2 == null || s2.iterator().nextDoc() == DocIdSetIterator.NO_MORE_DOCS); continue; } TwoPhaseIterator twoPhase1 = s1.twoPhaseIterator(); @@ -616,25 +623,25 @@ private static void doCheckMaxScores(Random random, Query query, IndexSearcher s int doc1; for (doc1 = approx1.nextDoc(); doc1 < doc2; doc1 = approx1.nextDoc()) { if (twoPhase1 == null || twoPhase1.matches()) { - Assert.assertTrue(s1.score() < minScore); + assertTrue(s1.score() < minScore); } } - Assert.assertEquals(doc1, doc2); + assertEquals(doc1, doc2); if (doc2 == DocIdSetIterator.NO_MORE_DOCS) { break; } if (doc2 > upTo) { upTo = s2.advanceShallow(doc2); - Assert.assertTrue(upTo >= doc2); + assertTrue(upTo >= doc2); maxScore = s2.getMaxScore(upTo); } if (twoPhase2 == null || twoPhase2.matches()) { - Assert.assertTrue(twoPhase1 == null || twoPhase1.matches()); + assertTrue(twoPhase1 == null || twoPhase1.matches()); float score = s2.score(); - Assert.assertEquals(s1.score(), score); - Assert.assertTrue(score + " > " + maxScore + " up to " + upTo, score <= maxScore); + assertEquals(s1.score(), score, 0); + assertTrue(score + " > " + maxScore + " up to " + upTo, score <= maxScore); if (score >= minScore && random.nextInt(10) == 0) { // On some scorers, changing the min score changes the way that docs are iterated @@ -650,7 +657,7 @@ private static void doCheckMaxScores(Random random, Query query, IndexSearcher s Scorer s1 = w1.scorer(ctx); Scorer s2 = w2.scorer(ctx); if (s1 == null) { - Assert.assertTrue(s2 == null || s2.iterator().nextDoc() == DocIdSetIterator.NO_MORE_DOCS); + assertTrue(s2 == null || s2.iterator().nextDoc() == DocIdSetIterator.NO_MORE_DOCS); continue; } TwoPhaseIterator twoPhase1 = s1.twoPhaseIterator(); @@ -691,7 +698,7 @@ private static void doCheckMaxScores(Random random, Query query, IndexSearcher s int doc1; for (doc1 = approx1.advance(target); doc1 < doc2; doc1 = approx1.nextDoc()) { if (twoPhase1 == null || twoPhase1.matches()) { - Assert.assertTrue(s1.score() < minScore); + assertTrue(s1.score() < minScore); } } assertEquals(doc1, doc2); @@ -701,17 +708,17 @@ private static void doCheckMaxScores(Random random, Query query, IndexSearcher s } if (twoPhase2 == null || twoPhase2.matches()) { - Assert.assertTrue(twoPhase1 == null || twoPhase1.matches()); + assertTrue(twoPhase1 == null || twoPhase1.matches()); float score = s2.score(); - Assert.assertEquals(s1.score(), score); + assertEquals(s1.score(), score, 0); if (doc2 > upTo) { upTo = s2.advanceShallow(doc2); - Assert.assertTrue(upTo >= doc2); + assertTrue(upTo >= doc2); maxScore = s2.getMaxScore(upTo); } - Assert.assertTrue(score <= maxScore); + assertTrue(score <= maxScore); if (score >= minScore && random.nextInt(10) == 0) { // On some scorers, changing the min score changes the way that docs are iterated diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java b/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java index cdbac7778469..fdd5fb2f8c23 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java +++ b/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java @@ -20,7 +20,6 @@ import java.util.List; import java.util.Random; -import junit.framework.Assert; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.Fields; @@ -39,10 +38,11 @@ import org.apache.lucene.util.Bits; import org.apache.lucene.util.LuceneTestCase; import 
org.apache.lucene.util.Version; +import org.junit.Assert; -import static junit.framework.Assert.assertEquals; -import static junit.framework.Assert.assertFalse; -import static junit.framework.Assert.assertTrue; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; /** * Utility class for sanity-checking queries. @@ -424,7 +424,7 @@ protected void doSetNextReader(LeafReaderContext context) throws IOException { break; } } - Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but advance("+(lastDoc[0]+1)+") got to "+scorer.docID(),more); + assertFalse("query's last doc was "+ lastDoc[0] +" but advance("+(lastDoc[0]+1)+") got to "+scorer.docID(),more); } } } @@ -453,11 +453,11 @@ public void collect(int doc) throws IOException { for (int i=lastDoc[0]+1; i<=doc; i++) { Weight w = s.createWeight(rewritten, ScoreMode.COMPLETE, 1); Scorer scorer = w.scorer(context.get(leafPtr)); - Assert.assertTrue("query collected "+doc+" but advance("+i+") says no more docs!",scorer.iterator().advance(i) != DocIdSetIterator.NO_MORE_DOCS); - Assert.assertEquals("query collected "+doc+" but advance("+i+") got to "+scorer.docID(),doc,scorer.docID()); + assertTrue("query collected "+doc+" but advance("+i+") says no more docs!",scorer.iterator().advance(i) != DocIdSetIterator.NO_MORE_DOCS); + assertEquals("query collected "+doc+" but advance("+i+") got to "+scorer.docID(),doc,scorer.docID()); float advanceScore = scorer.score(); - Assert.assertEquals("unstable advance("+i+") score!",advanceScore,scorer.score(),maxDiff); - Assert.assertEquals("query assigned doc "+doc+" a score of <"+score+"> but advance("+i+") has <"+advanceScore+">!",score,advanceScore,maxDiff); + assertEquals("unstable advance("+i+") score!",advanceScore,scorer.score(),maxDiff); + assertEquals("query assigned doc "+doc+" a score of <"+score+"> but advance("+i+") has <"+advanceScore+">!",score,advanceScore,maxDiff); // Hurry things along if they are going slow (eg // if you got SimpleText codec this will kick in): @@ -496,7 +496,7 @@ protected void doSetNextReader(LeafReaderContext context) throws IOException { break; } } - Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but advance("+(lastDoc[0]+1)+") got to "+scorer.docID(),more); + assertFalse("query's last doc was "+ lastDoc[0] +" but advance("+(lastDoc[0]+1)+") got to "+scorer.docID(),more); } leafPtr++; } @@ -524,7 +524,7 @@ protected void doSetNextReader(LeafReaderContext context) throws IOException { break; } } - Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but advance("+(lastDoc[0]+1)+") got to "+scorer.docID(),more); + assertFalse("query's last doc was "+ lastDoc[0] +" but advance("+(lastDoc[0]+1)+") got to "+scorer.docID(),more); } } } @@ -561,8 +561,8 @@ public void setScorer(Scorable scorer) throws IOException { public void collect(int doc) throws IOException { assert doc >= min; assert doc < max; - Assert.assertEquals(scorer.docID(), doc); - Assert.assertEquals(scorer.score(), scorer2.score(), 0.01f); + assertEquals(scorer.docID(), doc); + assertEquals(scorer.score(), scorer2.score(), 0.01f); iterator.nextDoc(); } }, null, min, max); diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java b/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java index 4f01cf770671..a8f1b7daefbf 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java +++ 
b/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java @@ -550,8 +550,7 @@ public void close() throws IOException { private final class ChangeIndices extends Thread { @Override public void run() { - try { - final LineFileDocs docs = new LineFileDocs(random()); + try (final LineFileDocs docs = new LineFileDocs(random())) { int numDocs = 0; while (System.nanoTime() < endTimeNanos) { final int what = random().nextInt(3); diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/LineFileDocs.java b/lucene/test-framework/src/java/org/apache/lucene/util/LineFileDocs.java index fa409d194c58..6d0c4bffdd5f 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/util/LineFileDocs.java +++ b/lucene/test-framework/src/java/org/apache/lucene/util/LineFileDocs.java @@ -88,7 +88,6 @@ private synchronized void open() throws IOException { // true if the InputStream is not already randomly seek'd after the if/else block below: boolean needSkip; - boolean skipFirstLineFragment = false; long size = 0L, seekTo = 0L; if (is == null) { @@ -109,8 +108,15 @@ private synchronized void open() throws IOException { channel.position(seekTo); is = Channels.newInputStream(channel); - // we (likely) seeked to the middle of a line: - skipFirstLineFragment = true; + // read until newline char, otherwise we may hit "java.nio.charset.MalformedInputException: Input length = 1" + // exception in readline() below, because we seeked part way through a multi-byte (in UTF-8) encoded + // unicode character: + if (seekTo > 0L) { + int b; + do { + b = is.read(); + } while (b >= 0 && b != 13 && b != 10); + } needSkip = false; } @@ -169,11 +175,6 @@ private synchronized void open() throws IOException { .onMalformedInput(CodingErrorAction.REPORT) .onUnmappableCharacter(CodingErrorAction.REPORT); reader = new BufferedReader(new InputStreamReader(is, decoder), BUFFER_SIZE); - - if (skipFirstLineFragment) { - // read until end of line: - reader.readLine(); - } } public synchronized void reset() throws IOException { diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java index 9f2cd27c8c7e..cc779a0a2210 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java @@ -1003,6 +1003,7 @@ protected synchronized boolean maybeStall(MergeSource mergeSource) { if (rarely(r)) { c.setCheckPendingFlushUpdate(false); } + c.setMaxCommitMergeWaitSeconds(atLeast(r, 1)); return c; } diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java index bd59e8cf40bd..aef11ac1d7f1 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java +++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java @@ -16,17 +16,6 @@ */ package org.apache.lucene.util; -import static org.apache.lucene.util.LuceneTestCase.INFOSTREAM; -import static org.apache.lucene.util.LuceneTestCase.TEST_CODEC; -import static org.apache.lucene.util.LuceneTestCase.TEST_DOCVALUESFORMAT; -import static org.apache.lucene.util.LuceneTestCase.TEST_POSTINGSFORMAT; -import static org.apache.lucene.util.LuceneTestCase.VERBOSE; -import static org.apache.lucene.util.LuceneTestCase.assumeFalse; -import static 
org.apache.lucene.util.LuceneTestCase.localeForLanguageTag; -import static org.apache.lucene.util.LuceneTestCase.random; -import static org.apache.lucene.util.LuceneTestCase.randomLocale; -import static org.apache.lucene.util.LuceneTestCase.randomTimeZone; - import java.io.PrintStream; import java.util.Arrays; import java.util.HashSet; @@ -34,6 +23,8 @@ import java.util.Random; import java.util.TimeZone; +import com.carrotsearch.randomizedtesting.RandomizedContext; +import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.PostingsFormat; @@ -43,7 +34,7 @@ import org.apache.lucene.codecs.cheapbastard.CheapBastardCodec; import org.apache.lucene.codecs.compressing.CompressingCodec; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat; -import org.apache.lucene.codecs.lucene84.Lucene84Codec; +import org.apache.lucene.codecs.lucene86.Lucene86Codec; import org.apache.lucene.codecs.mockrandom.MockRandomPostingsFormat; import org.apache.lucene.codecs.simpletext.SimpleTextCodec; import org.apache.lucene.index.RandomCodec; @@ -54,8 +45,16 @@ import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.junit.internal.AssumptionViolatedException; -import com.carrotsearch.randomizedtesting.RandomizedContext; -import com.carrotsearch.randomizedtesting.generators.RandomPicks; +import static org.apache.lucene.util.LuceneTestCase.INFOSTREAM; +import static org.apache.lucene.util.LuceneTestCase.TEST_CODEC; +import static org.apache.lucene.util.LuceneTestCase.TEST_DOCVALUESFORMAT; +import static org.apache.lucene.util.LuceneTestCase.TEST_POSTINGSFORMAT; +import static org.apache.lucene.util.LuceneTestCase.VERBOSE; +import static org.apache.lucene.util.LuceneTestCase.assumeFalse; +import static org.apache.lucene.util.LuceneTestCase.localeForLanguageTag; +import static org.apache.lucene.util.LuceneTestCase.random; +import static org.apache.lucene.util.LuceneTestCase.randomLocale; +import static org.apache.lucene.util.LuceneTestCase.randomTimeZone; /** * Setup and restore suite-level environment (fine grained junk that @@ -189,7 +188,7 @@ public String toString() { } else if ("Compressing".equals(TEST_CODEC) || ("random".equals(TEST_CODEC) && randomVal == 6 && !shouldAvoidCodec("Compressing"))) { codec = CompressingCodec.randomInstance(random); } else if ("Lucene84".equals(TEST_CODEC) || ("random".equals(TEST_CODEC) && randomVal == 5 && !shouldAvoidCodec("Lucene84"))) { - codec = new Lucene84Codec(RandomPicks.randomFrom(random, Lucene50StoredFieldsFormat.Mode.values()) + codec = new Lucene86Codec(RandomPicks.randomFrom(random, Lucene50StoredFieldsFormat.Mode.values()) ); } else if (!"random".equals(TEST_CODEC)) { codec = Codec.forName(TEST_CODEC); diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java index f0697f95d494..2dc9ead733f2 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java +++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java @@ -53,8 +53,8 @@ import org.apache.lucene.codecs.blockterms.LuceneFixedGap; import org.apache.lucene.codecs.blocktreeords.BlockTreeOrdsPostingsFormat; import org.apache.lucene.codecs.lucene80.Lucene80DocValuesFormat; -import org.apache.lucene.codecs.lucene84.Lucene84Codec; import org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat; +import 
org.apache.lucene.codecs.lucene86.Lucene86Codec; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; import org.apache.lucene.document.BinaryDocValuesField; @@ -919,7 +919,7 @@ public DocValuesFormat getDocValuesFormatForField(String field) { * This may be different than {@link Codec#getDefault()} because that is randomized. */ public static Codec getDefaultCodec() { - return new Lucene84Codec(); + return new Lucene86Codec(); } /** diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/fst/FSTTester.java b/lucene/test-framework/src/java/org/apache/lucene/util/fst/FSTTester.java index 19111d13d9e3..e844a5eb6019 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/util/fst/FSTTester.java +++ b/lucene/test-framework/src/java/org/apache/lucene/util/fst/FSTTester.java @@ -296,11 +296,11 @@ FST doTest(int prune1, int prune2, boolean allowRandomSuffixSharing) throws I if (random.nextBoolean() && fst != null) { IOContext context = LuceneTestCase.newIOContext(random); IndexOutput out = dir.createOutput("fst.bin", context); - fst.save(out); + fst.save(out, out); out.close(); IndexInput in = dir.openInput("fst.bin", context); try { - fst = new FST(in, outputs); + fst = new FST(in, in, outputs); } finally { in.close(); dir.deleteFile("fst.bin"); @@ -340,6 +340,7 @@ protected boolean outputsEqual(T a, T b) { } // FST is complete + @SuppressWarnings("deprecation") private void verifyUnPruned(int inputMode, FST fst) throws IOException { final FST fstLong; diff --git a/lucene/test-framework/src/test/org/apache/lucene/search/TestBaseExplanationTestCase.java b/lucene/test-framework/src/test/org/apache/lucene/search/TestBaseExplanationTestCase.java index 2f0f067c5136..1889d5e18842 100644 --- a/lucene/test-framework/src/test/org/apache/lucene/search/TestBaseExplanationTestCase.java +++ b/lucene/test-framework/src/test/org/apache/lucene/search/TestBaseExplanationTestCase.java @@ -21,21 +21,19 @@ import org.apache.lucene.index.Term; -import junit.framework.AssertionFailedError; - -/** +/** * Tests that the {@link BaseExplanationTestCase} helper code, as well as * {@link CheckHits#checkNoMatchExplanations} are checking what they are supposed to.
*/ public class TestBaseExplanationTestCase extends BaseExplanationTestCase { public void testQueryNoMatchWhenExpected() throws Exception { - expectThrows(AssertionFailedError.class, () -> { + expectThrows(AssertionError.class, () -> { qtest(new TermQuery(new Term(FIELD, "BOGUS")), new int[] { 3 /* none */ }); }); } public void testQueryMatchWhenNotExpected() throws Exception { - expectThrows(AssertionFailedError.class, () -> { + expectThrows(AssertionError.class, () -> { qtest(new TermQuery(new Term(FIELD, "w1")), new int[] { 0, 1 /*, 2, 3 */ }); }); } @@ -45,7 +43,7 @@ public void testIncorrectExplainScores() throws Exception { qtest(new TermQuery(new Term(FIELD, "zz")), new int[] { 1, 3 }); // ensure when the Explanations are broken, we get an error about those matches - expectThrows(AssertionFailedError.class, () -> { + expectThrows(AssertionError.class, () -> { qtest(new BrokenExplainTermQuery(new Term(FIELD, "zz"), false, true), new int[] { 1, 3 }); }); @@ -56,7 +54,7 @@ public void testIncorrectExplainMatches() throws Exception { qtest(new TermQuery(new Term(FIELD, "zz")), new int[] { 1, 3 }); // ensure when the Explanations are broken, we get an error about the non matches - expectThrows(AssertionFailedError.class, () -> { + expectThrows(AssertionError.class, () -> { CheckHits.checkNoMatchExplanations(new BrokenExplainTermQuery(new Term(FIELD, "zz"), true, false), FIELD, searcher, new int[] { 1, 3 }); }); diff --git a/lucene/test-framework/src/test/org/apache/lucene/util/TestExceptionInBeforeClassHooks.java b/lucene/test-framework/src/test/org/apache/lucene/util/TestExceptionInBeforeClassHooks.java index bd99b4bea648..16c64cf9de76 100644 --- a/lucene/test-framework/src/test/org/apache/lucene/util/TestExceptionInBeforeClassHooks.java +++ b/lucene/test-framework/src/test/org/apache/lucene/util/TestExceptionInBeforeClassHooks.java @@ -20,8 +20,7 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import junit.framework.Assert; - +import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 3282f7c78978..013a84510d02 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -10,7 +10,7 @@ Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this r New Features --------------------- -(No changes) +* SOLR-14440: Introduce new Certificate Authentication Plugin to load Principal from certificate subject. (Mike Drob) Improvements ---------------------- @@ -25,6 +25,8 @@ Improvements Other Changes ---------------------- +* LUCENE-9391: Upgrade HPPC to 0.8.2. (Haoyu Zhai) + * SOLR-10288: Remove non-minified JavaScript from the webapp. (Erik Hatcher, marcussorealheis) * SOLR-13655: Upgrade Collections.unModifiableSet to Set.of and Set.copyOf (Atri Sharma via Tomás Fernández Löbbe) @@ -67,6 +69,16 @@ Other Changes * SOLR-14412: Automatically set urlScheme to https when running secure solr with embedded zookeeper. (Mike Drob) Do not erroneously set solr.jetty.https.port system property when running in http mode (Upendra Penegalapati) +* SOLR-14014: Introducing a system property that allows users to disable the Admin UI, which is enabled by default. + If you have security concerns or other reasons to disable the Admin UI, you can modify `SOLR_ADMIN_UI_DISABLED` in + `solr.in.sh`/`solr.in.cmd` at startup.
(marcussorealheis) + +* SOLR-14486: Autoscaling simulation framework no longer creates /clusterstate.json (format 1), + instead it creates individual per-collection /state.json files (format 2). (ab) + +* SOLR-12823: Remove /clusterstate.json support: support for collections created with stateFormat=1, + as well as support for Collection API MIGRATESTATEFORMAT action and support for the legacyCloud flag (Ilan Ginzburg). + ================== 8.6.0 ================== Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release. @@ -76,10 +88,19 @@ New Features --------------------- * SOLR-14210: HealthCheckHandler can now require that all cores are healthy before returning 'OK' To enable, add &requireHealthyCores=true to enable (janhoy, Houston Putman, shalin) -* SOLR-13942: A read API at /api/cluster/zk/* to fetch raw ZK data and view contents of a ZK direcory (noble) +* SOLR-13942: A read API at /api/cluster/zk/* to fetch raw ZK data and view contents of a ZK directory (noble) * SOLR-14237: A new panel with security info in admin UI's dashboard (Ishan Chattopadhyaya, Moshe Bla) +* SOLR-12131: ExternalRoleRuleBasedAuthorizationPlugin which gets user's roles from request (janhoy) + +* SOLR-14478: Allow the diff Stream Evaluator to operate on the rows of a matrix (Joel Bernstein) + +* SOLR-14476: Add percentiles and standard deviation aggregations to stats, facet and + timeseries Streaming Expressions (Joel Bernstein) + +* SOLR-14470: Add streaming expressions to /export handler. (ab, Joel Bernstein) + Improvements --------------------- * SOLR-14316: Remove unchecked type conversion warning in JavaBinCodec's readMapEntry's equals() method @@ -104,6 +125,32 @@ Improvements * SOLR-14433: Metrics: SolrShardReporter's default metrics list now includes TLOG and UPDATE./update (David Smiley) +* SOLR-14423: Move static SolrClientCache from StreamHandler to CoreContainer for wider reuse and better life-cycle management. (ab) + +* SOLR-14407: Handle shards.purpose in the postlogs tool (Joel Bernstein) + +* SOLR-13325: ComputePlanAction now supports a collection selector of the form `collections: {policy: my_policy}` + which can be used to select multiple collections that match collection property/value pairs. This is useful to + maintain a whitelist of collections for which actions are taken without needing to hard code the collection names + themselves. The collection hints are pushed down to the policy engine so operations for non-matching collections + are not computed at all. (ab, shalin) + +* SOLR-14419: json.queries as well as other parameters may be referenced via {"param":"ref"} in Query DSL (Mikhail Khludnev) + +* SOLR-11334: hl.fl and tv.fl now parse field lists when they have both commas and spaces + (David Smiley, Yasufumi Mizoguchi) + +* SOLR-14442: bin/solr and bin\solr.cmd invoke jstack before forceful termination, if jstack is available. + Also, bin\solr.cmd executes forceful termination even if the port is already unbound (Christine Poerschke via Mikhail Khludnev). + +* SOLR-14384: SolrRequestInfo now stacks internally when a new request is set/clear'ed. + Also fixes SolrIndexSearcher.warm which should have reinstated previous SRI. + (Nazerke Seidan, David Smiley) + +* SOLR-14561: CoreAdminAPI's parameters instanceDir and dataDir are now validated, and must be relative to either + SOLR_HOME, SOLR_DATA_HOME or coreRootDir. Added new solr.xml config 'allowPaths', controlled by system property + 'solr.allowPaths' that allows you to add other allowed paths when needed.
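The SOLR-14561 entry above amounts to an allow-list check on user-supplied paths. A minimal sketch of that idea, with invented class and method names rather than Solr's actual implementation:

import java.nio.file.Path;
import java.util.Set;

// Hypothetical illustration of an "allowPaths"-style check: a path is accepted
// only if, once normalized, it stays under one of the configured roots.
final class PathAllowList {
  private final Set<Path> allowedRoots; // e.g. SOLR_HOME, SOLR_DATA_HOME, -Dsolr.allowPaths entries

  PathAllowList(Set<Path> allowedRoots) {
    this.allowedRoots = allowedRoots;
  }

  void assertAllowed(Path path) {
    Path normalized = path.toAbsolutePath().normalize(); // collapses "..", defeating traversal tricks
    boolean allowed = allowedRoots.stream()
        .map(root -> root.toAbsolutePath().normalize())
        .anyMatch(normalized::startsWith);
    if (!allowed) {
      throw new IllegalArgumentException(normalized + " is not under an allowed location");
    }
  }
}

The normalize-before-compare step is the important design choice: comparing raw strings would let "SOLR_HOME/../elsewhere" slip past the check.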
+ Optimizations --------------------- * SOLR-8306: Do not collect expand documents when expand.rows=0 (Marshall Sanders, Amelia Henderson) @@ -118,6 +165,16 @@ Optimizations * LUCENE-7788: fail precommit on unparameterised log messages and examine for wasted work/objects (Erick Erickson) +* SOLR-13289: When the "minExactCount" parameter is provided in queries and its value is lower than the number of hits, + Solr can speed up the query resolution by using the Block-Max WAND algorithm (see LUCENE-8135). When doing this, the + count of matching documents in the response (numFound) will be an approximation. + (Ishan Chattopadhyaya, Munendra S N, Tomás Fernández Löbbe, David Smiley) + +* SOLR-14472: Autoscaling "cores" preference now retrieves the core count more efficiently, and counts all cores. + (David Smiley) + +* SOLR-14552: Add BlockMax-WAND support to ReRank queries (Tomás Fernández Löbbe) + Bug Fixes --------------------- * SOLR-13264: IndexSizeTrigger aboveOp / belowOp properties not in valid properties. @@ -143,13 +200,58 @@ Bug Fixes * SOLR-14291: Handle dotted fields in legacy Analytics Component (Anatolii Siuniaev via Mikhail Khludnev) -* SOLR-14411: Fix Admin UI collection/core drop-downs placeholder text. Completes work started in SOLR-14359 (janhoy) - * SOLR-14371: Zk StatusHandler now parses dynamic zk server config if supported, fixing Admin UI Zookeeper Status screen in case dynamic reconfig host list differs from static zkHost string (janhoy) * SOLR-14421: New examples in solr.in.cmd in Solr 8.5 don't work as provided (Colvin Cowie via janhoy) +* SOLR-14431: SegmentsInfoRequestHandler does not release IndexWriter (Tiziano Degaetano, ab) + +* SOLR-14463: Solr Admin ZkStatus page now works with ZK 3.6, without 'For input string: "null"' error (janhoy, Bernd Wahlen) + +* SOLR-14456: Fix Content-Type header usage when a request is forwarded from Solr node to Solr + node with compression enabled (samuelgmartinez via Houston Putman) + +* SOLR-8394: /admin/luke was always showing 0 for indexHeapUsageBytes. It should work now. + (Steve Molloy, Isabelle Giguere, David Smiley) + +* SOLR-14492: Fix ArrayIndexOutOfBoundsException in json.facet 'terms' when FacetFieldProcessorByHashDV is + used with aggregations over multivalued numeric fields (hossman) + +* SOLR-14477: Fix incorrect 'relatedness()' calculations in json.facet 'terms' when 'prefix' option is used + (hossman) + +* SOLR-14504: ZkController LiveNodesListener has NullPointerException in startup race. + (Colvin Cowie via ab) + +* SOLR-14498: Upgrade to Caffeine 2.8.4, which fixes the cache poisoning issue. (Jakub Zytka, ab) + +* SOLR-14517: Don't ignore 'mm' localparam on edismax queries using operators (Yuriy Koval via Jason Gerlowski) + +* SOLR-14491: Intercepting internode requests in KerberosPlugin when HTTP/2 client is used (Ishan Chattopadhyaya, Moshe Bla) + +* SOLR-14525: SolrCoreAware, ResourceLoaderAware should be honored for plugins loaded from packages (noble) + +* SOLR-9679: When removing zk node /security.json, security is now disabled gracefully (janhoy) + +* SOLR-14520: Fixed server errors from the json.facet allBuckets:true option when combined with refine:true + (Michael Gibney, hossman) + +* SOLR-14467: Fix relatedness() stat in json.facets to no longer cause server errors (or nonsense results) + when combined with allBuckets:true.
(Michael Gibney, hossman) + +* SOLR-13203: Return 400 status code on invalid dynamic field for Edismax's user Fields + (Johannes Kloos, mrsoong via Munendra S N) + +* SOLR-14550: Fix duplicates issue in Atomic updates with add-distinct (Thomas Corthals, Munendra S N) + +* SOLR-14345: Return proper error message when non-BinaryResponseParser is used in solrJ (Munendra S N) + +* SOLR-14516: NPE in JsonTextWriter (noble) + +* SOLR-14577: Return 400 BAD REQUEST when field is missing on a Terms query parser request + (Tomás Fernández Löbbe) + Other Changes --------------------- * SOLR-14197: SolrResourceLoader: marked many methods as deprecated, and in some cases rerouted existing logic to avoid @@ -186,6 +288,91 @@ Other Changes * SOLR-13886: HDFSSyncSliceTest and SyncSliceTest started failing frequently (Kevin Risden) +* SOLR-14173: Major redesign of the Solr Reference Guide (Cassandra Targett) + +* SOLR-14461: Replaced commons-fileupload dependency with Jetty's facilities. (David Smiley) + +* SOLR-14466: Upgrade log4j2 to latest release (2.13.2) (Erick Erickson) + +* SOLR-11934: Visit Solr logging, it's too noisy. Note particularly that the messages for + opening a new searcher have changed and include the autowarm time. (Erick Erickson) + +* SOLR-7880: Update commons-cli to 1.4 (Erick Erickson) + +* SOLR-14226: Fix or suppress 14 resource leak warnings in apache/solr/core (Andras Salaman via + Erick Erickson) + +* SOLR-14482: Fix or suppress warnings in solr/search/facet (Erick Erickson) + +* SOLR-14485: Fix or suppress 11 resource leak warnings in apache/solr/cloud (Andras Salaman via + Erick Erickson) + +* SOLR-14495: Fix or suppress warnings in solr/search/function (Erick Erickson) + +* SOLR-14280: Improve error reporting in SolrConfig (Andras Salamon via Jason Gerlowski) + +* SOLR-14474: Fix remaining auxiliary class warnings in Solr (Erick Erickson) + +* SOLR-14519: Fix or suppress warnings in solr/cloud/autoscaling/ (Erick Erickson) + +* SOLR-14526: Fix or suppress warnings in apache/solr/core (Erick Erickson) + +* SOLR-14533: Fix or suppress warnings in solr/handler/admin (Andras Salamon, Erick Erickson) + +* SOLR-14535: Fix or suppress warnings in apache/solr/handler/component (Erick Erickson) + +* SOLR-14538: Fix or suppress remaining warnings in apache/solr/handler (Erick Erickson) + +* SOLR-14536: Fix or suppress warnings in apache/solr/common (Erick Erickson) + +* SOLR-14480: Fix or suppress warnings in solr/cloud/api (Erick Erickson) + +* SOLR-13492: Ensure explicit GCs are concurrent by adding '+ExplicitGCInvokesConcurrent'.
+ (Guna Sekhar Dora, Shawn Heisey, Munendra S N) + +* SOLR-14542: Fix or suppress warnings in solr/handler/dataimport (Erick Erickson) + +* SOLR-14544: Fix or suppress warnings in solr/client/solrj/io/eval (Erick Erickson) + +* SOLR-14543: Fix or suppress warnings in apache/solr/search (Erick Erickson) + +* SOLR-14545: Fix or suppress warnings in apache/solr/update (Erick Erickson) + +* SOLR-14548: Address warning: static member should be qualified by type name (Mike Drob) + +* SOLR-14547: Fix or suppress warnings in solr/client/solrj/io/stream (Erick Erickson) + +* SOLR-14455: Fix or suppress warnings in solr/test-framework (Erick Erickson) + +* SOLR-14559: Fix or suppress warnings in solr/core/src/java/org/apache/solr/util, + response, cloud, security, schema, api (Erick Erickson) + +* SOLR-14563: Fix or suppress warnings in solr/contrib (Erick Erickson) + +* SOLR-14565: Fix or suppress warnings in solrj/impl and solrj/io/graph (Erick Erickson) + +* SOLR-14564: Fix or suppress remaining warnings in solr/core (Erick Erickson) + +* SOLR-14567: Fix or suppress remaining warnings in solrj (Erick Erickson) + +* SOLR-14556: Fix or suppress warnings in solrj/cloud/autoscaling (Erick Erickson) + +* SOLR-14573: Fix or suppress warnings in solrj/src/test (Erick Erickson) + +* SOLR-14574: Fix or suppress warnings in solr/core/src/test (parts 1 and 2) (Erick Erickson) + +================== 8.5.2 ================== + +Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release. + +Bug Fixes +--------------------- + +* SOLR-14411: Fix Admin UI collection/core drop-downs placeholder text. Completes work started in SOLR-14359 (janhoy) + +* SOLR-14471: Fix bug in shards.preference behavior, base replica selection strategy not applied to the last group of + equivalent replicas. (Michael Gibney via Tomás Fernández Löbbe) + ================== 8.5.1 ================== Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release. diff --git a/solr/bin/solr b/solr/bin/solr index 27e625f7ee52..1bbe3b388d87 100755 --- a/solr/bin/solr +++ b/solr/bin/solr @@ -103,10 +103,17 @@ fi if [ -n "$SOLR_JAVA_HOME" ]; then JAVA="$SOLR_JAVA_HOME/bin/java" + JSTACK="$SOLR_JAVA_HOME/bin/jstack" elif [ -n "$JAVA_HOME" ]; then - for java in "$JAVA_HOME"/bin/amd64/java "$JAVA_HOME"/bin/java; do - if [ -x "$java" ]; then - JAVA="$java" + for java in "$JAVA_HOME"/bin/amd64 "$JAVA_HOME"/bin; do + if [ -x "$java/java" ]; then + JAVA="$java/java" + if [ -x "$java/jstack" ]; then + JSTACK="$java/jstack" + else + echo >&2 "The currently defined JAVA_HOME ($JAVA_HOME) refers to a location" + echo >&2 "where java was found but jstack was not found. Continuing." + fi break fi done @@ -119,6 +126,7 @@ elif [ -n "$JAVA_HOME" ]; then fi else JAVA=java + JSTACK=jstack fi if [ -z "$SOLR_STOP_WAIT" ]; then @@ -821,6 +829,7 @@ function run_package() { # tries to gracefully stop Solr using the Jetty # stop command and if that fails, then uses kill -9 +# (will attempt to jstack before killing) function stop_solr() { DIR="$1" @@ -857,6 +866,10 @@ function stop_solr() { CHECK_PID=`ps auxww | awk '{print $2}' | grep -w $SOLR_PID | sort -r | tr -d ' '` if [ "$CHECK_PID" != "" ]; then + if [ "$JSTACK" != "" ]; then + echo -e "Solr process $SOLR_PID is still running; jstacking it now." + $JSTACK $SOLR_PID + fi echo -e "Solr process $SOLR_PID is still running; forcefully killing it now." 
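# Last resort: the jstack thread dump taken above (only when a jstack binary was
# found beside java) is the one diagnostic captured before the process is killed.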
kill -9 $SOLR_PID echo "Killed process $SOLR_PID" @@ -2097,6 +2110,14 @@ else SECURITY_MANAGER_OPTS=() fi +# Enable ADMIN UI by default, and give the option for users to disable it +if [ "$SOLR_ADMIN_UI_DISABLED" == "true" ]; then + SOLR_ADMIN_UI="-DdisableAdminUI=true" + echo -e "ADMIN UI Disabled" +else + SOLR_ADMIN_UI="-DdisableAdminUI=false" +fi + JAVA_MEM_OPTS=() if [ -z "$SOLR_HEAP" ] && [ -n "$SOLR_JAVA_MEM" ]; then JAVA_MEM_OPTS=($SOLR_JAVA_MEM) @@ -2131,7 +2152,8 @@ function start_solr() { '-XX:+ParallelRefProcEnabled' \ '-XX:MaxGCPauseMillis=250' \ '-XX:+UseLargePages' \ - '-XX:+AlwaysPreTouch') + '-XX:+AlwaysPreTouch' \ + '-XX:+ExplicitGCInvokesConcurrent') else GC_TUNE=($GC_TUNE) fi @@ -2208,7 +2230,7 @@ function start_solr() { # users who don't care about useful error msgs can override in SOLR_OPTS with +OmitStackTraceInFastThrow "${SOLR_HOST_ARG[@]}" "-Duser.timezone=$SOLR_TIMEZONE" "-XX:-OmitStackTraceInFastThrow" \ "-Djetty.home=$SOLR_SERVER_DIR" "-Dsolr.solr.home=$SOLR_HOME" "-Dsolr.data.home=$SOLR_DATA_HOME" "-Dsolr.install.dir=$SOLR_TIP" \ - "-Dsolr.default.confdir=$DEFAULT_CONFDIR" "${LOG4J_CONFIG[@]}" "${SOLR_OPTS[@]}" "${SECURITY_MANAGER_OPTS[@]}") + "-Dsolr.default.confdir=$DEFAULT_CONFDIR" "${LOG4J_CONFIG[@]}" "${SOLR_OPTS[@]}" "${SECURITY_MANAGER_OPTS[@]}" "${SOLR_ADMIN_UI}") if [ "$SOLR_MODE" == "solrcloud" ]; then IN_CLOUD_MODE=" in SolrCloud mode" diff --git a/solr/bin/solr.cmd b/solr/bin/solr.cmd index 4b3f991bdf36..4b4d8132c5bd 100755 --- a/solr/bin/solr.cmd +++ b/solr/bin/solr.cmd @@ -1042,13 +1042,10 @@ IF "%SCRIPT_CMD%"=="stop" ( del "%SOLR_TIP%"\bin\solr-!SOME_SOLR_PORT!.port timeout /T 5 REM Kill it if it is still running after the graceful shutdown - For /f "tokens=2,5" %%M in ('netstat -nao ^| find "TCP " ^| find ":0 " ^| find ":!SOME_SOLR_PORT! "') do ( - IF "%%N"=="%%k" ( - IF "%%M"=="%SOLR_JETTY_HOST%:!SOME_SOLR_PORT!" ( - @echo Forcefully killing process %%N - taskkill /f /PID %%N - ) - ) + IF EXIST "%JAVA_HOME%\bin\jstack.exe" ( + qprocess "%%k" >nul 2>nul && "%JAVA_HOME%\bin\jstack.exe" %%k && taskkill /f /PID %%k + ) else ( + qprocess "%%k" >nul 2>nul && taskkill /f /PID %%k ) ) ) @@ -1072,13 +1069,10 @@ IF "%SCRIPT_CMD%"=="stop" ( del "%SOLR_TIP%"\bin\solr-%SOLR_PORT%.port timeout /T 5 REM Kill it if it is still running after the graceful shutdown - For /f "tokens=2,5" %%j in ('netstat -nao ^| find "TCP " ^| find ":0 " ^| find ":%SOLR_PORT% "') do ( - IF "%%N"=="%%k" ( - IF "%%j"=="%SOLR_JETTY_HOST%:%SOLR_PORT%" ( - @echo Forcefully killing process %%N - taskkill /f /PID %%N - ) - ) + IF EXIST "%JAVA_HOME%\bin\jstack.exe" ( + qprocess "%%N" >nul 2>nul && "%JAVA_HOME%\bin\jstack.exe" %%N && taskkill /f /PID %%N + ) else ( + qprocess "%%N" >nul 2>nul && taskkill /f /PID %%N ) ) ) @@ -1199,6 +1193,13 @@ IF "%SOLR_SECURITY_MANAGER_ENABLED%"=="true" ( -Dsolr.internal.network.permission=* ) +REM Enable ADMIN UI by default, and give the option for users to disable it +IF "%SOLR_ADMIN_UI_DISABLED%"=="true" ( + set DISABLE_ADMIN_UI="true" +) else ( + set DISABLE_ADMIN_UI="false" +) + IF NOT "%SOLR_HEAP%"=="" set SOLR_JAVA_MEM=-Xms%SOLR_HEAP% -Xmx%SOLR_HEAP% IF "%SOLR_JAVA_MEM%"=="" set SOLR_JAVA_MEM=-Xms512m -Xmx512m IF "%SOLR_JAVA_STACK_SIZE%"=="" set SOLR_JAVA_STACK_SIZE=-Xss256k @@ -1211,7 +1212,8 @@ IF "%GC_TUNE%"=="" ( -XX:+ParallelRefProcEnabled ^ -XX:MaxGCPauseMillis=250 ^ -XX:+UseLargePages ^ - -XX:+AlwaysPreTouch + -XX:+AlwaysPreTouch ^ + -XX:+ExplicitGCInvokesConcurrent ) if !JAVA_MAJOR_VERSION! 
GEQ 9 ( @@ -1288,6 +1290,7 @@ REM '-OmitStackTraceInFastThrow' ensures stack traces in errors, REM users who don't care about useful error msgs can override in SOLR_OPTS with +OmitStackTraceInFastThrow set "START_OPTS=%START_OPTS% -XX:-OmitStackTraceInFastThrow" set START_OPTS=%START_OPTS% !GC_TUNE! %GC_LOG_OPTS% +set START_OPTS=%START_OPTS% -DdisableAdminUI=%DISABLE_ADMIN_UI% IF NOT "!CLOUD_MODE_OPTS!"=="" set "START_OPTS=%START_OPTS% !CLOUD_MODE_OPTS!" IF NOT "!IP_ACL_OPTS!"=="" set "START_OPTS=%START_OPTS% !IP_ACL_OPTS!" IF NOT "%REMOTE_JMX_OPTS%"=="" set "START_OPTS=%START_OPTS% %REMOTE_JMX_OPTS%" diff --git a/solr/bin/solr.in.cmd b/solr/bin/solr.in.cmd index 45622d86c4aa..c8a6c8c2df98 100755 --- a/solr/bin/solr.in.cmd +++ b/solr/bin/solr.in.cmd @@ -35,6 +35,7 @@ REM set GC_LOG_OPTS=-verbose:gc -XX:+PrintHeapAtGC -XX:+PrintGCDetails -XX:+Prin REM Various GC settings have shown to work well for a number of common Solr workloads. REM See solr.cmd GC_TUNE for the default list. +REM set GC_TUNE=-XX:+ExplicitGCInvokesConcurrent REM set GC_TUNE=-XX:SurvivorRatio=4 REM set GC_TUNE=%GC_TUNE% -XX:TargetSurvivorRatio=90 REM set GC_TUNE=%GC_TUNE% -XX:MaxTenuringThreshold=8 @@ -203,3 +204,12 @@ REM Runtime properties are passed to the security policy file (server\etc\securi REM You can also tweak via standard JDK files such as ~\.java.policy, see https://s.apache.org/java8policy REM This is experimental! It may not work at all with Hadoop/HDFS features. REM set SOLR_SECURITY_MANAGER_ENABLED=true +REM This variable provides you with the option to disable the Admin UI if you uncomment the variable below and +REM change the value to true. The option is configured as a system property as defined in SOLR_START_OPTS in the start +REM scripts. +REM set SOLR_ADMIN_UI_DISABLED=false + +REM Solr is by default allowed to read and write data from/to SOLR_HOME and a few other well-defined locations +REM Sometimes it may be necessary to place a core or a backup in a different location or on a different disk +REM This parameter lets you specify file system path(s) to explicitly allow. The special value of '*' will allow any path +REM SOLR_OPTS="%SOLR_OPTS% -Dsolr.allowPaths=D:\,E:\other\path" diff --git a/solr/bin/solr.in.sh b/solr/bin/solr.in.sh index b13d2084b341..3ecd7b942464 100644 --- a/solr/bin/solr.in.sh +++ b/solr/bin/solr.in.sh @@ -47,6 +47,7 @@ # These GC settings have shown to work well for a number of common Solr workloads #GC_TUNE=" \ +#-XX:+ExplicitGCInvokesConcurrent \ #-XX:SurvivorRatio=4 \ #-XX:TargetSurvivorRatio=90 \ #-XX:MaxTenuringThreshold=8 \ @@ -234,4 +235,12 @@ # You can also tweak via standard JDK files such as ~/.java.policy, see https://s.apache.org/java8policy # This is experimental! It may not work at all with Hadoop/HDFS features. #SOLR_SECURITY_MANAGER_ENABLED=true - +# This variable provides you with the option to disable the Admin UI if you uncomment the variable below and +# change the value to true. The option is configured as a system property as defined in SOLR_START_OPTS in the start +# scripts. +# SOLR_ADMIN_UI_DISABLED=false + +# Solr is by default allowed to read and write data from/to SOLR_HOME and a few other well-defined locations +# Sometimes it may be necessary to place a core or a backup in a different location or on a different disk +# This parameter lets you specify file system path(s) to explicitly allow.
The special value of '*' will allow any path +#SOLR_OPTS="$SOLR_OPTS -Dsolr.allowPaths=/mnt/bigdisk,/other/path" diff --git a/solr/build.gradle b/solr/build.gradle index 148c8e8f968c..9edc4d1aa2cf 100644 --- a/solr/build.gradle +++ b/solr/build.gradle @@ -15,6 +15,8 @@ * limitations under the License. */ +description = 'Parent project for Apache Solr' + subprojects { group "org.apache.solr" } \ No newline at end of file diff --git a/solr/build.xml b/solr/build.xml index b0ff51149ed7..c3cf6bc6a137 100644 --- a/solr/build.xml +++ b/solr/build.xml @@ -207,7 +207,7 @@ - + diff --git a/solr/contrib/analysis-extras/build.gradle b/solr/contrib/analysis-extras/build.gradle index 1a3a42330367..c39629a7bee4 100644 --- a/solr/contrib/analysis-extras/build.gradle +++ b/solr/contrib/analysis-extras/build.gradle @@ -18,6 +18,8 @@ apply plugin: 'java-library' +description = 'Additional analysis components' + dependencies { api project(':solr:core') diff --git a/solr/contrib/analysis-extras/src/java/org/apache/solr/update/processor/OpenNLPExtractNamedEntitiesUpdateProcessorFactory.java b/solr/contrib/analysis-extras/src/java/org/apache/solr/update/processor/OpenNLPExtractNamedEntitiesUpdateProcessorFactory.java index d69c367f1b74..2fdbd013c50f 100644 --- a/solr/contrib/analysis-extras/src/java/org/apache/solr/update/processor/OpenNLPExtractNamedEntitiesUpdateProcessorFactory.java +++ b/solr/contrib/analysis-extras/src/java/org/apache/solr/update/processor/OpenNLPExtractNamedEntitiesUpdateProcessorFactory.java @@ -215,7 +215,7 @@ protected final FieldNameSelector getSourceSelector() { @SuppressWarnings("unchecked") @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { // high level (loose) check for which type of config we have. // @@ -260,7 +260,7 @@ public void init(NamedList args) { * "source" and "dest" init params do not exist. */ @SuppressWarnings("unchecked") - private void initSimpleRegexReplacement(NamedList args) { + private void initSimpleRegexReplacement(@SuppressWarnings({"rawtypes"})NamedList args) { // The syntactic sugar for the case where there is only one regex pattern for source and the same pattern // is used for the destination pattern... // @@ -316,7 +316,7 @@ private void initSimpleRegexReplacement(NamedList args) { * "source" and "dest" init params do exist. */ @SuppressWarnings("unchecked") - private void initSourceSelectorSyntax(NamedList args) { + private void initSourceSelectorSyntax(@SuppressWarnings({"rawtypes"})NamedList args) { // Full and complete syntax where source and dest are mandatory. // // source may be a single string or a selector. 
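As background for the hunks below: resolving a destination field name from a source-field regex, as this factory does, is plain java.util.regex substitution. A minimal, self-contained illustration (the pattern and template here are invented examples, not values from this patch):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class DestPatternDemo {
  public static void main(String[] args) {
    Pattern sourcePattern = Pattern.compile("(.+)_txt$"); // invented source-field pattern
    String destTemplate = "$1_people";                    // invented replacement template
    Matcher matcher = sourcePattern.matcher("body_txt");
    if (matcher.find()) {
      // replaceAll rewrites the matched field name via the template -> "body_people"
      System.out.println(matcher.replaceAll(destTemplate));
    } else {
      // mirrors the factory's behavior: no match means the field is skipped
      System.out.println("field skipped");
    }
  }
}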
@@ -340,6 +340,7 @@ private void initSourceSelectorSyntax(NamedList args) { if (1 == sources.size()) { if (sources.get(0) instanceof NamedList) { // nested set of selector options + @SuppressWarnings({"rawtypes"}) NamedList selectorConfig = (NamedList) args.remove(SOURCE_PARAM); srcInclusions = parseSelectorParams(selectorConfig); @@ -355,6 +356,7 @@ private void initSourceSelectorSyntax(NamedList args) { throw new SolrException(SERVER_ERROR, "Init param '" + SOURCE_PARAM + "' child 'exclude' must be "); } + @SuppressWarnings({"rawtypes"}) NamedList exc = (NamedList) excObj; srcExclusions.add(parseSelectorParams(exc)); if (0 < exc.size()) { @@ -387,6 +389,7 @@ private void initSourceSelectorSyntax(NamedList args) { } if (d instanceof NamedList) { + @SuppressWarnings({"rawtypes"}) NamedList destList = (NamedList) d; Object patt = destList.remove(PATTERN_PARAM); @@ -489,7 +492,7 @@ public void processAdd(AddUpdateCommand cmd) throws IOException { if (matcher.find()) { resolvedDest = matcher.replaceAll(dest); } else { - log.debug("srcSelector.shouldMutate(\"{}\") returned true, " + + log.debug("srcSelector.shouldMutate('{}') returned true, " + "but replacement pattern did not match, field skipped.", fname); continue; } @@ -571,7 +574,7 @@ private void extractEntitiesFromSentence(String fullText, List terms, Li } /** macro */ - private static SelectorParams parseSelectorParams(NamedList args) { + private static SelectorParams parseSelectorParams(@SuppressWarnings({"rawtypes"})NamedList args) { return FieldMutatingUpdateProcessorFactory.parseSelectorParams(args); } } diff --git a/solr/contrib/analytics/build.gradle b/solr/contrib/analytics/build.gradle index 9f975d25bcfa..91e165ab2e47 100644 --- a/solr/contrib/analytics/build.gradle +++ b/solr/contrib/analytics/build.gradle @@ -17,6 +17,8 @@ apply plugin: 'java-library' +description = 'Analytics Package' + dependencies { implementation project(':solr:core') testImplementation project(':solr:test-framework') diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/ExpressionFactory.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/ExpressionFactory.java index 9407d1dd354b..1bd4334976ab 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/ExpressionFactory.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/ExpressionFactory.java @@ -849,43 +849,43 @@ public static interface CreatorFunction { public static interface ConstantFunction { AnalyticsValueStream apply(String t) throws SolrException; } + static class VariableFunctionInfo { + public String[] params; + public String returnSignature; + } + static class WeightedMeanVariableFunction { + public static final String name = "wmean"; + public static final String params = "a,b"; + public static final String function = DivideFunction.name+"("+SumFunction.name+"("+MultFunction.name+"(a,b)),"+SumFunction.name+"("+FilterFunction.name+"(b,"+ExistsFunction.name+"(a))))"; + } + static class SumOfSquaresVariableFunction { + public static final String name = "sumofsquares"; + public static final String params = "a"; + public static final String function = SumFunction.name+"("+PowerFunction.name+"(a,2))"; + } + static class SquareRootVariableFunction { + public static final String name = "sqrt"; + public static final String params = "a"; + public static final String function = PowerFunction.name+"(a,0.5)"; + } + static class VarianceVariableFunction { + public static final String name = "variance"; + public static final String params = "a"; + 
public static final String function = SubtractFunction.name+"("+MeanFunction.name+"("+PowerFunction.name+"(a,2)),"+PowerFunction.name+"("+MeanFunction.name+"(a),2))"; + } + static class SandardDeviationVariableFunction { + public static final String name = "stddev"; + public static final String params = "a"; + public static final String function = SquareRootVariableFunction.name+"("+VarianceVariableFunction.name+"(a))"; + } + static class CSVVariableFunction { + public static final String name = "csv"; + public static final String params = "a"+ExpressionFactory.variableLengthParamSuffix; + public static final String function = SeparatedConcatFunction.name+"(',',a)"; + } + static class CSVOutputVariableFunction { + public static final String name = "csv_output"; + public static final String params = "a"+ExpressionFactory.variableLengthParamSuffix; + public static final String function = "concat_sep(',',a"+ExpressionFactory.variableForEachSep+FillMissingFunction.name+"("+SeparatedConcatFunction.name+"(';',"+ExpressionFactory.variableForEachParam+"),''))"; + } } -class VariableFunctionInfo { - public String[] params; - public String returnSignature; -} -class WeightedMeanVariableFunction { - public static final String name = "wmean"; - public static final String params = "a,b"; - public static final String function = DivideFunction.name+"("+SumFunction.name+"("+MultFunction.name+"(a,b)),"+SumFunction.name+"("+FilterFunction.name+"(b,"+ExistsFunction.name+"(a))))"; -} -class SumOfSquaresVariableFunction { - public static final String name = "sumofsquares"; - public static final String params = "a"; - public static final String function = SumFunction.name+"("+PowerFunction.name+"(a,2))"; -} -class SquareRootVariableFunction { - public static final String name = "sqrt"; - public static final String params = "a"; - public static final String function = PowerFunction.name+"(a,0.5)"; -} -class VarianceVariableFunction { - public static final String name = "variance"; - public static final String params = "a"; - public static final String function = SubtractFunction.name+"("+MeanFunction.name+"("+PowerFunction.name+"(a,2)),"+PowerFunction.name+"("+MeanFunction.name+"(a),2))"; -} -class SandardDeviationVariableFunction { - public static final String name = "stddev"; - public static final String params = "a"; - public static final String function = SquareRootVariableFunction.name+"("+VarianceVariableFunction.name+"(a))"; -} -class CSVVariableFunction { - public static final String name = "csv"; - public static final String params = "a"+ExpressionFactory.variableLengthParamSuffix; - public static final String function = SeparatedConcatFunction.name+"(',',a)"; -} -class CSVOutputVariableFunction { - public static final String name = "csv_output"; - public static final String params = "a"+ExpressionFactory.variableLengthParamSuffix; - public static final String function = "concat_sep(',',a"+ExpressionFactory.variableForEachSep+FillMissingFunction.name+"("+SeparatedConcatFunction.name+"(';',"+ExpressionFactory.variableForEachParam+"),''))"; -} \ No newline at end of file diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/facet/PivotFacet.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/facet/PivotFacet.java index d06bba804596..d6ff05e19aa7 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/facet/PivotFacet.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/facet/PivotFacet.java @@ -72,43 +72,44 @@ public NamedList<Object>
createOldResponse() { public Iterable<Map<String,Object>> createResponse() { return pivotHead.createResponse(); } -} -/** - * Typed Pivot class that stores the overall Pivot data and head of the Pivot node chain. - * - * This class exists so that the {@link PivotFacet} class doesn't have to be typed ( {@code <T>} ). - */ -class PivotHead<T> implements StreamingFacet { - private final PivotNode<T> topPivot; - private final Map<String,T> pivotValues; - - public PivotHead(PivotNode<T> topPivot) { - this.topPivot = topPivot; - this.pivotValues = new HashMap<>(); - } - - public void setReductionCollectionManager(ReductionCollectionManager collectionManager) { - topPivot.setReductionCollectionManager(collectionManager); - } - - public void setExpressionCalculator(ExpressionCalculator expressionCalculator) { - topPivot.setExpressionCalculator(expressionCalculator); - } - @Override - public void addFacetValueCollectionTargets() { - topPivot.addFacetValueCollectionTargets(pivotValues); - } - - public void importShardData(DataInput input) throws IOException { - topPivot.importPivot(input, pivotValues); - } - - public void exportShardData(DataOutput output) throws IOException { - topPivot.exportPivot(output, pivotValues); - } - - public Iterable<Map<String,Object>> createResponse() { - return topPivot.getPivotedResponse(pivotValues); + /** + * Typed Pivot class that stores the overall Pivot data and head of the Pivot node chain. + * + * This class exists so that the {@link PivotFacet} class doesn't have to be typed ( {@code <T>} ). + */ + private static class PivotHead<T> implements StreamingFacet { + private final PivotNode<T> topPivot; + private final Map<String,T> pivotValues; + + public PivotHead(PivotNode<T> topPivot) { + this.topPivot = topPivot; + this.pivotValues = new HashMap<>(); + } + + public void setReductionCollectionManager(ReductionCollectionManager collectionManager) { + topPivot.setReductionCollectionManager(collectionManager); + } + + public void setExpressionCalculator(ExpressionCalculator expressionCalculator) { + topPivot.setExpressionCalculator(expressionCalculator); + } + + @Override + public void addFacetValueCollectionTargets() { + topPivot.addFacetValueCollectionTargets(pivotValues); + } + + public void importShardData(DataInput input) throws IOException { + topPivot.importPivot(input, pivotValues); + } + + public void exportShardData(DataOutput output) throws IOException { + topPivot.exportPivot(output, pivotValues); + } + + public Iterable<Map<String,Object>> createResponse() { + return topPivot.getPivotedResponse(pivotValues); + } } -} \ No newline at end of file +} diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/ComparisonFunction.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/ComparisonFunction.java index 1ecc930c44c9..4b7497d1b1d1 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/ComparisonFunction.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/ComparisonFunction.java @@ -17,7 +17,6 @@ package org.apache.solr.analytics.function.mapping; import org.apache.solr.analytics.ExpressionFactory.CreatorFunction; -import org.apache.solr.analytics.function.mapping.ComparisonFunction.CompResultFunction; import org.apache.solr.analytics.util.function.BooleanConsumer; import org.apache.solr.analytics.value.AnalyticsValue; import org.apache.solr.analytics.value.AnalyticsValueStream; @@ -141,178 +140,183 @@ public static interface CompResultFunction { private static CompResultFunction reverse(CompResultFunction original) { return val
-> original.apply(val*-1); } -} -/** - * A comparison function for two {@link DoubleValue}s. - */ -class CompareDoubleValueFunction extends AbstractBooleanValue { - private final DoubleValue exprA; - private final DoubleValue exprB; - private final CompResultFunction comp; - private final String name; - private final String funcStr; - private final ExpressionType funcType; - public CompareDoubleValueFunction(String name, DoubleValue exprA, DoubleValue exprB, CompResultFunction comp) { - this.name = name; - this.exprA = exprA; - this.exprB = exprB; - this.comp = comp; - this.funcStr = AnalyticsValueStream.createExpressionString(name,exprA,exprB); - this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,exprA,exprB); - } + /** + * A comparison function for two {@link DoubleValue}s. + */ + static class CompareDoubleValueFunction extends AbstractBooleanValue { + private final DoubleValue exprA; + private final DoubleValue exprB; + private final CompResultFunction comp; + private final String name; + private final String funcStr; + private final ExpressionType funcType; - private boolean exists = false; - @Override - public boolean getBoolean() { - double valueA = exprA.getDouble(); - double valueB = exprB.getDouble(); - exists = exprA.exists() && exprB.exists(); - return exists ? comp.apply(Double.compare(valueA,valueB)) : false; - } - @Override - public boolean exists() { - return exists; - } + public CompareDoubleValueFunction(String name, DoubleValue exprA, DoubleValue exprB, CompResultFunction comp) { + this.name = name; + this.exprA = exprA; + this.exprB = exprB; + this.comp = comp; + this.funcStr = AnalyticsValueStream.createExpressionString(name,exprA,exprB); + this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,exprA,exprB); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return funcStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -/** - * A comparison function for a {@link DoubleValue} and a {@link DoubleValueStream}. - */ -class CompareDoubleStreamFunction extends AbstractBooleanValueStream { - private final DoubleValue baseExpr; - private final DoubleValueStream compExpr; - private final CompResultFunction comp; - private final String name; - private final String funcStr; - private final ExpressionType funcType; + private boolean exists = false; + @Override + public boolean getBoolean() { + double valueA = exprA.getDouble(); + double valueB = exprB.getDouble(); + exists = exprA.exists() && exprB.exists(); + return exists ? 
comp.apply(Double.compare(valueA,valueB)) : false; + } + @Override + public boolean exists() { + return exists; + } - public CompareDoubleStreamFunction(String name, DoubleValue baseExpr, DoubleValueStream compExpr, CompResultFunction comp) throws SolrException { - this.name = name; - this.baseExpr = baseExpr; - this.compExpr = compExpr; - this.comp = comp; - this.funcStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,baseExpr,compExpr); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return funcStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public void streamBooleans(BooleanConsumer cons) { - double baseValue = baseExpr.getDouble(); - if (baseExpr.exists()) { - compExpr.streamDoubles(compValue -> cons.accept(comp.apply(Double.compare(baseValue,compValue)))); + /** + * A comparison function for a {@link DoubleValue} and a {@link DoubleValueStream}. + */ + static class CompareDoubleStreamFunction extends AbstractBooleanValueStream { + private final DoubleValue baseExpr; + private final DoubleValueStream compExpr; + private final CompResultFunction comp; + private final String name; + private final String funcStr; + private final ExpressionType funcType; + + public CompareDoubleStreamFunction(String name, DoubleValue baseExpr, DoubleValueStream compExpr, CompResultFunction comp) throws SolrException { + this.name = name; + this.baseExpr = baseExpr; + this.compExpr = compExpr; + this.comp = comp; + this.funcStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,baseExpr,compExpr); } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return funcStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -/** - * A comparison function for two {@link DateValue}s. - */ -class CompareDateValueFunction extends AbstractBooleanValue { - private final DateValue exprA; - private final DateValue exprB; - private final CompResultFunction comp; - private final String name; - private final String funcStr; - private final ExpressionType funcType; + @Override + public void streamBooleans(BooleanConsumer cons) { + double baseValue = baseExpr.getDouble(); + if (baseExpr.exists()) { + compExpr.streamDoubles(compValue -> cons.accept(comp.apply(Double.compare(baseValue,compValue)))); + } + } - public CompareDateValueFunction(String name, DateValue exprA, DateValue exprB, CompResultFunction comp) { - this.name = name; - this.exprA = exprA; - this.exprB = exprB; - this.comp = comp; - this.funcStr = AnalyticsValueStream.createExpressionString(name,exprA,exprB); - this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,exprA,exprB); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return funcStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - private boolean exists = false; - @Override - public boolean getBoolean() { - long valueA = exprA.getLong(); - long valueB = exprB.getLong(); - exists = exprA.exists() && exprB.exists(); - return exists ? comp.apply(Long.compare(valueA,valueB)) : false; - } - @Override - public boolean exists() { - return exists; - } + /** + * A comparison function for two {@link DateValue}s. 
+ */ + static class CompareDateValueFunction extends AbstractBooleanValue { + private final DateValue exprA; + private final DateValue exprB; + private final CompResultFunction comp; + private final String name; + private final String funcStr; + private final ExpressionType funcType; - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return funcStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -/** - * A comparison function for a {@link DateValue} and a {@link DateValueStream}. - */ -class CompareDateStreamFunction extends AbstractBooleanValueStream { - private final DateValue baseExpr; - private final DateValueStream compExpr; - private final CompResultFunction comp; - private final String name; - private final String funcStr; - private final ExpressionType funcType; + public CompareDateValueFunction(String name, DateValue exprA, DateValue exprB, CompResultFunction comp) { + this.name = name; + this.exprA = exprA; + this.exprB = exprB; + this.comp = comp; + this.funcStr = AnalyticsValueStream.createExpressionString(name,exprA,exprB); + this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,exprA,exprB); + } - public CompareDateStreamFunction(String name, DateValue baseExpr, DateValueStream compExpr, CompResultFunction comp) throws SolrException { - this.name = name; - this.baseExpr = baseExpr; - this.compExpr = compExpr; - this.comp = comp; - this.funcStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,baseExpr,compExpr); - } + private boolean exists = false; + @Override + public boolean getBoolean() { + long valueA = exprA.getLong(); + long valueB = exprB.getLong(); + exists = exprA.exists() && exprB.exists(); + return exists ? comp.apply(Long.compare(valueA,valueB)) : false; + } + @Override + public boolean exists() { + return exists; + } - @Override - public void streamBooleans(BooleanConsumer cons) { - long baseValue = baseExpr.getLong(); - if (baseExpr.exists()) { - compExpr.streamLongs(compValue -> cons.accept(comp.apply(Long.compare(baseValue,compValue)))); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return funcStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return funcStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + /** + * A comparison function for a {@link DateValue} and a {@link DateValueStream}. 
+ */ + static class CompareDateStreamFunction extends AbstractBooleanValueStream { + private final DateValue baseExpr; + private final DateValueStream compExpr; + private final CompResultFunction comp; + private final String name; + private final String funcStr; + private final ExpressionType funcType; + + public CompareDateStreamFunction(String name, DateValue baseExpr, DateValueStream compExpr, CompResultFunction comp) throws SolrException { + this.name = name; + this.baseExpr = baseExpr; + this.compExpr = compExpr; + this.comp = comp; + this.funcStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,baseExpr,compExpr); + } + + @Override + public void streamBooleans(BooleanConsumer cons) { + long baseValue = baseExpr.getLong(); + if (baseExpr.exists()) { + compExpr.streamLongs(compValue -> cons.accept(comp.apply(Long.compare(baseValue,compValue)))); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return funcStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} \ No newline at end of file +} + diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/DateMathFunction.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/DateMathFunction.java index 8a5756144abd..93fee3e3e914 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/DateMathFunction.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/DateMathFunction.java @@ -62,97 +62,100 @@ public class DateMathFunction { throw new SolrException(ErrorCode.BAD_REQUEST,"The "+name+" function requires a date as the first parameter."); } }); -} -/** - * DateMath function that supports {@link DateValue}s. - */ -class DateMathValueFunction extends AbstractDateValue { - private final DateValue dateParam; - private final String mathParam; - DateMathParser parser = new DateMathParser(); - public static final String name = DateMathFunction.name; - private final String exprStr; - private final ExpressionType funcType; - public DateMathValueFunction(DateValue dateParam, ConstantStringValue mathParam) throws SolrException { - this.dateParam = dateParam; - this.mathParam = "NOW" + mathParam.getString(); - this.exprStr = AnalyticsValueStream.createExpressionString(name,dateParam,mathParam); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,dateParam); - } + /** + * DateMath function that supports {@link DateValue}s. + */ + static class DateMathValueFunction extends AbstractDateValue { + private final DateValue dateParam; + private final String mathParam; + DateMathParser parser = new DateMathParser(); + public static final String name = DateMathFunction.name; + private final String exprStr; + private final ExpressionType funcType; - private boolean exists = false; + public DateMathValueFunction(DateValue dateParam, ConstantStringValue mathParam) throws SolrException { + this.dateParam = dateParam; + this.mathParam = "NOW" + mathParam.getString(); + this.exprStr = AnalyticsValueStream.createExpressionString(name,dateParam,mathParam); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,dateParam); + } - @Override - public long getLong() { - Date date = getDate(); - return (exists) ? 
date.getTime() : 0; - } - @Override - public Date getDate() { - Date date = dateParam.getDate(); - if (dateParam.exists()) { - exists = true; - return DateMathParser.parseMath(date,mathParam); - } else { - exists = false; - return null; + private boolean exists = false; + + @Override + public long getLong() { + Date date = getDate(); + return (exists) ? date.getTime() : 0; + } + @Override + public Date getDate() { + Date date = dateParam.getDate(); + if (dateParam.exists()) { + exists = true; + return DateMathParser.parseMath(date,mathParam); + } else { + exists = false; + return null; + } + } + @Override + public boolean exists() { + return exists; } - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -/** - * DateMath function that supports {@link DateValueStream}s. - */ -class DateMathStreamFunction extends AbstractDateValueStream { - private final DateValueStream dateParam; - private final String mathParam; - public static final String name = DateMathFunction.name; - private final String exprStr; - private final ExpressionType funcType; - public DateMathStreamFunction(DateValueStream dateParam, ConstantStringValue mathParam) throws SolrException { - this.dateParam = dateParam; - this.mathParam = "NOW" + mathParam.getString(); - this.exprStr = AnalyticsValueStream.createExpressionString(name,dateParam,mathParam); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,dateParam); - } + /** + * DateMath function that supports {@link DateValueStream}s. 
+ */ + static class DateMathStreamFunction extends AbstractDateValueStream { + private final DateValueStream dateParam; + private final String mathParam; + public static final String name = DateMathFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public void streamLongs(LongConsumer cons) { - streamDates(value -> cons.accept(value.getTime())); - } - @Override - public void streamDates(Consumer cons) { - dateParam.streamDates(value -> cons.accept(DateMathParser.parseMath(value, mathParam))); - } + public DateMathStreamFunction(DateValueStream dateParam, ConstantStringValue mathParam) throws SolrException { + this.dateParam = dateParam; + this.mathParam = "NOW" + mathParam.getString(); + this.exprStr = AnalyticsValueStream.createExpressionString(name,dateParam,mathParam); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,dateParam); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public void streamLongs(LongConsumer cons) { + streamDates(value -> cons.accept(value.getTime())); + } + @Override + public void streamDates(Consumer cons) { + dateParam.streamDates(value -> cons.accept(DateMathParser.parseMath(value, mathParam))); + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} \ No newline at end of file +} + diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/DateParseFunction.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/DateParseFunction.java index 83c0ba7256d8..9462e4385521 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/DateParseFunction.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/DateParseFunction.java @@ -61,146 +61,151 @@ else if (params[0] instanceof StringValueStream) { "Incorrect parameter: "+params[0].getExpressionStr()); } }); -} -class LongToDateParseFunction extends AbstractDateValue { - private final LongValue param; - public static final String name = DateParseFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public LongToDateParseFunction(LongValue param) throws SolrException { - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); - } - @Override - public long getLong() { - return param.getLong(); - } - @Override - public boolean exists() { - return param.exists(); - } + static class LongToDateParseFunction extends AbstractDateValue { + private final LongValue param; + public static final String name = DateParseFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class LongStreamToDateParseFunction extends AbstractDateValueStream { - private final LongValueStream param; - public static final String name = DateParseFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public 
LongStreamToDateParseFunction(LongValueStream param) throws SolrException { - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); - } + public LongToDateParseFunction(LongValue param) throws SolrException { + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } - @Override - public void streamLongs(LongConsumer cons) { - param.streamLongs(cons); - } + @Override + public long getLong() { + return param.getLong(); + } + @Override + public boolean exists() { + return param.exists(); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class StringToDateParseFunction extends AbstractDateValue { - private final StringValue param; - public static final String name = DateParseFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public StringToDateParseFunction(StringValue param) throws SolrException { - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - private boolean exists = false; - @Override - public long getLong() { - long value = 0; - try { - String paramStr = param.getString(); - exists = param.exists(); - if (exists) { - value = Instant.parse(paramStr).toEpochMilli(); - } - } catch (DateTimeParseException e) { - exists = false; + static class LongStreamToDateParseFunction extends AbstractDateValueStream { + private final LongValueStream param; + public static final String name = DateParseFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public LongStreamToDateParseFunction(LongValueStream param) throws SolrException { + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class StringStreamToDateParseFunction extends AbstractDateValueStream { - private final StringValueStream param; - public static final String name = DateParseFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public StringStreamToDateParseFunction(StringValueStream param) throws SolrException { - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + @Override + public void streamLongs(LongConsumer cons) { + param.streamLongs(cons); + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - 
@Override - public void streamLongs(LongConsumer cons) { - param.streamStrings(value -> { + static class StringToDateParseFunction extends AbstractDateValue { + private final StringValue param; + public static final String name = DateParseFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public StringToDateParseFunction(StringValue param) throws SolrException { + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } + + private boolean exists = false; + @Override + public long getLong() { + long value = 0; try { - cons.accept(Instant.parse(value).toEpochMilli()); - } catch (DateTimeParseException e) {} - }); - } + String paramStr = param.getString(); + exists = param.exists(); + if (exists) { + value = Instant.parse(paramStr).toEpochMilli(); + } + } catch (DateTimeParseException e) { + exists = false; + } + return value; + } + @Override + public boolean exists() { + return exists; + } - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public ExpressionType getExpressionType() { - return funcType; + + static class StringStreamToDateParseFunction extends AbstractDateValueStream { + private final StringValueStream param; + public static final String name = DateParseFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public StringStreamToDateParseFunction(StringValueStream param) throws SolrException { + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } + + @Override + public void streamLongs(LongConsumer cons) { + param.streamStrings(value -> { + try { + cons.accept(Instant.parse(value).toEpochMilli()); + } catch (DateTimeParseException e) {} + }); + } + + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } } + diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/DecimalNumericConversionFunction.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/DecimalNumericConversionFunction.java index c8881ee7b22b..fcc84006797a 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/DecimalNumericConversionFunction.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/DecimalNumericConversionFunction.java @@ -20,8 +20,6 @@ import java.util.function.LongConsumer; import org.apache.solr.analytics.ExpressionFactory.CreatorFunction; -import org.apache.solr.analytics.function.mapping.DecimalNumericConversionFunction.ConvertDoubleFunction; -import org.apache.solr.analytics.function.mapping.DecimalNumericConversionFunction.ConvertFloatFunction; import org.apache.solr.analytics.value.AnalyticsValueStream; import org.apache.solr.analytics.value.DoubleValue; import org.apache.solr.analytics.value.DoubleValueStream; @@ -103,7 +101,7 @@ public static class CeilingFunction { public static class RoundFunction { public static final String 
name = "round"; public static final CreatorFunction creatorFunction = (params -> { - return DecimalNumericConversionFunction.createDecimalConversionFunction(name, val -> (int)Math.round(val), val -> (long)Math.round(val), params); + return DecimalNumericConversionFunction.createDecimalConversionFunction(name, val -> Math.round(val), val -> Math.round(val), params); }); } @@ -116,156 +114,161 @@ public static interface ConvertFloatFunction { public static interface ConvertDoubleFunction { public long convert(double value); } -} -/** - * A function to convert a {@link FloatValue} to a {@link IntValue}. - */ -class ConvertFloatValueFunction extends AbstractIntValue { - private final String name; - private final FloatValue param; - private final ConvertFloatFunction conv; - private final String funcStr; - private final ExpressionType funcType; - public ConvertFloatValueFunction(String name, FloatValue param, ConvertFloatFunction conv) { - this.name = name; - this.param = param; - this.conv = conv; - this.funcStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,param); - } + /** + * A function to convert a {@link FloatValue} to a {@link IntValue}. + */ + static class ConvertFloatValueFunction extends AbstractIntValue { + private final String name; + private final FloatValue param; + private final ConvertFloatFunction conv; + private final String funcStr; + private final ExpressionType funcType; - @Override - public int getInt() { - return conv.convert(param.getFloat()); - } - @Override - public boolean exists() { - return param.exists(); - } + public ConvertFloatValueFunction(String name, FloatValue param, ConvertFloatFunction conv) { + this.name = name; + this.param = param; + this.conv = conv; + this.funcStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,param); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return funcStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -/** - * A function to convert a {@link FloatValueStream} to a {@link IntValueStream}. - */ -class ConvertFloatStreamFunction extends AbstractIntValueStream { - private final String name; - private final FloatValueStream param; - private final ConvertFloatFunction conv; - private final String funcStr; - private final ExpressionType funcType; + @Override + public int getInt() { + return conv.convert(param.getFloat()); + } + @Override + public boolean exists() { + return param.exists(); + } - public ConvertFloatStreamFunction(String name, FloatValueStream param, ConvertFloatFunction conv) { - this.name = name; - this.param = param; - this.conv = conv; - this.funcStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,param); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return funcStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public void streamInts(IntConsumer cons) { - param.streamFloats( value -> cons.accept(conv.convert(value))); - } + /** + * A function to convert a {@link FloatValueStream} to a {@link IntValueStream}. 
+ */ + static class ConvertFloatStreamFunction extends AbstractIntValueStream { + private final String name; + private final FloatValueStream param; + private final ConvertFloatFunction conv; + private final String funcStr; + private final ExpressionType funcType; - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return funcStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -/** - * A function to convert a {@link DoubleValue} to a {@link LongValue}. - */ -class ConvertDoubleValueFunction extends AbstractLongValue { - private final String name; - private final DoubleValue param; - private final ConvertDoubleFunction conv; - private final String funcStr; - private final ExpressionType funcType; + public ConvertFloatStreamFunction(String name, FloatValueStream param, ConvertFloatFunction conv) { + this.name = name; + this.param = param; + this.conv = conv; + this.funcStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,param); + } - public ConvertDoubleValueFunction(String name, DoubleValue param, ConvertDoubleFunction conv) { - this.name = name; - this.param = param; - this.conv = conv; - this.funcStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,param); - } + @Override + public void streamInts(IntConsumer cons) { + param.streamFloats( value -> cons.accept(conv.convert(value))); + } - @Override - public long getLong() { - return conv.convert(param.getDouble()); - } - @Override - public boolean exists() { - return param.exists(); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return funcStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return funcStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -/** - * A function to convert a {@link DoubleValueStream} to a {@link LongValueStream}. - */ -class ConvertDoubleStreamFunction extends AbstractLongValueStream { - private final String name; - private final DoubleValueStream param; - private final ConvertDoubleFunction conv; - private final String funcStr; - private final ExpressionType funcType; + /** + * A function to convert a {@link DoubleValue} to a {@link LongValue}. 
+ */ + static class ConvertDoubleValueFunction extends AbstractLongValue { + private final String name; + private final DoubleValue param; + private final ConvertDoubleFunction conv; + private final String funcStr; + private final ExpressionType funcType; - public ConvertDoubleStreamFunction(String name, DoubleValueStream param, ConvertDoubleFunction conv) { - this.name = name; - this.param = param; - this.conv = conv; - this.funcStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,param); - } + public ConvertDoubleValueFunction(String name, DoubleValue param, ConvertDoubleFunction conv) { + this.name = name; + this.param = param; + this.conv = conv; + this.funcStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,param); + } - @Override - public void streamLongs(LongConsumer cons) { - param.streamDoubles( value -> cons.accept(conv.convert(value))); - } + @Override + public long getLong() { + return conv.convert(param.getDouble()); + } + @Override + public boolean exists() { + return param.exists(); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return funcStr; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return funcStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public ExpressionType getExpressionType() { - return funcType; + + /** + * A function to convert a {@link DoubleValueStream} to a {@link LongValueStream}. + */ + static class ConvertDoubleStreamFunction extends AbstractLongValueStream { + private final String name; + private final DoubleValueStream param; + private final ConvertDoubleFunction conv; + private final String funcStr; + private final ExpressionType funcType; + + public ConvertDoubleStreamFunction(String name, DoubleValueStream param, ConvertDoubleFunction conv) { + this.name = name; + this.param = param; + this.conv = conv; + this.funcStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,param); + } + + @Override + public void streamLongs(LongConsumer cons) { + param.streamDoubles( value -> cons.accept(conv.convert(value))); + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return funcStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} \ No newline at end of file +} + diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/EqualFunction.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/EqualFunction.java index 18a8bce206de..9d1bbf20da4e 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/EqualFunction.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/EqualFunction.java @@ -73,166 +73,171 @@ public class EqualFunction { } throw new SolrException(ErrorCode.BAD_REQUEST,"The "+name+" function requires that at least 1 parameter be single-valued."); }); -} -/** - * An equal function for two {@link BooleanValue}s. 
- */ -class BooleanValueEqualFunction extends AbstractBooleanValue { - private final BooleanValue exprA; - private final BooleanValue exprB; - public static final String name = EqualFunction.name; - private final String funcStr; - private final ExpressionType funcType; - - public BooleanValueEqualFunction(BooleanValue exprA, BooleanValue exprB) { - this.exprA = exprA; - this.exprB = exprB; - this.funcStr = AnalyticsValueStream.createExpressionString(name,exprA,exprB); - this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,exprA,exprB); - } - private boolean exists = false; - @Override - public boolean getBoolean() { - boolean valueA = exprA.getBoolean(); - boolean valueB = exprB.getBoolean(); - exists = exprA.exists() && exprB.exists(); - return exists ? valueA == valueB : false; - } - @Override - public boolean exists() { - return exists; - } + /** + * An equal function for two {@link BooleanValue}s. + */ + static class BooleanValueEqualFunction extends AbstractBooleanValue { + private final BooleanValue exprA; + private final BooleanValue exprB; + public static final String name = EqualFunction.name; + private final String funcStr; + private final ExpressionType funcType; + + public BooleanValueEqualFunction(BooleanValue exprA, BooleanValue exprB) { + this.exprA = exprA; + this.exprB = exprB; + this.funcStr = AnalyticsValueStream.createExpressionString(name,exprA,exprB); + this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,exprA,exprB); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return funcStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -/** - * An equal function for a {@link BooleanValue} and a {@link BooleanValueStream}. - */ -class BooleanStreamEqualFunction extends AbstractBooleanValueStream { - private final BooleanValue baseExpr; - private final BooleanValueStream compExpr; - public static final String name = EqualFunction.name; - private final String funcStr; - private final ExpressionType funcType; - - public BooleanStreamEqualFunction(BooleanValue baseExpr, BooleanValueStream compExpr) throws SolrException { - this.baseExpr = baseExpr; - this.compExpr = compExpr; - this.funcStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,baseExpr,compExpr); - } + private boolean exists = false; + @Override + public boolean getBoolean() { + boolean valueA = exprA.getBoolean(); + boolean valueB = exprB.getBoolean(); + exists = exprA.exists() && exprB.exists(); + return exists ? valueA == valueB : false; + } + @Override + public boolean exists() { + return exists; + } - @Override - public void streamBooleans(BooleanConsumer cons) { - boolean baseValue = baseExpr.getBoolean(); - if (baseExpr.exists()) { - compExpr.streamBooleans(compValue -> cons.accept(baseValue == compValue)); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return funcStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return funcStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -/** - * A catch-all equal function for two {@link AnalyticsValue}s. 
- */ -class ValueEqualFunction extends AbstractBooleanValue { - private final AnalyticsValue exprA; - private final AnalyticsValue exprB; - public static final String name = EqualFunction.name; - private final String funcStr; - private final ExpressionType funcType; - - public ValueEqualFunction(AnalyticsValue exprA, AnalyticsValue exprB) { - this.exprA = exprA; - this.exprB = exprB; - this.funcStr = AnalyticsValueStream.createExpressionString(name,exprA,exprB); - this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,exprA,exprB); - } + /** + * An equal function for a {@link BooleanValue} and a {@link BooleanValueStream}. + */ + static class BooleanStreamEqualFunction extends AbstractBooleanValueStream { + private final BooleanValue baseExpr; + private final BooleanValueStream compExpr; + public static final String name = EqualFunction.name; + private final String funcStr; + private final ExpressionType funcType; + + public BooleanStreamEqualFunction(BooleanValue baseExpr, BooleanValueStream compExpr) throws SolrException { + this.baseExpr = baseExpr; + this.compExpr = compExpr; + this.funcStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,baseExpr,compExpr); + } - private boolean exists = false; - @Override - public boolean getBoolean() { - Object valueA = exprA.getObject(); - Object valueB = exprB.getObject(); - exists = exprA.exists() && exprB.exists(); - return exists ? valueA.equals(valueB) : false; - } - @Override - public boolean exists() { - return exists; - } + @Override + public void streamBooleans(BooleanConsumer cons) { + boolean baseValue = baseExpr.getBoolean(); + if (baseExpr.exists()) { + compExpr.streamBooleans(compValue -> cons.accept(baseValue == compValue)); + } + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return funcStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -/** - * A catch-all equal function for an {@link AnalyticsValue} and an {@link AnalyticsValueStream}. - */ -class StreamEqualFunction extends AbstractBooleanValueStream { - private final AnalyticsValue baseExpr; - private final AnalyticsValueStream compExpr; - public static final String name = EqualFunction.name; - private final String funcStr; - private final ExpressionType funcType; - - public StreamEqualFunction(AnalyticsValue baseExpr, AnalyticsValueStream compExpr) throws SolrException { - this.baseExpr = baseExpr; - this.compExpr = compExpr; - this.funcStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,baseExpr,compExpr); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return funcStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public void streamBooleans(BooleanConsumer cons) { - Object baseValue = baseExpr.getObject(); - if (baseExpr.exists()) { - compExpr.streamObjects(compValue -> cons.accept(baseValue.equals(compValue))); + /** + * A catch-all equal function for two {@link AnalyticsValue}s. 
+ */ + static class ValueEqualFunction extends AbstractBooleanValue { + private final AnalyticsValue exprA; + private final AnalyticsValue exprB; + public static final String name = EqualFunction.name; + private final String funcStr; + private final ExpressionType funcType; + + public ValueEqualFunction(AnalyticsValue exprA, AnalyticsValue exprB) { + this.exprA = exprA; + this.exprB = exprB; + this.funcStr = AnalyticsValueStream.createExpressionString(name,exprA,exprB); + this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,exprA,exprB); } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return funcStr; + private boolean exists = false; + @Override + public boolean getBoolean() { + Object valueA = exprA.getObject(); + Object valueB = exprB.getObject(); + exists = exprA.exists() && exprB.exists(); + return exists ? valueA.equals(valueB) : false; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return funcStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public ExpressionType getExpressionType() { - return funcType; + + /** + * A catch-all equal function for an {@link AnalyticsValue} and an {@link AnalyticsValueStream}. + */ + static class StreamEqualFunction extends AbstractBooleanValueStream { + private final AnalyticsValue baseExpr; + private final AnalyticsValueStream compExpr; + public static final String name = EqualFunction.name; + private final String funcStr; + private final ExpressionType funcType; + + public StreamEqualFunction(AnalyticsValue baseExpr, AnalyticsValueStream compExpr) throws SolrException { + this.baseExpr = baseExpr; + this.compExpr = compExpr; + this.funcStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(funcStr,baseExpr,compExpr); + } + + @Override + public void streamBooleans(BooleanConsumer cons) { + Object baseValue = baseExpr.getObject(); + if (baseExpr.exists()) { + compExpr.streamObjects(compValue -> cons.accept(baseValue.equals(compValue))); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return funcStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } } + diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/ExistsFunction.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/ExistsFunction.java index 0e2891751481..44f571db7420 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/ExistsFunction.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/ExistsFunction.java @@ -42,82 +42,85 @@ public class ExistsFunction { } return new ValueStreamExistsFunction(param); }); -} -/** - * Exists function that supports {@link AnalyticsValueStream}s. 
- */ -class ValueStreamExistsFunction extends AbstractBooleanValue { - private final AnalyticsValueStream param; - public static final String name = ExistsFunction.name; - private final String exprStr; - private final ExpressionType funcType; - public ValueStreamExistsFunction(AnalyticsValueStream param) throws SolrException { - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); - } + /** + * Exists function that supports {@link AnalyticsValueStream}s. + */ + static class ValueStreamExistsFunction extends AbstractBooleanValue { + private final AnalyticsValueStream param; + public static final String name = ExistsFunction.name; + private final String exprStr; + private final ExpressionType funcType; - private boolean exists; - @Override - public boolean getBoolean() { - exists = false; - param.streamObjects(val -> exists = true); - return exists; - } - @Override - public boolean exists() { - return true; - } + public ValueStreamExistsFunction(AnalyticsValueStream param) throws SolrException { + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -/** - * Exists function that supports {@link AnalyticsValue}s. - */ -class ValueExistsFunction extends AbstractBooleanValue { - private final AnalyticsValue param; - public static final String name = ExistsFunction.name; - private final String exprStr; - private final ExpressionType funcType; + private boolean exists; + @Override + public boolean getBoolean() { + exists = false; + param.streamObjects(val -> exists = true); + return exists; + } + @Override + public boolean exists() { + return true; + } - public ValueExistsFunction(AnalyticsValue param) throws SolrException { - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public boolean getBoolean() { - param.getObject(); - return param.exists(); - } - @Override - public boolean exists() { - return true; - } + /** + * Exists function that supports {@link AnalyticsValue}s. 
+ */ + static class ValueExistsFunction extends AbstractBooleanValue { + private final AnalyticsValue param; + public static final String name = ExistsFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + public ValueExistsFunction(AnalyticsValue param) throws SolrException { + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } + + @Override + public boolean getBoolean() { + param.getObject(); + return param.exists(); + } + @Override + public boolean exists() { + return true; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } } + diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/FillMissingFunction.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/FillMissingFunction.java index 7bc38fd3fa6c..592fc1a825b4 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/FillMissingFunction.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/FillMissingFunction.java @@ -128,716 +128,733 @@ public class FillMissingFunction { } return new StreamFillMissingFunction(baseExpr,fillExpr); }); -} -class StreamFillMissingFunction extends AbstractAnalyticsValueStream implements Consumer { - private final AnalyticsValueStream baseExpr; - private final AnalyticsValueStream fillExpr; - public static final String name = FillMissingFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public StreamFillMissingFunction(AnalyticsValueStream baseExpr, AnalyticsValueStream fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); - } - boolean exists = false; - Consumer cons; + static class StreamFillMissingFunction extends AbstractAnalyticsValueStream implements Consumer { + private final AnalyticsValueStream baseExpr; + private final AnalyticsValueStream fillExpr; + public static final String name = FillMissingFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public void streamObjects(Consumer cons) { - exists = false; - this.cons = cons; - baseExpr.streamObjects(this); - if (!exists) { - fillExpr.streamObjects(cons); + public StreamFillMissingFunction(AnalyticsValueStream baseExpr, AnalyticsValueStream fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); } - } - @Override - public void accept(Object value) { - exists = true; - cons.accept(value); - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} 
-class ValueFillMissingFunction extends AbstractAnalyticsValue { - private final AnalyticsValue baseExpr; - private final AnalyticsValue fillExpr; - public static final String name = FillMissingFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public ValueFillMissingFunction(AnalyticsValue baseExpr, AnalyticsValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); - } + boolean exists = false; + Consumer cons; - boolean exists = false; + @Override + public void streamObjects(Consumer cons) { + exists = false; + this.cons = cons; + baseExpr.streamObjects(this); + if (!exists) { + fillExpr.streamObjects(cons); + } + } + @Override + public void accept(Object value) { + exists = true; + cons.accept(value); + } - @Override - public Object getObject() { - Object value = baseExpr.getObject(); - exists = true; - if (!baseExpr.exists()) { - value = fillExpr.getObject(); - exists = fillExpr.exists(); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } - return value; - } - @Override - public boolean exists() { - return exists; } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class BooleanStreamFillMissingFunction extends AbstractBooleanValueStream implements BooleanConsumer { - private final BooleanValueStream baseExpr; - private final BooleanValueStream fillExpr; - public static final String name = FillMissingFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public BooleanStreamFillMissingFunction(BooleanValueStream baseExpr, BooleanValueStream fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); - } + static class ValueFillMissingFunction extends AbstractAnalyticsValue { + private final AnalyticsValue baseExpr; + private final AnalyticsValue fillExpr; + public static final String name = FillMissingFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public ValueFillMissingFunction(AnalyticsValue baseExpr, AnalyticsValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); + } - boolean exists = false; - BooleanConsumer cons; + boolean exists = false; - @Override - public void streamBooleans(BooleanConsumer cons) { - exists = false; - this.cons = cons; - baseExpr.streamBooleans(this); - if (!exists) { - fillExpr.streamBooleans(cons); + @Override + public Object getObject() { + Object value = baseExpr.getObject(); + exists = true; + if (!baseExpr.exists()) { + value = fillExpr.getObject(); + exists = fillExpr.exists(); + } + return value; + } + @Override + public boolean exists() { + return exists; } - } - @Override - public void 
accept(boolean value) { - exists = true; - cons.accept(value); - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class BooleanFillMissingFunction extends AbstractBooleanValue { - private final BooleanValue baseExpr; - private final BooleanValue fillExpr; - public static final String name = FillMissingFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public BooleanFillMissingFunction(BooleanValue baseExpr, BooleanValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - boolean exists = false; + static class BooleanStreamFillMissingFunction extends AbstractBooleanValueStream implements BooleanConsumer { + private final BooleanValueStream baseExpr; + private final BooleanValueStream fillExpr; + public static final String name = FillMissingFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public boolean getBoolean() { - boolean value = baseExpr.getBoolean(); - exists = true; - if (!baseExpr.exists()) { - value = fillExpr.getBoolean(); - exists = fillExpr.exists(); + public BooleanStreamFillMissingFunction(BooleanValueStream baseExpr, BooleanValueStream fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class IntStreamFillMissingFunction extends AbstractIntValueStream implements IntConsumer { - private final IntValueStream baseExpr; - private final IntValueStream fillExpr; - public static final String name = FillMissingFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public IntStreamFillMissingFunction(IntValueStream baseExpr, IntValueStream fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); - } + boolean exists = false; + BooleanConsumer cons; - boolean exists = false; - IntConsumer cons; + @Override + public void streamBooleans(BooleanConsumer cons) { + exists = false; + this.cons = cons; + baseExpr.streamBooleans(this); + if (!exists) { + fillExpr.streamBooleans(cons); + } + } + @Override + public void accept(boolean value) { + exists = true; + cons.accept(value); + } - @Override - public void streamInts(IntConsumer cons) { - exists = false; - this.cons = cons; - baseExpr.streamInts(this); - if (!exists) { - fillExpr.streamInts(cons); + @Override + public String 
getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } - } - @Override - public void accept(int value) { - exists = true; - cons.accept(value); } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class IntFillMissingFunction extends AbstractIntValue { - private final IntValue baseExpr; - private final IntValue fillExpr; - public static final String name = FillMissingFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public IntFillMissingFunction(IntValue baseExpr, IntValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); - } + static class BooleanFillMissingFunction extends AbstractBooleanValue { + private final BooleanValue baseExpr; + private final BooleanValue fillExpr; + public static final String name = FillMissingFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public BooleanFillMissingFunction(BooleanValue baseExpr, BooleanValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); + } - boolean exists = false; + boolean exists = false; - @Override - public int getInt() { - int value = baseExpr.getInt(); - exists = true; - if (!baseExpr.exists()) { - value = fillExpr.getInt(); - exists = fillExpr.exists(); + @Override + public boolean getBoolean() { + boolean value = baseExpr.getBoolean(); + exists = true; + if (!baseExpr.exists()) { + value = fillExpr.getBoolean(); + exists = fillExpr.exists(); + } + return value; + } + @Override + public boolean exists() { + return exists; } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class LongStreamFillMissingFunction extends AbstractLongValueStream implements LongConsumer { - private final LongValueStream baseExpr; - private final LongValueStream fillExpr; - public static final String name = FillMissingFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public LongStreamFillMissingFunction(LongValueStream baseExpr, LongValueStream fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - boolean exists = false; - LongConsumer cons; + static class IntStreamFillMissingFunction extends AbstractIntValueStream implements IntConsumer { + private final IntValueStream 
baseExpr; + private final IntValueStream fillExpr; + public static final String name = FillMissingFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public void streamLongs(LongConsumer cons) { - exists = false; - this.cons = cons; - baseExpr.streamLongs(this); - if (!exists) { - fillExpr.streamLongs(cons); + public IntStreamFillMissingFunction(IntValueStream baseExpr, IntValueStream fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); } - } - @Override - public void accept(long value) { - exists = true; - cons.accept(value); - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class LongFillMissingFunction extends AbstractLongValue { - private final LongValue baseExpr; - private final LongValue fillExpr; - public static final String name = FillMissingFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public LongFillMissingFunction(LongValue baseExpr, LongValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); - } + boolean exists = false; + IntConsumer cons; - boolean exists = false; + @Override + public void streamInts(IntConsumer cons) { + exists = false; + this.cons = cons; + baseExpr.streamInts(this); + if (!exists) { + fillExpr.streamInts(cons); + } + } + @Override + public void accept(int value) { + exists = true; + cons.accept(value); + } - @Override - public long getLong() { - long value = baseExpr.getLong(); - exists = true; - if (!baseExpr.exists()) { - value = fillExpr.getLong(); - exists = fillExpr.exists(); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } - return value; - } - @Override - public boolean exists() { - return exists; } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class FloatStreamFillMissingFunction extends AbstractFloatValueStream implements FloatConsumer { - private final FloatValueStream baseExpr; - private final FloatValueStream fillExpr; - public static final String name = FillMissingFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public FloatStreamFillMissingFunction(FloatValueStream baseExpr, FloatValueStream fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); - } + static class IntFillMissingFunction extends AbstractIntValue { + private final IntValue baseExpr; + private final IntValue fillExpr; + public static final String name = FillMissingFunction.name; + private final String exprStr; + private final 
ExpressionType funcType; + + public IntFillMissingFunction(IntValue baseExpr, IntValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); + } - boolean exists = false; - FloatConsumer cons; + boolean exists = false; - @Override - public void streamFloats(FloatConsumer cons) { - exists = false; - this.cons = cons; - baseExpr.streamFloats(this); - if (!exists) { - fillExpr.streamFloats(cons); + @Override + public int getInt() { + int value = baseExpr.getInt(); + exists = true; + if (!baseExpr.exists()) { + value = fillExpr.getInt(); + exists = fillExpr.exists(); + } + return value; + } + @Override + public boolean exists() { + return exists; } - } - @Override - public void accept(float value) { - exists = true; - cons.accept(value); - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class FloatFillMissingFunction extends AbstractFloatValue { - private final FloatValue baseExpr; - private final FloatValue fillExpr; - public static final String name = FillMissingFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public FloatFillMissingFunction(FloatValue baseExpr, FloatValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - boolean exists = false; + static class LongStreamFillMissingFunction extends AbstractLongValueStream implements LongConsumer { + private final LongValueStream baseExpr; + private final LongValueStream fillExpr; + public static final String name = FillMissingFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public float getFloat() { - float value = baseExpr.getFloat(); - exists = true; - if (!baseExpr.exists()) { - value = fillExpr.getFloat(); - exists = fillExpr.exists(); + public LongStreamFillMissingFunction(LongValueStream baseExpr, LongValueStream fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DoubleStreamFillMissingFunction extends AbstractDoubleValueStream implements DoubleConsumer { - private final DoubleValueStream baseExpr; - private final DoubleValueStream fillExpr; - public static final String name = FillMissingFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public DoubleStreamFillMissingFunction(DoubleValueStream baseExpr, 
DoubleValueStream fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); - } + boolean exists = false; + LongConsumer cons; - boolean exists = false; - DoubleConsumer cons; + @Override + public void streamLongs(LongConsumer cons) { + exists = false; + this.cons = cons; + baseExpr.streamLongs(this); + if (!exists) { + fillExpr.streamLongs(cons); + } + } + @Override + public void accept(long value) { + exists = true; + cons.accept(value); + } - @Override - public void streamDoubles(DoubleConsumer cons) { - exists = false; - this.cons = cons; - baseExpr.streamDoubles(this); - if (!exists) { - fillExpr.streamDoubles(cons); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } - } - @Override - public void accept(double value) { - exists = true; - cons.accept(value); } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DoubleFillMissingFunction extends AbstractDoubleValue { - private final DoubleValue baseExpr; - private final DoubleValue fillExpr; - public static final String name = FillMissingFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public DoubleFillMissingFunction(DoubleValue baseExpr, DoubleValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); - } + static class LongFillMissingFunction extends AbstractLongValue { + private final LongValue baseExpr; + private final LongValue fillExpr; + public static final String name = FillMissingFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public LongFillMissingFunction(LongValue baseExpr, LongValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); + } - boolean exists = false; + boolean exists = false; - @Override - public double getDouble() { - double value = baseExpr.getDouble(); - exists = true; - if (!baseExpr.exists()) { - value = fillExpr.getDouble(); - exists = fillExpr.exists(); + @Override + public long getLong() { + long value = baseExpr.getLong(); + exists = true; + if (!baseExpr.exists()) { + value = fillExpr.getLong(); + exists = fillExpr.exists(); + } + return value; + } + @Override + public boolean exists() { + return exists; } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DateStreamFillMissingFunction extends AbstractDateValueStream implements LongConsumer { - private final DateValueStream baseExpr; - private final DateValueStream fillExpr; - 
public static final String name = FillMissingFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public DateStreamFillMissingFunction(DateValueStream baseExpr, DateValueStream fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - boolean exists = false; - LongConsumer cons; + static class FloatStreamFillMissingFunction extends AbstractFloatValueStream implements FloatConsumer { + private final FloatValueStream baseExpr; + private final FloatValueStream fillExpr; + public static final String name = FillMissingFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public void streamLongs(LongConsumer cons) { - exists = false; - this.cons = cons; - baseExpr.streamLongs(this); - if (!exists) { - fillExpr.streamLongs(cons); + public FloatStreamFillMissingFunction(FloatValueStream baseExpr, FloatValueStream fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); } - } - @Override - public void accept(long value) { - exists = true; - cons.accept(value); - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DateFillMissingFunction extends AbstractDateValue { - private final DateValue baseExpr; - private final DateValue fillExpr; - public static final String name = FillMissingFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public DateFillMissingFunction(DateValue baseExpr, DateValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); - } + boolean exists = false; + FloatConsumer cons; - boolean exists = false; + @Override + public void streamFloats(FloatConsumer cons) { + exists = false; + this.cons = cons; + baseExpr.streamFloats(this); + if (!exists) { + fillExpr.streamFloats(cons); + } + } + @Override + public void accept(float value) { + exists = true; + cons.accept(value); + } - @Override - public long getLong() { - long value = baseExpr.getLong(); - exists = true; - if (!baseExpr.exists()) { - value = fillExpr.getLong(); - exists = fillExpr.exists(); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } - return value; - } - @Override - public boolean exists() { - return exists; } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class 
StringStreamFillMissingFunction extends AbstractStringValueStream implements Consumer { - private final StringValueStream baseExpr; - private final StringValueStream fillExpr; - public static final String name = FillMissingFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public StringStreamFillMissingFunction(StringValueStream baseExpr, StringValueStream fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); - } + static class FloatFillMissingFunction extends AbstractFloatValue { + private final FloatValue baseExpr; + private final FloatValue fillExpr; + public static final String name = FillMissingFunction.name; + private final String exprStr; + private final ExpressionType funcType; - boolean exists = false; - Consumer cons; + public FloatFillMissingFunction(FloatValue baseExpr, FloatValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); + } - @Override - public void streamStrings(Consumer cons) { - exists = false; - this.cons = cons; - baseExpr.streamStrings(this); - if (!exists) { - fillExpr.streamStrings(cons); + boolean exists = false; + + @Override + public float getFloat() { + float value = baseExpr.getFloat(); + exists = true; + if (!baseExpr.exists()) { + value = fillExpr.getFloat(); + exists = fillExpr.exists(); + } + return value; + } + @Override + public boolean exists() { + return exists; } - } - @Override - public void accept(String value) { - exists = true; - cons.accept(value); - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class StringFillMissingFunction extends AbstractStringValue { - private final StringValue baseExpr; - private final StringValue fillExpr; - public static final String name = FillMissingFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public StringFillMissingFunction(StringValue baseExpr, StringValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); + + static class DoubleStreamFillMissingFunction extends AbstractDoubleValueStream implements DoubleConsumer { + private final DoubleValueStream baseExpr; + private final DoubleValueStream fillExpr; + public static final String name = FillMissingFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public DoubleStreamFillMissingFunction(DoubleValueStream baseExpr, DoubleValueStream fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); + this.funcType = 
AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); + } + + boolean exists = false; + DoubleConsumer cons; + + @Override + public void streamDoubles(DoubleConsumer cons) { + exists = false; + this.cons = cons; + baseExpr.streamDoubles(this); + if (!exists) { + fillExpr.streamDoubles(cons); + } + } + @Override + public void accept(double value) { + exists = true; + cons.accept(value); + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - boolean exists = false; + static class DoubleFillMissingFunction extends AbstractDoubleValue { + private final DoubleValue baseExpr; + private final DoubleValue fillExpr; + public static final String name = FillMissingFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public String getString() { - String value = baseExpr.getString(); - exists = true; - if (!baseExpr.exists()) { - value = fillExpr.getString(); - exists = fillExpr.exists(); + public DoubleFillMissingFunction(DoubleValue baseExpr, DoubleValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); + } + + boolean exists = false; + + @Override + public double getDouble() { + double value = baseExpr.getDouble(); + exists = true; + if (!baseExpr.exists()) { + value = fillExpr.getDouble(); + exists = fillExpr.exists(); + } + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } - return value; } - @Override - public boolean exists() { - return exists; + + static class DateStreamFillMissingFunction extends AbstractDateValueStream implements LongConsumer { + private final DateValueStream baseExpr; + private final DateValueStream fillExpr; + public static final String name = FillMissingFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public DateStreamFillMissingFunction(DateValueStream baseExpr, DateValueStream fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); + } + + boolean exists = false; + LongConsumer cons; + + @Override + public void streamLongs(LongConsumer cons) { + exists = false; + this.cons = cons; + baseExpr.streamLongs(this); + if (!exists) { + fillExpr.streamLongs(cons); + } + } + @Override + public void accept(long value) { + exists = true; + cons.accept(value); + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public String getName() { - return name; + static class DateFillMissingFunction extends AbstractDateValue { + private final DateValue baseExpr; + private final DateValue fillExpr; + public static final String name = FillMissingFunction.name; + private final String exprStr; + 
private final ExpressionType funcType; + + public DateFillMissingFunction(DateValue baseExpr, DateValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); + } + + boolean exists = false; + + @Override + public long getLong() { + long value = baseExpr.getLong(); + exists = true; + if (!baseExpr.exists()) { + value = fillExpr.getLong(); + exists = fillExpr.exists(); + } + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public String getExpressionStr() { - return exprStr; + + static class StringStreamFillMissingFunction extends AbstractStringValueStream implements Consumer { + private final StringValueStream baseExpr; + private final StringValueStream fillExpr; + public static final String name = FillMissingFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public StringStreamFillMissingFunction(StringValueStream baseExpr, StringValueStream fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); + } + + boolean exists = false; + Consumer cons; + + @Override + public void streamStrings(Consumer cons) { + exists = false; + this.cons = cons; + baseExpr.streamStrings(this); + if (!exists) { + fillExpr.streamStrings(cons); + } + } + @Override + public void accept(String value) { + exists = true; + cons.accept(value); + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public ExpressionType getExpressionType() { - return funcType; + + static class StringFillMissingFunction extends AbstractStringValue { + private final StringValue baseExpr; + private final StringValue fillExpr; + public static final String name = FillMissingFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public StringFillMissingFunction(StringValue baseExpr, StringValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,fillExpr); + } + + boolean exists = false; + + @Override + public String getString() { + String value = baseExpr.getString(); + exists = true; + if (!baseExpr.exists()) { + value = fillExpr.getString(); + exists = fillExpr.exists(); + } + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} \ No newline at end of file +} + diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/FilterFunction.java 
b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/FilterFunction.java index 6dac746bf7a9..f266e796cce6 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/FilterFunction.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/FilterFunction.java @@ -128,596 +128,613 @@ public class FilterFunction { } return new StreamFilterFunction(baseExpr,filterExpr); }); -} -class StreamFilterFunction extends AbstractAnalyticsValueStream { - private final AnalyticsValueStream baseExpr; - private final BooleanValue filterExpr; - public static final String name = FilterFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public StreamFilterFunction(AnalyticsValueStream baseExpr, BooleanValue filterExpr) throws SolrException { - this.baseExpr = baseExpr; - this.filterExpr = filterExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); - } - @Override - public void streamObjects(Consumer cons) { - if (filterExpr.getBoolean() && filterExpr.exists()) { - baseExpr.streamObjects(cons); + static class StreamFilterFunction extends AbstractAnalyticsValueStream { + private final AnalyticsValueStream baseExpr; + private final BooleanValue filterExpr; + public static final String name = FilterFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public StreamFilterFunction(AnalyticsValueStream baseExpr, BooleanValue filterExpr) throws SolrException { + this.baseExpr = baseExpr; + this.filterExpr = filterExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class ValueFilterFunction extends AbstractAnalyticsValue { - private final AnalyticsValue baseExpr; - private final BooleanValue filterExpr; - public static final String name = FilterFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public ValueFilterFunction(AnalyticsValue baseExpr, BooleanValue filterExpr) throws SolrException { - this.baseExpr = baseExpr; - this.filterExpr = filterExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + @Override + public void streamObjects(Consumer cons) { + if (filterExpr.getBoolean() && filterExpr.exists()) { + baseExpr.streamObjects(cons); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - boolean exists = false; + static class ValueFilterFunction extends AbstractAnalyticsValue { + private final AnalyticsValue baseExpr; + private final BooleanValue filterExpr; + public static final String name = FilterFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public Object getObject() { - Object value = baseExpr.getObject(); - exists = baseExpr.exists() && filterExpr.getBoolean() && filterExpr.exists(); - 
return value; - } - @Override - public boolean exists() { - return exists; - } + public ValueFilterFunction(AnalyticsValue baseExpr, BooleanValue filterExpr) throws SolrException { + this.baseExpr = baseExpr; + this.filterExpr = filterExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class BooleanStreamFilterFunction extends AbstractBooleanValueStream { - private final BooleanValueStream baseExpr; - private final BooleanValue filterExpr; - public static final String name = FilterFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public BooleanStreamFilterFunction(BooleanValueStream baseExpr, BooleanValue filterExpr) throws SolrException { - this.baseExpr = baseExpr; - this.filterExpr = filterExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); - } + boolean exists = false; - @Override - public void streamBooleans(BooleanConsumer cons) { - if (filterExpr.getBoolean() && filterExpr.exists()) { - baseExpr.streamBooleans(cons); + @Override + public Object getObject() { + Object value = baseExpr.getObject(); + exists = baseExpr.exists() && filterExpr.getBoolean() && filterExpr.exists(); + return value; + } + @Override + public boolean exists() { + return exists; } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class BooleanFilterFunction extends AbstractBooleanValue { - private final BooleanValue baseExpr; - private final BooleanValue filterExpr; - public static final String name = FilterFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public BooleanFilterFunction(BooleanValue baseExpr, BooleanValue filterExpr) throws SolrException { - this.baseExpr = baseExpr; - this.filterExpr = filterExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - boolean exists = false; + static class BooleanStreamFilterFunction extends AbstractBooleanValueStream { + private final BooleanValueStream baseExpr; + private final BooleanValue filterExpr; + public static final String name = FilterFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public boolean getBoolean() { - boolean value = baseExpr.getBoolean(); - exists = baseExpr.exists() && filterExpr.getBoolean() && filterExpr.exists(); - return value; - } - @Override - public boolean exists() { - return exists; - } + public BooleanStreamFilterFunction(BooleanValueStream baseExpr, BooleanValue filterExpr) throws SolrException { + this.baseExpr = baseExpr; + this.filterExpr = filterExpr; + this.exprStr = 
AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class IntStreamFilterFunction extends AbstractIntValueStream { - private final IntValueStream baseExpr; - private final BooleanValue filterExpr; - public static final String name = FilterFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public IntStreamFilterFunction(IntValueStream baseExpr, BooleanValue filterExpr) throws SolrException { - this.baseExpr = baseExpr; - this.filterExpr = filterExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); - } + @Override + public void streamBooleans(BooleanConsumer cons) { + if (filterExpr.getBoolean() && filterExpr.exists()) { + baseExpr.streamBooleans(cons); + } + } - @Override - public void streamInts(IntConsumer cons) { - if (filterExpr.getBoolean() && filterExpr.exists()) { - baseExpr.streamInts(cons); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class IntFilterFunction extends AbstractIntValue { - private final IntValue baseExpr; - private final BooleanValue filterExpr; - public static final String name = FilterFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public IntFilterFunction(IntValue baseExpr, BooleanValue filterExpr) throws SolrException { - this.baseExpr = baseExpr; - this.filterExpr = filterExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); - } + static class BooleanFilterFunction extends AbstractBooleanValue { + private final BooleanValue baseExpr; + private final BooleanValue filterExpr; + public static final String name = FilterFunction.name; + private final String exprStr; + private final ExpressionType funcType; - boolean exists = false; + public BooleanFilterFunction(BooleanValue baseExpr, BooleanValue filterExpr) throws SolrException { + this.baseExpr = baseExpr; + this.filterExpr = filterExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + } - @Override - public int getInt() { - int value = baseExpr.getInt(); - exists = baseExpr.exists() && filterExpr.getBoolean() && filterExpr.exists(); - return value; - } - @Override - public boolean exists() { - return exists; - } + boolean exists = false; - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class LongStreamFilterFunction extends AbstractLongValueStream { - private final LongValueStream 
baseExpr; - private final BooleanValue filterExpr; - public static final String name = FilterFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public LongStreamFilterFunction(LongValueStream baseExpr, BooleanValue filterExpr) throws SolrException { - this.baseExpr = baseExpr; - this.filterExpr = filterExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); - } + @Override + public boolean getBoolean() { + boolean value = baseExpr.getBoolean(); + exists = baseExpr.exists() && filterExpr.getBoolean() && filterExpr.exists(); + return value; + } + @Override + public boolean exists() { + return exists; + } - @Override - public void streamLongs(LongConsumer cons) { - if (filterExpr.getBoolean() && filterExpr.exists()) { - baseExpr.streamLongs(cons); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class LongFilterFunction extends AbstractLongValue { - private final LongValue baseExpr; - private final BooleanValue filterExpr; - public static final String name = FilterFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public LongFilterFunction(LongValue baseExpr, BooleanValue filterExpr) throws SolrException { - this.baseExpr = baseExpr; - this.filterExpr = filterExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); - } + static class IntStreamFilterFunction extends AbstractIntValueStream { + private final IntValueStream baseExpr; + private final BooleanValue filterExpr; + public static final String name = FilterFunction.name; + private final String exprStr; + private final ExpressionType funcType; - boolean exists = false; + public IntStreamFilterFunction(IntValueStream baseExpr, BooleanValue filterExpr) throws SolrException { + this.baseExpr = baseExpr; + this.filterExpr = filterExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + } - @Override - public long getLong() { - long value = baseExpr.getLong(); - exists = baseExpr.exists() && filterExpr.getBoolean() && filterExpr.exists(); - return value; - } - @Override - public boolean exists() { - return exists; - } + @Override + public void streamInts(IntConsumer cons) { + if (filterExpr.getBoolean() && filterExpr.exists()) { + baseExpr.streamInts(cons); + } + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class FloatStreamFilterFunction extends AbstractFloatValueStream { - private final FloatValueStream baseExpr; - private final BooleanValue filterExpr; - public static final String name = FilterFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public FloatStreamFilterFunction(FloatValueStream baseExpr, 
BooleanValue filterExpr) throws SolrException { - this.baseExpr = baseExpr; - this.filterExpr = filterExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public void streamFloats(FloatConsumer cons) { - if (filterExpr.getBoolean() && filterExpr.exists()) { - baseExpr.streamFloats(cons); + static class IntFilterFunction extends AbstractIntValue { + private final IntValue baseExpr; + private final BooleanValue filterExpr; + public static final String name = FilterFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public IntFilterFunction(IntValue baseExpr, BooleanValue filterExpr) throws SolrException { + this.baseExpr = baseExpr; + this.filterExpr = filterExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class FloatFilterFunction extends AbstractFloatValue { - private final FloatValue baseExpr; - private final BooleanValue filterExpr; - public static final String name = FilterFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public FloatFilterFunction(FloatValue baseExpr, BooleanValue filterExpr) throws SolrException { - this.baseExpr = baseExpr; - this.filterExpr = filterExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); - } + boolean exists = false; - boolean exists = false; + @Override + public int getInt() { + int value = baseExpr.getInt(); + exists = baseExpr.exists() && filterExpr.getBoolean() && filterExpr.exists(); + return value; + } + @Override + public boolean exists() { + return exists; + } - @Override - public float getFloat() { - float value = baseExpr.getFloat(); - exists = baseExpr.exists() && filterExpr.getBoolean() && filterExpr.exists(); - return value; - } - @Override - public boolean exists() { - return exists; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DoubleStreamFilterFunction extends AbstractDoubleValueStream { - private final DoubleValueStream baseExpr; - private final BooleanValue filterExpr; - public static final String name = FilterFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public DoubleStreamFilterFunction(DoubleValueStream baseExpr, BooleanValue filterExpr) throws SolrException { - this.baseExpr = baseExpr; - this.filterExpr = filterExpr; - this.exprStr = 
AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); - } + static class LongStreamFilterFunction extends AbstractLongValueStream { + private final LongValueStream baseExpr; + private final BooleanValue filterExpr; + public static final String name = FilterFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public void streamDoubles(DoubleConsumer cons) { - if (filterExpr.getBoolean() && filterExpr.exists()) { - baseExpr.streamDoubles(cons); + public LongStreamFilterFunction(LongValueStream baseExpr, BooleanValue filterExpr) throws SolrException { + this.baseExpr = baseExpr; + this.filterExpr = filterExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DoubleFilterFunction extends AbstractDoubleValue { - private final DoubleValue baseExpr; - private final BooleanValue filterExpr; - public static final String name = FilterFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public DoubleFilterFunction(DoubleValue baseExpr, BooleanValue filterExpr) throws SolrException { - this.baseExpr = baseExpr; - this.filterExpr = filterExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + @Override + public void streamLongs(LongConsumer cons) { + if (filterExpr.getBoolean() && filterExpr.exists()) { + baseExpr.streamLongs(cons); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - boolean exists = false; + static class LongFilterFunction extends AbstractLongValue { + private final LongValue baseExpr; + private final BooleanValue filterExpr; + public static final String name = FilterFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public double getDouble() { - double value = baseExpr.getDouble(); - exists = baseExpr.exists() && filterExpr.getBoolean() && filterExpr.exists(); - return value; - } - @Override - public boolean exists() { - return exists; - } + public LongFilterFunction(LongValue baseExpr, BooleanValue filterExpr) throws SolrException { + this.baseExpr = baseExpr; + this.filterExpr = filterExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DateStreamFilterFunction extends AbstractDateValueStream { - private final DateValueStream baseExpr; - private final BooleanValue filterExpr; - public static final String name = FilterFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public 
DateStreamFilterFunction(DateValueStream baseExpr, BooleanValue filterExpr) throws SolrException { - this.baseExpr = baseExpr; - this.filterExpr = filterExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); - } + boolean exists = false; - @Override - public void streamLongs(LongConsumer cons) { - if (filterExpr.getBoolean() && filterExpr.exists()) { - baseExpr.streamLongs(cons); + @Override + public long getLong() { + long value = baseExpr.getLong(); + exists = baseExpr.exists() && filterExpr.getBoolean() && filterExpr.exists(); + return value; + } + @Override + public boolean exists() { + return exists; } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DateFilterFunction extends AbstractDateValue { - private final DateValue baseExpr; - private final BooleanValue filterExpr; - public static final String name = FilterFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public DateFilterFunction(DateValue baseExpr, BooleanValue filterExpr) throws SolrException { - this.baseExpr = baseExpr; - this.filterExpr = filterExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - boolean exists = false; + static class FloatStreamFilterFunction extends AbstractFloatValueStream { + private final FloatValueStream baseExpr; + private final BooleanValue filterExpr; + public static final String name = FilterFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public long getLong() { - long value = baseExpr.getLong(); - exists = baseExpr.exists() && filterExpr.getBoolean() && filterExpr.exists(); - return value; - } - @Override - public boolean exists() { - return exists; - } + public FloatStreamFilterFunction(FloatValueStream baseExpr, BooleanValue filterExpr) throws SolrException { + this.baseExpr = baseExpr; + this.filterExpr = filterExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class StringStreamFilterFunction extends AbstractStringValueStream { - private final StringValueStream baseExpr; - private final BooleanValue filterExpr; - public static final String name = FilterFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public StringStreamFilterFunction(StringValueStream baseExpr, BooleanValue filterExpr) throws SolrException { - this.baseExpr = baseExpr; - this.filterExpr = filterExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); - } + @Override + public 
void streamFloats(FloatConsumer cons) { + if (filterExpr.getBoolean() && filterExpr.exists()) { + baseExpr.streamFloats(cons); + } + } - @Override - public void streamStrings(Consumer cons) { - if (filterExpr.getBoolean() && filterExpr.exists()) { - baseExpr.streamStrings(cons); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + static class FloatFilterFunction extends AbstractFloatValue { + private final FloatValue baseExpr; + private final BooleanValue filterExpr; + public static final String name = FilterFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public FloatFilterFunction(FloatValue baseExpr, BooleanValue filterExpr) throws SolrException { + this.baseExpr = baseExpr; + this.filterExpr = filterExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + } + + boolean exists = false; + + @Override + public float getFloat() { + float value = baseExpr.getFloat(); + exists = baseExpr.exists() && filterExpr.getBoolean() && filterExpr.exists(); + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class StringFilterFunction extends AbstractStringValue { - private final StringValue baseExpr; - private final BooleanValue filterExpr; - public static final String name = FilterFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public StringFilterFunction(StringValue baseExpr, BooleanValue filterExpr) throws SolrException { - this.baseExpr = baseExpr; - this.filterExpr = filterExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + + static class DoubleStreamFilterFunction extends AbstractDoubleValueStream { + private final DoubleValueStream baseExpr; + private final BooleanValue filterExpr; + public static final String name = FilterFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public DoubleStreamFilterFunction(DoubleValueStream baseExpr, BooleanValue filterExpr) throws SolrException { + this.baseExpr = baseExpr; + this.filterExpr = filterExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + } + + @Override + public void streamDoubles(DoubleConsumer cons) { + if (filterExpr.getBoolean() && filterExpr.exists()) { + baseExpr.streamDoubles(cons); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - boolean exists = false; + static class DoubleFilterFunction extends AbstractDoubleValue { + private final DoubleValue 
baseExpr; + private final BooleanValue filterExpr; + public static final String name = FilterFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public String getString() { - String value = baseExpr.getString(); - exists = baseExpr.exists() && filterExpr.getBoolean() && filterExpr.exists(); - return value; + public DoubleFilterFunction(DoubleValue baseExpr, BooleanValue filterExpr) throws SolrException { + this.baseExpr = baseExpr; + this.filterExpr = filterExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + } + + boolean exists = false; + + @Override + public double getDouble() { + double value = baseExpr.getDouble(); + exists = baseExpr.exists() && filterExpr.getBoolean() && filterExpr.exists(); + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public boolean exists() { - return exists; + + static class DateStreamFilterFunction extends AbstractDateValueStream { + private final DateValueStream baseExpr; + private final BooleanValue filterExpr; + public static final String name = FilterFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public DateStreamFilterFunction(DateValueStream baseExpr, BooleanValue filterExpr) throws SolrException { + this.baseExpr = baseExpr; + this.filterExpr = filterExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + } + + @Override + public void streamLongs(LongConsumer cons) { + if (filterExpr.getBoolean() && filterExpr.exists()) { + baseExpr.streamLongs(cons); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public String getName() { - return name; + static class DateFilterFunction extends AbstractDateValue { + private final DateValue baseExpr; + private final BooleanValue filterExpr; + public static final String name = FilterFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public DateFilterFunction(DateValue baseExpr, BooleanValue filterExpr) throws SolrException { + this.baseExpr = baseExpr; + this.filterExpr = filterExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + } + + boolean exists = false; + + @Override + public long getLong() { + long value = baseExpr.getLong(); + exists = baseExpr.exists() && filterExpr.getBoolean() && filterExpr.exists(); + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public String getExpressionStr() { - return exprStr; + + static class StringStreamFilterFunction extends AbstractStringValueStream { + 
private final StringValueStream baseExpr; + private final BooleanValue filterExpr; + public static final String name = FilterFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public StringStreamFilterFunction(StringValueStream baseExpr, BooleanValue filterExpr) throws SolrException { + this.baseExpr = baseExpr; + this.filterExpr = filterExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + } + + @Override + public void streamStrings(Consumer<String> cons) { + if (filterExpr.getBoolean() && filterExpr.exists()) { + baseExpr.streamStrings(cons); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public ExpressionType getExpressionType() { - return funcType; + + static class StringFilterFunction extends AbstractStringValue { + private final StringValue baseExpr; + private final BooleanValue filterExpr; + public static final String name = FilterFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public StringFilterFunction(StringValue baseExpr, BooleanValue filterExpr) throws SolrException { + this.baseExpr = baseExpr; + this.filterExpr = filterExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,filterExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,filterExpr); + } + + boolean exists = false; + + @Override + public String getString() { + String value = baseExpr.getString(); + exists = baseExpr.exists() && filterExpr.getBoolean() && filterExpr.exists(); + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} \ No newline at end of file +} + diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/IfFunction.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/IfFunction.java index ff28b9059871..a506e46cefd5 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/IfFunction.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/IfFunction.java @@ -172,722 +172,738 @@ public String getExpressionStr() { public ExpressionType getExpressionType() { return funcType; } -} -class ValueIfFunction extends AbstractAnalyticsValue { - private final BooleanValue ifExpr; - private final AnalyticsValue thenExpr; - private final AnalyticsValue elseExpr; - public static final String name = IfFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public ValueIfFunction(BooleanValue ifExpr, AnalyticsValue thenExpr, AnalyticsValue elseExpr) throws SolrException { - this.ifExpr = ifExpr; - this.thenExpr = thenExpr; - this.elseExpr = elseExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); - } - - private boolean exists = false; - @Override - public Object getObject() { - exists = false; - Object value = null; - boolean ifValue =
ifExpr.getBoolean(); - if (ifExpr.exists()) { - if (ifValue) { - value = thenExpr.getObject(); - exists = thenExpr.exists(); - } - else { - value = elseExpr.getObject(); - exists = elseExpr.exists(); - } + static class ValueIfFunction extends AbstractAnalyticsValue { + private final BooleanValue ifExpr; + private final AnalyticsValue thenExpr; + private final AnalyticsValue elseExpr; + public static final String name = IfFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public ValueIfFunction(BooleanValue ifExpr, AnalyticsValue thenExpr, AnalyticsValue elseExpr) throws SolrException { + this.ifExpr = ifExpr; + this.thenExpr = thenExpr; + this.elseExpr = elseExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class BooleanStreamIfFunction extends AbstractBooleanValueStream { - private final BooleanValue ifExpr; - private final BooleanValueStream thenExpr; - private final BooleanValueStream elseExpr; - public static final String name = IfFunction.name; - private final String exprStr; - private final ExpressionType funcType; + private boolean exists = false; - public BooleanStreamIfFunction(BooleanValue ifExpr, BooleanValueStream thenExpr, BooleanValueStream elseExpr) throws SolrException { - this.ifExpr = ifExpr; - this.thenExpr = thenExpr; - this.elseExpr = elseExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); - } - - @Override - public void streamBooleans(BooleanConsumer cons) { - boolean ifValue = ifExpr.getBoolean(); - if (ifExpr.exists()) { - if (ifValue) { - thenExpr.streamBooleans(cons); - } - else { - elseExpr.streamBooleans(cons); - } + @Override + public Object getObject() { + exists = false; + Object value = null; + boolean ifValue = ifExpr.getBoolean(); + if (ifExpr.exists()) { + if (ifValue) { + value = thenExpr.getObject(); + exists = thenExpr.exists(); + } + else { + value = elseExpr.getObject(); + exists = elseExpr.exists(); + } + } + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + static class BooleanStreamIfFunction extends AbstractBooleanValueStream { + private final BooleanValue ifExpr; + private final BooleanValueStream thenExpr; + private final BooleanValueStream elseExpr; + public static final String name = IfFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public BooleanStreamIfFunction(BooleanValue ifExpr, BooleanValueStream thenExpr, BooleanValueStream elseExpr) throws SolrException { + this.ifExpr = ifExpr; + this.thenExpr = thenExpr; + this.elseExpr = elseExpr; 
+ this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); + } + + @Override + public void streamBooleans(BooleanConsumer cons) { + boolean ifValue = ifExpr.getBoolean(); + if (ifExpr.exists()) { + if (ifValue) { + thenExpr.streamBooleans(cons); + } + else { + elseExpr.streamBooleans(cons); + } + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class BooleanIfFunction extends AbstractBooleanValue { - private final BooleanValue ifExpr; - private final BooleanValue thenExpr; - private final BooleanValue elseExpr; - public static final String name = IfFunction.name; - private final String exprStr; - private final ExpressionType funcType; - public BooleanIfFunction(BooleanValue ifExpr, BooleanValue thenExpr, BooleanValue elseExpr) throws SolrException { - this.ifExpr = ifExpr; - this.thenExpr = thenExpr; - this.elseExpr = elseExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); - } + static class BooleanIfFunction extends AbstractBooleanValue { + private final BooleanValue ifExpr; + private final BooleanValue thenExpr; + private final BooleanValue elseExpr; + public static final String name = IfFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public BooleanIfFunction(BooleanValue ifExpr, BooleanValue thenExpr, BooleanValue elseExpr) throws SolrException { + this.ifExpr = ifExpr; + this.thenExpr = thenExpr; + this.elseExpr = elseExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); + } - private boolean exists = false; + private boolean exists = false; - @Override - public boolean getBoolean() { - exists = false; - boolean value = false; - boolean ifValue = ifExpr.getBoolean(); - if (ifExpr.exists()) { - if (ifValue) { - value = thenExpr.getBoolean(); - exists = thenExpr.exists(); - } - else { - value = elseExpr.getBoolean(); - exists = elseExpr.exists(); + @Override + public boolean getBoolean() { + exists = false; + boolean value = false; + boolean ifValue = ifExpr.getBoolean(); + if (ifExpr.exists()) { + if (ifValue) { + value = thenExpr.getBoolean(); + exists = thenExpr.exists(); + } + else { + value = elseExpr.getBoolean(); + exists = elseExpr.exists(); + } } + return value; + } + @Override + public boolean exists() { + return exists; } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class IntStreamIfFunction extends AbstractIntValueStream { - private final BooleanValue ifExpr; - private final IntValueStream thenExpr; - private final IntValueStream elseExpr; - public static final String name = IfFunction.name; - private 
final String exprStr; - private final ExpressionType funcType; - public IntStreamIfFunction(BooleanValue ifExpr, IntValueStream thenExpr, IntValueStream elseExpr) throws SolrException { - this.ifExpr = ifExpr; - this.thenExpr = thenExpr; - this.elseExpr = elseExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); - } + static class IntStreamIfFunction extends AbstractIntValueStream { + private final BooleanValue ifExpr; + private final IntValueStream thenExpr; + private final IntValueStream elseExpr; + public static final String name = IfFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public IntStreamIfFunction(BooleanValue ifExpr, IntValueStream thenExpr, IntValueStream elseExpr) throws SolrException { + this.ifExpr = ifExpr; + this.thenExpr = thenExpr; + this.elseExpr = elseExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); + } - @Override - public void streamInts(IntConsumer cons) { - boolean ifValue = ifExpr.getBoolean(); - if (ifExpr.exists()) { - if (ifValue) { - thenExpr.streamInts(cons); - } - else { - elseExpr.streamInts(cons); + @Override + public void streamInts(IntConsumer cons) { + boolean ifValue = ifExpr.getBoolean(); + if (ifExpr.exists()) { + if (ifValue) { + thenExpr.streamInts(cons); + } + else { + elseExpr.streamInts(cons); + } } } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class IntIfFunction extends AbstractIntValue { - private final BooleanValue ifExpr; - private final IntValue thenExpr; - private final IntValue elseExpr; - public static final String name = IfFunction.name; - private final String exprStr; - private final ExpressionType funcType; - public IntIfFunction(BooleanValue ifExpr, IntValue thenExpr, IntValue elseExpr) throws SolrException { - this.ifExpr = ifExpr; - this.thenExpr = thenExpr; - this.elseExpr = elseExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); - } + static class IntIfFunction extends AbstractIntValue { + private final BooleanValue ifExpr; + private final IntValue thenExpr; + private final IntValue elseExpr; + public static final String name = IfFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public IntIfFunction(BooleanValue ifExpr, IntValue thenExpr, IntValue elseExpr) throws SolrException { + this.ifExpr = ifExpr; + this.thenExpr = thenExpr; + this.elseExpr = elseExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); + } - private boolean exists = false; + private boolean exists = false; - @Override - public int getInt() { - exists = false; - int value = 0; - boolean ifValue = ifExpr.getBoolean(); - if 
(ifExpr.exists()) { - if (ifValue) { - value = thenExpr.getInt(); - exists = thenExpr.exists(); - } - else { - value = elseExpr.getInt(); - exists = elseExpr.exists(); + @Override + public int getInt() { + exists = false; + int value = 0; + boolean ifValue = ifExpr.getBoolean(); + if (ifExpr.exists()) { + if (ifValue) { + value = thenExpr.getInt(); + exists = thenExpr.exists(); + } + else { + value = elseExpr.getInt(); + exists = elseExpr.exists(); + } } + return value; + } + @Override + public boolean exists() { + return exists; } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class LongStreamIfFunction extends AbstractLongValueStream { - private final BooleanValue ifExpr; - private final LongValueStream thenExpr; - private final LongValueStream elseExpr; - public static final String name = IfFunction.name; - private final String exprStr; - private final ExpressionType funcType; - public LongStreamIfFunction(BooleanValue ifExpr, LongValueStream thenExpr, LongValueStream elseExpr) throws SolrException { - this.ifExpr = ifExpr; - this.thenExpr = thenExpr; - this.elseExpr = elseExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); - } + static class LongStreamIfFunction extends AbstractLongValueStream { + private final BooleanValue ifExpr; + private final LongValueStream thenExpr; + private final LongValueStream elseExpr; + public static final String name = IfFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public LongStreamIfFunction(BooleanValue ifExpr, LongValueStream thenExpr, LongValueStream elseExpr) throws SolrException { + this.ifExpr = ifExpr; + this.thenExpr = thenExpr; + this.elseExpr = elseExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); + } - @Override - public void streamLongs(LongConsumer cons) { - boolean ifValue = ifExpr.getBoolean(); - if (ifExpr.exists()) { - if (ifValue) { - thenExpr.streamLongs(cons); - } - else { - elseExpr.streamLongs(cons); + @Override + public void streamLongs(LongConsumer cons) { + boolean ifValue = ifExpr.getBoolean(); + if (ifExpr.exists()) { + if (ifValue) { + thenExpr.streamLongs(cons); + } + else { + elseExpr.streamLongs(cons); + } } } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class LongIfFunction extends AbstractLongValue { - private final BooleanValue ifExpr; - private final LongValue thenExpr; - private final LongValue elseExpr; - public static final String name = IfFunction.name; - private 
final String exprStr; - private final ExpressionType funcType; - public LongIfFunction(BooleanValue ifExpr, LongValue thenExpr, LongValue elseExpr) throws SolrException { - this.ifExpr = ifExpr; - this.thenExpr = thenExpr; - this.elseExpr = elseExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); - } + static class LongIfFunction extends AbstractLongValue { + private final BooleanValue ifExpr; + private final LongValue thenExpr; + private final LongValue elseExpr; + public static final String name = IfFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public LongIfFunction(BooleanValue ifExpr, LongValue thenExpr, LongValue elseExpr) throws SolrException { + this.ifExpr = ifExpr; + this.thenExpr = thenExpr; + this.elseExpr = elseExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); + } - private boolean exists = false; + private boolean exists = false; - @Override - public long getLong() { - exists = false; - long value = 0; - boolean ifValue = ifExpr.getBoolean(); - if (ifExpr.exists()) { - if (ifValue) { - value = thenExpr.getLong(); - exists = thenExpr.exists(); - } - else { - value = elseExpr.getLong(); - exists = elseExpr.exists(); + @Override + public long getLong() { + exists = false; + long value = 0; + boolean ifValue = ifExpr.getBoolean(); + if (ifExpr.exists()) { + if (ifValue) { + value = thenExpr.getLong(); + exists = thenExpr.exists(); + } + else { + value = elseExpr.getLong(); + exists = elseExpr.exists(); + } } + return value; + } + @Override + public boolean exists() { + return exists; } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class FloatStreamIfFunction extends AbstractFloatValueStream { - private final BooleanValue ifExpr; - private final FloatValueStream thenExpr; - private final FloatValueStream elseExpr; - public static final String name = IfFunction.name; - private final String exprStr; - private final ExpressionType funcType; - public FloatStreamIfFunction(BooleanValue ifExpr, FloatValueStream thenExpr, FloatValueStream elseExpr) throws SolrException { - this.ifExpr = ifExpr; - this.thenExpr = thenExpr; - this.elseExpr = elseExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); - } + static class FloatStreamIfFunction extends AbstractFloatValueStream { + private final BooleanValue ifExpr; + private final FloatValueStream thenExpr; + private final FloatValueStream elseExpr; + public static final String name = IfFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public FloatStreamIfFunction(BooleanValue ifExpr, FloatValueStream thenExpr, FloatValueStream elseExpr) throws SolrException { + this.ifExpr = ifExpr; + 
this.thenExpr = thenExpr; + this.elseExpr = elseExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); + } - @Override - public void streamFloats(FloatConsumer cons) { - boolean ifValue = ifExpr.getBoolean(); - if (ifExpr.exists()) { - if (ifValue) { - thenExpr.streamFloats(cons); - } - else { - elseExpr.streamFloats(cons); + @Override + public void streamFloats(FloatConsumer cons) { + boolean ifValue = ifExpr.getBoolean(); + if (ifExpr.exists()) { + if (ifValue) { + thenExpr.streamFloats(cons); + } + else { + elseExpr.streamFloats(cons); + } } } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class FloatIfFunction extends AbstractFloatValue { - private final BooleanValue ifExpr; - private final FloatValue thenExpr; - private final FloatValue elseExpr; - public static final String name = IfFunction.name; - private final String exprStr; - private final ExpressionType funcType; - public FloatIfFunction(BooleanValue ifExpr, FloatValue thenExpr, FloatValue elseExpr) throws SolrException { - this.ifExpr = ifExpr; - this.thenExpr = thenExpr; - this.elseExpr = elseExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); - } + static class FloatIfFunction extends AbstractFloatValue { + private final BooleanValue ifExpr; + private final FloatValue thenExpr; + private final FloatValue elseExpr; + public static final String name = IfFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public FloatIfFunction(BooleanValue ifExpr, FloatValue thenExpr, FloatValue elseExpr) throws SolrException { + this.ifExpr = ifExpr; + this.thenExpr = thenExpr; + this.elseExpr = elseExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); + } - private boolean exists = false; + private boolean exists = false; - @Override - public float getFloat() { - exists = false; - float value = 0; - boolean ifValue = ifExpr.getBoolean(); - if (ifExpr.exists()) { - if (ifValue) { - value = thenExpr.getFloat(); - exists = thenExpr.exists(); - } - else { - value = elseExpr.getFloat(); - exists = elseExpr.exists(); + @Override + public float getFloat() { + exists = false; + float value = 0; + boolean ifValue = ifExpr.getBoolean(); + if (ifExpr.exists()) { + if (ifValue) { + value = thenExpr.getFloat(); + exists = thenExpr.exists(); + } + else { + value = elseExpr.getFloat(); + exists = elseExpr.exists(); + } } + return value; + } + @Override + public boolean exists() { + return exists; } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + 
return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class DoubleStreamIfFunction extends AbstractDoubleValueStream { - private final BooleanValue ifExpr; - private final DoubleValueStream thenExpr; - private final DoubleValueStream elseExpr; - public static final String name = IfFunction.name; - private final String exprStr; - private final ExpressionType funcType; - public DoubleStreamIfFunction(BooleanValue ifExpr, DoubleValueStream thenExpr, DoubleValueStream elseExpr) throws SolrException { - this.ifExpr = ifExpr; - this.thenExpr = thenExpr; - this.elseExpr = elseExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); - } + static class DoubleStreamIfFunction extends AbstractDoubleValueStream { + private final BooleanValue ifExpr; + private final DoubleValueStream thenExpr; + private final DoubleValueStream elseExpr; + public static final String name = IfFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public DoubleStreamIfFunction(BooleanValue ifExpr, DoubleValueStream thenExpr, DoubleValueStream elseExpr) throws SolrException { + this.ifExpr = ifExpr; + this.thenExpr = thenExpr; + this.elseExpr = elseExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); + } - @Override - public void streamDoubles(DoubleConsumer cons) { - boolean ifValue = ifExpr.getBoolean(); - if (ifExpr.exists()) { - if (ifValue) { - thenExpr.streamDoubles(cons); - } - else { - elseExpr.streamDoubles(cons); + @Override + public void streamDoubles(DoubleConsumer cons) { + boolean ifValue = ifExpr.getBoolean(); + if (ifExpr.exists()) { + if (ifValue) { + thenExpr.streamDoubles(cons); + } + else { + elseExpr.streamDoubles(cons); + } } } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class DoubleIfFunction extends AbstractDoubleValue { - private final BooleanValue ifExpr; - private final DoubleValue thenExpr; - private final DoubleValue elseExpr; - public static final String name = IfFunction.name; - private final String exprStr; - private final ExpressionType funcType; - public DoubleIfFunction(BooleanValue ifExpr, DoubleValue thenExpr, DoubleValue elseExpr) throws SolrException { - this.ifExpr = ifExpr; - this.thenExpr = thenExpr; - this.elseExpr = elseExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); - } + static class DoubleIfFunction extends AbstractDoubleValue { + private final BooleanValue ifExpr; + private final DoubleValue thenExpr; + private final DoubleValue elseExpr; + public static final String name = IfFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public DoubleIfFunction(BooleanValue ifExpr, DoubleValue 
thenExpr, DoubleValue elseExpr) throws SolrException { + this.ifExpr = ifExpr; + this.thenExpr = thenExpr; + this.elseExpr = elseExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); + } - private boolean exists = false; + private boolean exists = false; - @Override - public double getDouble() { - exists = false; - double value = 0; - boolean ifValue = ifExpr.getBoolean(); - if (ifExpr.exists()) { - if (ifValue) { - value = thenExpr.getDouble(); - exists = thenExpr.exists(); - } - else { - value = elseExpr.getDouble(); - exists = elseExpr.exists(); + @Override + public double getDouble() { + exists = false; + double value = 0; + boolean ifValue = ifExpr.getBoolean(); + if (ifExpr.exists()) { + if (ifValue) { + value = thenExpr.getDouble(); + exists = thenExpr.exists(); + } + else { + value = elseExpr.getDouble(); + exists = elseExpr.exists(); + } } + return value; + } + @Override + public boolean exists() { + return exists; } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class DateStreamIfFunction extends AbstractDateValueStream { - private final BooleanValue ifExpr; - private final DateValueStream thenExpr; - private final DateValueStream elseExpr; - public static final String name = IfFunction.name; - private final String exprStr; - private final ExpressionType funcType; - public DateStreamIfFunction(BooleanValue ifExpr, DateValueStream thenExpr, DateValueStream elseExpr) throws SolrException { - this.ifExpr = ifExpr; - this.thenExpr = thenExpr; - this.elseExpr = elseExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); - } + static class DateStreamIfFunction extends AbstractDateValueStream { + private final BooleanValue ifExpr; + private final DateValueStream thenExpr; + private final DateValueStream elseExpr; + public static final String name = IfFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public DateStreamIfFunction(BooleanValue ifExpr, DateValueStream thenExpr, DateValueStream elseExpr) throws SolrException { + this.ifExpr = ifExpr; + this.thenExpr = thenExpr; + this.elseExpr = elseExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); + } - @Override - public void streamLongs(LongConsumer cons) { - boolean ifValue = ifExpr.getBoolean(); - if (ifExpr.exists()) { - if (ifValue) { - thenExpr.streamLongs(cons); - } - else { - elseExpr.streamLongs(cons); + @Override + public void streamLongs(LongConsumer cons) { + boolean ifValue = ifExpr.getBoolean(); + if (ifExpr.exists()) { + if (ifValue) { + thenExpr.streamLongs(cons); + } + else { + elseExpr.streamLongs(cons); + } } } - } - @Override - public String getName() { - return name; - } - @Override - public String 
getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class DateIfFunction extends AbstractDateValue { - private final BooleanValue ifExpr; - private final DateValue thenExpr; - private final DateValue elseExpr; - public static final String name = IfFunction.name; - private final String exprStr; - private final ExpressionType funcType; - public DateIfFunction(BooleanValue ifExpr, DateValue thenExpr, DateValue elseExpr) throws SolrException { - this.ifExpr = ifExpr; - this.thenExpr = thenExpr; - this.elseExpr = elseExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); - } + static class DateIfFunction extends AbstractDateValue { + private final BooleanValue ifExpr; + private final DateValue thenExpr; + private final DateValue elseExpr; + public static final String name = IfFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public DateIfFunction(BooleanValue ifExpr, DateValue thenExpr, DateValue elseExpr) throws SolrException { + this.ifExpr = ifExpr; + this.thenExpr = thenExpr; + this.elseExpr = elseExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); + } - private boolean exists = false; + private boolean exists = false; - @Override - public long getLong() { - exists = false; - long value = 0; - boolean ifValue = ifExpr.getBoolean(); - if (ifExpr.exists()) { - if (ifValue) { - value = thenExpr.getLong(); - exists = thenExpr.exists(); - } - else { - value = elseExpr.getLong(); - exists = elseExpr.exists(); + @Override + public long getLong() { + exists = false; + long value = 0; + boolean ifValue = ifExpr.getBoolean(); + if (ifExpr.exists()) { + if (ifValue) { + value = thenExpr.getLong(); + exists = thenExpr.exists(); + } + else { + value = elseExpr.getLong(); + exists = elseExpr.exists(); + } } + return value; + } + @Override + public boolean exists() { + return exists; } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class StringStreamIfFunction extends AbstractStringValueStream { - private final BooleanValue ifExpr; - private final StringValueStream thenExpr; - private final StringValueStream elseExpr; - public static final String name = IfFunction.name; - private final String exprStr; - private final ExpressionType funcType; - public StringStreamIfFunction(BooleanValue ifExpr, StringValueStream thenExpr, StringValueStream elseExpr) throws SolrException { - this.ifExpr = ifExpr; - this.thenExpr = thenExpr; - this.elseExpr = elseExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); - this.funcType = 
AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); - } + static class StringStreamIfFunction extends AbstractStringValueStream { + private final BooleanValue ifExpr; + private final StringValueStream thenExpr; + private final StringValueStream elseExpr; + public static final String name = IfFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public void streamStrings(Consumer<String> cons) { - boolean ifValue = ifExpr.getBoolean(); - if (ifExpr.exists()) { - if (ifValue) { - thenExpr.streamStrings(cons); - } - else { - elseExpr.streamStrings(cons); + public StringStreamIfFunction(BooleanValue ifExpr, StringValueStream thenExpr, StringValueStream elseExpr) throws SolrException { + this.ifExpr = ifExpr; + this.thenExpr = thenExpr; + this.elseExpr = elseExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); + } + + @Override + public void streamStrings(Consumer<String> cons) { + boolean ifValue = ifExpr.getBoolean(); + if (ifExpr.exists()) { + if (ifValue) { + thenExpr.streamStrings(cons); + } + else { + elseExpr.streamStrings(cons); + } } } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class StringIfFunction extends AbstractStringValue { - private final BooleanValue ifExpr; - private final StringValue thenExpr; - private final StringValue elseExpr; - public static final String name = IfFunction.name; - private final String exprStr; - private final ExpressionType funcType; - public StringIfFunction(BooleanValue ifExpr, StringValue thenExpr, StringValue elseExpr) throws SolrException { - this.ifExpr = ifExpr; - this.thenExpr = thenExpr; - this.elseExpr = elseExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); - } + static class StringIfFunction extends AbstractStringValue { + private final BooleanValue ifExpr; + private final StringValue thenExpr; + private final StringValue elseExpr; + public static final String name = IfFunction.name; + private final String exprStr; + private final ExpressionType funcType; - private boolean exists = false; + public StringIfFunction(BooleanValue ifExpr, StringValue thenExpr, StringValue elseExpr) throws SolrException { + this.ifExpr = ifExpr; + this.thenExpr = thenExpr; + this.elseExpr = elseExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,ifExpr,thenExpr,elseExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,ifExpr,thenExpr,elseExpr); + } - @Override - public String getString() { - exists = false; - String value = null; - boolean ifValue = ifExpr.getBoolean(); - if (ifExpr.exists()) { - if (ifValue) { - value = thenExpr.getString(); - exists = thenExpr.exists(); - } - else { - value = elseExpr.getString(); - exists = elseExpr.exists(); + private boolean exists = false; + + @Override + public String getString() { + exists = false; + String value = null; + boolean ifValue = ifExpr.getBoolean(); + if
(ifExpr.exists()) { + if (ifValue) { + value = thenExpr.getString(); + exists = thenExpr.exists(); + } + else { + value = elseExpr.getString(); + exists = elseExpr.exists(); + } } + return value; + } + @Override + public boolean exists() { + return exists; } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} \ No newline at end of file +} + diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/LambdaFunction.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/LambdaFunction.java index fcaf332b8994..ff72becc6b6c 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/LambdaFunction.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/LambdaFunction.java @@ -21,18 +21,6 @@ import java.util.function.IntConsumer; import java.util.function.LongConsumer; -import org.apache.solr.analytics.function.mapping.LambdaFunction.BoolInBoolOutLambda; -import org.apache.solr.analytics.function.mapping.LambdaFunction.DoubleInDoubleOutLambda; -import org.apache.solr.analytics.function.mapping.LambdaFunction.FloatInFloatOutLambda; -import org.apache.solr.analytics.function.mapping.LambdaFunction.IntInIntOutLambda; -import org.apache.solr.analytics.function.mapping.LambdaFunction.LongInLongOutLambda; -import org.apache.solr.analytics.function.mapping.LambdaFunction.StringInStringOutLambda; -import org.apache.solr.analytics.function.mapping.LambdaFunction.TwoBoolInBoolOutLambda; -import org.apache.solr.analytics.function.mapping.LambdaFunction.TwoDoubleInDoubleOutLambda; -import org.apache.solr.analytics.function.mapping.LambdaFunction.TwoFloatInFloatOutLambda; -import org.apache.solr.analytics.function.mapping.LambdaFunction.TwoIntInIntOutLambda; -import org.apache.solr.analytics.function.mapping.LambdaFunction.TwoLongInLongOutLambda; -import org.apache.solr.analytics.function.mapping.LambdaFunction.TwoStringInStringOutLambda; import org.apache.solr.analytics.util.function.BooleanConsumer; import org.apache.solr.analytics.util.function.FloatConsumer; import org.apache.solr.analytics.value.AnalyticsValueStream; @@ -958,2263 +946,2313 @@ public static interface TwoFloatInStringOutLambda { String apply(float a, flo public static interface TwoDoubleInStringOutLambda { String apply(double a, double b); } @FunctionalInterface public static interface TwoStringInStringOutLambda { String apply(String a, String b); } -} -class BooleanValueInBooleanValueOutFunction extends AbstractBooleanValue { - private final BooleanValue param; - private final BoolInBoolOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - public BooleanValueInBooleanValueOutFunction(String name, BoolInBoolOutLambda lambda, BooleanValue param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); - } + static class BooleanValueInBooleanValueOutFunction 
extends AbstractBooleanValue { + private final BooleanValue param; + private final BoolInBoolOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public BooleanValueInBooleanValueOutFunction(String name, BoolInBoolOutLambda lambda, BooleanValue param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } - private boolean exists = false; + private boolean exists = false; - @Override - public boolean getBoolean() { - boolean value = lambda.apply(param.getBoolean()); - exists = param.exists(); - return value; - } - @Override - public boolean exists() { - return exists; - } + @Override + public boolean getBoolean() { + boolean value = lambda.apply(param.getBoolean()); + exists = param.exists(); + return value; + } + @Override + public boolean exists() { + return exists; + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class BooleanStreamInBooleanStreamOutFunction extends AbstractBooleanValueStream { - private final BooleanValueStream param; - private final BoolInBoolOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - public BooleanStreamInBooleanStreamOutFunction(String name, BoolInBoolOutLambda lambda, BooleanValueStream param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); - } + static class BooleanStreamInBooleanStreamOutFunction extends AbstractBooleanValueStream { + private final BooleanValueStream param; + private final BoolInBoolOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public void streamBooleans(BooleanConsumer cons) { - param.streamBooleans(value -> cons.accept(lambda.apply(value))); - } + public BooleanStreamInBooleanStreamOutFunction(String name, BoolInBoolOutLambda lambda, BooleanValueStream param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class BooleanStreamInBooleanValueOutFunction extends AbstractBooleanValue implements BooleanConsumer { - private final BooleanValueStream param; - private final TwoBoolInBoolOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - - public BooleanStreamInBooleanValueOutFunction(String name, TwoBoolInBoolOutLambda lambda, BooleanValueStream param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = 
AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); - } - - private boolean exists = false; - private boolean value; - - @Override - public boolean getBoolean() { - exists = false; - param.streamBooleans(this); - return value; - } - @Override - public boolean exists() { - return exists; - } - public void accept(boolean paramValue) { - if (!exists) { - exists = true; - value = paramValue; - } else { - value = lambda.apply(value, paramValue); + @Override + public void streamBooleans(BooleanConsumer cons) { + param.streamBooleans(value -> cons.accept(lambda.apply(value))); } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class TwoBooleanValueInBooleanValueOutFunction extends AbstractBooleanValue { - private final BooleanValue param1; - private final BooleanValue param2; - private final TwoBoolInBoolOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - public TwoBooleanValueInBooleanValueOutFunction(String name, TwoBoolInBoolOutLambda lambda, BooleanValue param1, BooleanValue param2) { - this.name = name; - this.lambda = lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); - } + static class BooleanStreamInBooleanValueOutFunction extends AbstractBooleanValue implements BooleanConsumer { + private final BooleanValueStream param; + private final TwoBoolInBoolOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; - private boolean exists = false; + public BooleanStreamInBooleanValueOutFunction(String name, TwoBoolInBoolOutLambda lambda, BooleanValueStream param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } - @Override - public boolean getBoolean() { - boolean value = lambda.apply(param1.getBoolean(), param2.getBoolean()); - exists = param1.exists() && param2.exists(); - return value; - } - @Override - public boolean exists() { - return exists; - } + private boolean exists = false; + private boolean value; - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class BooleanValueBooleanStreamInBooleanStreamOutFunction extends AbstractBooleanValueStream { - private final BooleanValue param1; - private final BooleanValueStream param2; - private final TwoBoolInBoolOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + @Override + public boolean getBoolean() { + exists = false; + param.streamBooleans(this); + return value; + } + @Override + public boolean exists() { + return exists; + } + public void accept(boolean paramValue) { + if (!exists) { + exists 
= true; + value = paramValue; + } else { + value = lambda.apply(value, paramValue); + } + } - public BooleanValueBooleanStreamInBooleanStreamOutFunction(String name, TwoBoolInBoolOutLambda lambda, BooleanValue param1, BooleanValueStream param2) { - this.name = name; - this.lambda = lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public void streamBooleans(BooleanConsumer cons) { - boolean value1 = param1.getBoolean(); - if (param1.exists()) { - param2.streamBooleans(value2 -> cons.accept(lambda.apply(value1,value2))); + static class TwoBooleanValueInBooleanValueOutFunction extends AbstractBooleanValue { + private final BooleanValue param1; + private final BooleanValue param2; + private final TwoBoolInBoolOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public TwoBooleanValueInBooleanValueOutFunction(String name, TwoBoolInBoolOutLambda lambda, BooleanValue param1, BooleanValue param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class BooleanStreamBooleanValueInBooleanStreamOutFunction extends AbstractBooleanValueStream { - private final BooleanValueStream param1; - private final BooleanValue param2; - private final TwoBoolInBoolOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + private boolean exists = false; - public BooleanStreamBooleanValueInBooleanStreamOutFunction(String name, TwoBoolInBoolOutLambda lambda, BooleanValueStream param1, BooleanValue param2) { - this.name = name; - this.lambda = lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); - } + @Override + public boolean getBoolean() { + boolean value = lambda.apply(param1.getBoolean(), param2.getBoolean()); + exists = param1.exists() && param2.exists(); + return value; + } + @Override + public boolean exists() { + return exists; + } - @Override - public void streamBooleans(BooleanConsumer cons) { - boolean value2 = param2.getBoolean(); - if (param2.exists()) { - param1.streamBooleans(value1 -> cons.accept(lambda.apply(value1,value2))); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -abstract class 
MultiBooleanValueInBooleanValueOutFunction extends AbstractBooleanValue { - protected final BooleanValue[] params; - protected final TwoBoolInBoolOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + static class BooleanValueBooleanStreamInBooleanStreamOutFunction extends AbstractBooleanValueStream { + private final BooleanValue param1; + private final BooleanValueStream param2; + private final TwoBoolInBoolOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public BooleanValueBooleanStreamInBooleanStreamOutFunction(String name, TwoBoolInBoolOutLambda lambda, BooleanValue param1, BooleanValueStream param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } - public MultiBooleanValueInBooleanValueOutFunction(String name, TwoBoolInBoolOutLambda lambda, BooleanValue[] params) { - this.name = name; - this.lambda = lambda; - this.params = params; - this.exprStr = AnalyticsValueStream.createExpressionString(name,params); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,params); - } + @Override + public void streamBooleans(BooleanConsumer cons) { + boolean value1 = param1.getBoolean(); + if (param1.exists()) { + param2.streamBooleans(value2 -> cons.accept(lambda.apply(value1,value2))); + } + } - protected boolean exists = false; - protected boolean temp; - @Override - public boolean exists() { - return exists; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class MultiBooleanValueInBooleanValueOutRequireAllFunction extends MultiBooleanValueInBooleanValueOutFunction { + static class BooleanStreamBooleanValueInBooleanStreamOutFunction extends AbstractBooleanValueStream { + private final BooleanValueStream param1; + private final BooleanValue param2; + private final TwoBoolInBoolOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public BooleanStreamBooleanValueInBooleanStreamOutFunction(String name, TwoBoolInBoolOutLambda lambda, BooleanValueStream param1, BooleanValue param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } - public MultiBooleanValueInBooleanValueOutRequireAllFunction(String name, TwoBoolInBoolOutLambda lambda, BooleanValue[] params) { - super(name, lambda, params); - } + @Override + public void streamBooleans(BooleanConsumer cons) { + boolean value2 = param2.getBoolean(); + if (param2.exists()) { + param1.streamBooleans(value1 -> cons.accept(lambda.apply(value1,value2))); + } + } - @Override - public boolean getBoolean() { - boolean value = params[0].getBoolean(); - exists = params[0].exists(); - for (int i = 1; i < params.length && exists; ++i) { - value = 
lambda.apply(value, params[i].getBoolean()); - exists = params[i].exists(); + @Override + public String getName() { + return name; } - return value; - } -} -class MultiBooleanValueInBooleanValueOutRequireOneFunction extends MultiBooleanValueInBooleanValueOutFunction { - - public MultiBooleanValueInBooleanValueOutRequireOneFunction(String name, TwoBoolInBoolOutLambda lambda, BooleanValue[] params) { - super(name, lambda, params); - } - - @Override - public boolean getBoolean() { - int i = -1; - boolean value = false; - exists = false; - while (++i < params.length) { - value = params[i].getBoolean(); - exists = params[i].exists(); - if (exists) { - break; - } + @Override + public String getExpressionStr() { + return exprStr; } - while (++i < params.length) { - temp = params[i].getBoolean(); - if (params[i].exists()) { - value = lambda.apply(value, temp); - } + @Override + public ExpressionType getExpressionType() { + return funcType; } - return value; - } -} -class IntValueInIntValueOutFunction extends AbstractIntValue { - private final IntValue param; - private final IntInIntOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - - public IntValueInIntValueOutFunction(String name, IntInIntOutLambda lambda, IntValue param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); } - private boolean exists = false; + abstract static class MultiBooleanValueInBooleanValueOutFunction extends AbstractBooleanValue { + protected final BooleanValue[] params; + protected final TwoBoolInBoolOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public int getInt() { - int value = lambda.apply(param.getInt()); - exists = param.exists(); - return value; - } - @Override - public boolean exists() { - return exists; - } + public MultiBooleanValueInBooleanValueOutFunction(String name, TwoBoolInBoolOutLambda lambda, BooleanValue[] params) { + this.name = name; + this.lambda = lambda; + this.params = params; + this.exprStr = AnalyticsValueStream.createExpressionString(name,params); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,params); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class IntStreamInIntStreamOutFunction extends AbstractIntValueStream { - private final IntValueStream param; - private final IntInIntOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + protected boolean exists = false; + protected boolean temp; + @Override + public boolean exists() { + return exists; + } - public IntStreamInIntStreamOutFunction(String name, IntInIntOutLambda lambda, IntValueStream param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public void 
streamInts(IntConsumer cons) { - param.streamInts(value -> cons.accept(lambda.apply(value))); - } + static class MultiBooleanValueInBooleanValueOutRequireAllFunction extends MultiBooleanValueInBooleanValueOutFunction { - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class IntStreamInIntValueOutFunction extends AbstractIntValue implements IntConsumer { - private final IntValueStream param; - private final TwoIntInIntOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - - public IntStreamInIntValueOutFunction(String name, TwoIntInIntOutLambda lambda, IntValueStream param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); - } - - private boolean exists = false; - private int value; - - @Override - public int getInt() { - exists = false; - param.streamInts(this); - return value; - } - @Override - public boolean exists() { - return exists; - } - public void accept(int paramValue) { - if (!exists) { - exists = true; - value = paramValue; - } else { - value = lambda.apply(value, paramValue); + public MultiBooleanValueInBooleanValueOutRequireAllFunction(String name, TwoBoolInBoolOutLambda lambda, BooleanValue[] params) { + super(name, lambda, params); } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public boolean getBoolean() { + boolean value = params[0].getBoolean(); + exists = params[0].exists(); + for (int i = 1; i < params.length && exists; ++i) { + value = lambda.apply(value, params[i].getBoolean()); + exists = params[i].exists(); + } + return value; + } } -} -class TwoIntValueInIntValueOutFunction extends AbstractIntValue { - private final IntValue param1; - private final IntValue param2; - private final TwoIntInIntOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - public TwoIntValueInIntValueOutFunction(String name, TwoIntInIntOutLambda lambda, IntValue param1, IntValue param2) { - this.name = name; - this.lambda = lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); - } + static class MultiBooleanValueInBooleanValueOutRequireOneFunction extends MultiBooleanValueInBooleanValueOutFunction { - private boolean exists = false; + public MultiBooleanValueInBooleanValueOutRequireOneFunction(String name, TwoBoolInBoolOutLambda lambda, BooleanValue[] params) { + super(name, lambda, params); + } - @Override - public int getInt() { - int value = lambda.apply(param1.getInt(), param2.getInt()); - exists = param1.exists() && param2.exists(); - return value; - } - @Override - public boolean exists() { - return exists; + @Override + public boolean getBoolean() { + int i = -1; + boolean value = false; + exists = false; + while (++i < params.length) { + value = params[i].getBoolean(); + exists = params[i].exists(); + if (exists) { + break; + } + } + while (++i < params.length) { + temp = 
params[i].getBoolean(); + if (params[i].exists()) { + value = lambda.apply(value, temp); + } + } + return value; + } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class IntValueIntStreamInIntStreamOutFunction extends AbstractIntValueStream { - private final IntValue param1; - private final IntValueStream param2; - private final TwoIntInIntOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + static class IntValueInIntValueOutFunction extends AbstractIntValue { + private final IntValue param; + private final IntInIntOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; - public IntValueIntStreamInIntStreamOutFunction(String name, TwoIntInIntOutLambda lambda, IntValue param1, IntValueStream param2) { - this.name = name; - this.lambda = lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); - } + public IntValueInIntValueOutFunction(String name, IntInIntOutLambda lambda, IntValue param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } + + private boolean exists = false; - @Override - public void streamInts(IntConsumer cons) { - int value1 = param1.getInt(); - if (param1.exists()) { - param2.streamInts(value2 -> cons.accept(lambda.apply(value1,value2))); + @Override + public int getInt() { + int value = lambda.apply(param.getInt()); + exists = param.exists(); + return value; + } + @Override + public boolean exists() { + return exists; } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class IntStreamIntValueInIntStreamOutFunction extends AbstractIntValueStream { - private final IntValueStream param1; - private final IntValue param2; - private final TwoIntInIntOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - public IntStreamIntValueInIntStreamOutFunction(String name, TwoIntInIntOutLambda lambda, IntValueStream param1, IntValue param2) { - this.name = name; - this.lambda = lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); - } + static class IntStreamInIntStreamOutFunction extends AbstractIntValueStream { + private final IntValueStream param; + private final IntInIntOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public void streamInts(IntConsumer cons) { - int value2 = param2.getInt(); - if (param2.exists()) { - param1.streamInts(value1 -> 
cons.accept(lambda.apply(value1,value2))); + public IntStreamInIntStreamOutFunction(String name, IntInIntOutLambda lambda, IntValueStream param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -abstract class MultiIntValueInIntValueOutFunction extends AbstractIntValue { - protected final IntValue[] params; - protected final TwoIntInIntOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + @Override + public void streamInts(IntConsumer cons) { + param.streamInts(value -> cons.accept(lambda.apply(value))); + } - public MultiIntValueInIntValueOutFunction(String name, TwoIntInIntOutLambda lambda, IntValue[] params) { - this.name = name; - this.lambda = lambda; - this.params = params; - this.exprStr = AnalyticsValueStream.createExpressionString(name,params); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,params); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - protected boolean exists = false; - protected int temp; - @Override - public boolean exists() { - return exists; - } + static class IntStreamInIntValueOutFunction extends AbstractIntValue implements IntConsumer { + private final IntValueStream param; + private final TwoIntInIntOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class MultiIntValueInIntValueOutRequireAllFunction extends MultiIntValueInIntValueOutFunction { + public IntStreamInIntValueOutFunction(String name, TwoIntInIntOutLambda lambda, IntValueStream param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } - public MultiIntValueInIntValueOutRequireAllFunction(String name, TwoIntInIntOutLambda lambda, IntValue[] params) { - super(name, lambda, params); - } + private boolean exists = false; + private int value; - @Override - public int getInt() { - int value = params[0].getInt(); - exists = params[0].exists(); - for (int i = 1; i < params.length && exists; ++i) { - value = lambda.apply(value, params[i].getInt()); - exists = params[i].exists(); + @Override + public int getInt() { + exists = false; + param.streamInts(this); + return value; } - return value; - } -} -class MultiIntValueInIntValueOutRequireOneFunction extends MultiIntValueInIntValueOutFunction { - - public MultiIntValueInIntValueOutRequireOneFunction(String name, TwoIntInIntOutLambda lambda, IntValue[] params) { - super(name, lambda, params); - } - - @Override - public int getInt() { - int i = -1; - int value = 0; - exists = false; - while (++i < params.length) { - value = params[i].getInt(); - exists = params[i].exists(); - if 
(exists) { - break; - } + @Override + public boolean exists() { + return exists; } - while (++i < params.length) { - temp = params[i].getInt(); - if (params[i].exists()) { - value = lambda.apply(value, temp); + public void accept(int paramValue) { + if (!exists) { + exists = true; + value = paramValue; + } else { + value = lambda.apply(value, paramValue); } } - return value; - } -} -class LongValueInLongValueOutFunction extends AbstractLongValue { - private final LongValue param; - private final LongInLongOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - - public LongValueInLongValueOutFunction(String name, LongInLongOutLambda lambda, LongValue param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); - } - - private boolean exists = false; - - @Override - public long getLong() { - long value = lambda.apply(param.getLong()); - exists = param.exists(); - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class LongStreamInLongStreamOutFunction extends AbstractLongValueStream { - private final LongValueStream param; - private final LongInLongOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - public LongStreamInLongStreamOutFunction(String name, LongInLongOutLambda lambda, LongValueStream param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); - } + static class TwoIntValueInIntValueOutFunction extends AbstractIntValue { + private final IntValue param1; + private final IntValue param2; + private final TwoIntInIntOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public TwoIntValueInIntValueOutFunction(String name, TwoIntInIntOutLambda lambda, IntValue param1, IntValue param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } - @Override - public void streamLongs(LongConsumer cons) { - param.streamLongs(value -> cons.accept(lambda.apply(value))); - } + private boolean exists = false; - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class LongStreamInLongValueOutFunction extends AbstractLongValue implements LongConsumer { - private final LongValueStream param; - private final TwoLongInLongOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - - public 
LongStreamInLongValueOutFunction(String name, TwoLongInLongOutLambda lambda, LongValueStream param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); - } - - private boolean exists = false; - private long value; - - @Override - public long getLong() { - exists = false; - param.streamLongs(this); - return value; - } - @Override - public boolean exists() { - return exists; - } - public void accept(long paramValue) { - if (!exists) { - exists = true; - value = paramValue; - } else { - value = lambda.apply(value, paramValue); + @Override + public int getInt() { + int value = lambda.apply(param1.getInt(), param2.getInt()); + exists = param1.exists() && param2.exists(); + return value; + } + @Override + public boolean exists() { + return exists; } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class TwoLongValueInLongValueOutFunction extends AbstractLongValue { - private final LongValue param1; - private final LongValue param2; - private final TwoLongInLongOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - public TwoLongValueInLongValueOutFunction(String name, TwoLongInLongOutLambda lambda, LongValue param1, LongValue param2) { - this.name = name; - this.lambda = lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); - } + static class IntValueIntStreamInIntStreamOutFunction extends AbstractIntValueStream { + private final IntValue param1; + private final IntValueStream param2; + private final TwoIntInIntOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public IntValueIntStreamInIntStreamOutFunction(String name, TwoIntInIntOutLambda lambda, IntValue param1, IntValueStream param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } - private boolean exists = false; + @Override + public void streamInts(IntConsumer cons) { + int value1 = param1.getInt(); + if (param1.exists()) { + param2.streamInts(value2 -> cons.accept(lambda.apply(value1,value2))); + } + } - @Override - public long getLong() { - long value = lambda.apply(param1.getLong(), param2.getLong()); - exists = param1.exists() && param2.exists(); - return value; - } - @Override - public boolean exists() { - return exists; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public 
ExpressionType getExpressionType() { - return funcType; - } -} -class LongValueLongStreamInLongStreamOutFunction extends AbstractLongValueStream { - private final LongValue param1; - private final LongValueStream param2; - private final TwoLongInLongOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + static class IntStreamIntValueInIntStreamOutFunction extends AbstractIntValueStream { + private final IntValueStream param1; + private final IntValue param2; + private final TwoIntInIntOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public IntStreamIntValueInIntStreamOutFunction(String name, TwoIntInIntOutLambda lambda, IntValueStream param1, IntValue param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } - public LongValueLongStreamInLongStreamOutFunction(String name, TwoLongInLongOutLambda lambda, LongValue param1, LongValueStream param2) { - this.name = name; - this.lambda = lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); - } + @Override + public void streamInts(IntConsumer cons) { + int value2 = param2.getInt(); + if (param2.exists()) { + param1.streamInts(value1 -> cons.accept(lambda.apply(value1,value2))); + } + } - @Override - public void streamLongs(LongConsumer cons) { - long value1 = param1.getLong(); - if (param1.exists()) { - param2.streamLongs(value2 -> cons.accept(lambda.apply(value1,value2))); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class LongStreamLongValueInLongStreamOutFunction extends AbstractLongValueStream { - private final LongValueStream param1; - private final LongValue param2; - private final TwoLongInLongOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + abstract static class MultiIntValueInIntValueOutFunction extends AbstractIntValue { + protected final IntValue[] params; + protected final TwoIntInIntOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; - public LongStreamLongValueInLongStreamOutFunction(String name, TwoLongInLongOutLambda lambda, LongValueStream param1, LongValue param2) { - this.name = name; - this.lambda = lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); - } + public MultiIntValueInIntValueOutFunction(String name, TwoIntInIntOutLambda lambda, IntValue[] params) { + this.name = name; + this.lambda = lambda; + this.params = params; + this.exprStr = AnalyticsValueStream.createExpressionString(name,params); + this.funcType = 
AnalyticsValueStream.determineMappingPhase(exprStr,params); + } - @Override - public void streamLongs(LongConsumer cons) { - long value2 = param2.getLong(); - if (param2.exists()) { - param1.streamLongs(value1 -> cons.accept(lambda.apply(value1,value2))); + protected boolean exists = false; + protected int temp; + @Override + public boolean exists() { + return exists; } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -abstract class MultiLongValueInLongValueOutFunction extends AbstractLongValue { - protected final LongValue[] params; - protected final TwoLongInLongOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - public MultiLongValueInLongValueOutFunction(String name, TwoLongInLongOutLambda lambda, LongValue[] params) { - this.name = name; - this.lambda = lambda; - this.params = params; - this.exprStr = AnalyticsValueStream.createExpressionString(name,params); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,params); - } + static class MultiIntValueInIntValueOutRequireAllFunction extends MultiIntValueInIntValueOutFunction { - protected boolean exists = false; - protected long temp; - @Override - public boolean exists() { - return exists; - } + public MultiIntValueInIntValueOutRequireAllFunction(String name, TwoIntInIntOutLambda lambda, IntValue[] params) { + super(name, lambda, params); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public int getInt() { + int value = params[0].getInt(); + exists = params[0].exists(); + for (int i = 1; i < params.length && exists; ++i) { + value = lambda.apply(value, params[i].getInt()); + exists = params[i].exists(); + } + return value; + } } -} -class MultiLongValueInLongValueOutRequireAllFunction extends MultiLongValueInLongValueOutFunction { - public MultiLongValueInLongValueOutRequireAllFunction(String name, TwoLongInLongOutLambda lambda, LongValue[] params) { - super(name, lambda, params); - } + static class MultiIntValueInIntValueOutRequireOneFunction extends MultiIntValueInIntValueOutFunction { - @Override - public long getLong() { - long value = params[0].getLong(); - exists = params[0].exists(); - for (int i = 1; i < params.length && exists; ++i) { - value = lambda.apply(value, params[i].getLong()); - exists = params[i].exists(); + public MultiIntValueInIntValueOutRequireOneFunction(String name, TwoIntInIntOutLambda lambda, IntValue[] params) { + super(name, lambda, params); } - return value; - } -} -class MultiLongValueInLongValueOutRequireOneFunction extends MultiLongValueInLongValueOutFunction { - - public MultiLongValueInLongValueOutRequireOneFunction(String name, TwoLongInLongOutLambda lambda, LongValue[] params) { - super(name, lambda, params); - } - - @Override - public long getLong() { - int i = -1; - long value = 0; - exists = false; - while (++i < params.length) { - value = params[i].getLong(); - exists = params[i].exists(); - if (exists) { - break; + + @Override + public int getInt() { + int i = 
-1; + int value = 0; + exists = false; + while (++i < params.length) { + value = params[i].getInt(); + exists = params[i].exists(); + if (exists) { + break; + } } - } - while (++i < params.length) { - temp = params[i].getLong(); - if (params[i].exists()) { - value = lambda.apply(value, temp); + while (++i < params.length) { + temp = params[i].getInt(); + if (params[i].exists()) { + value = lambda.apply(value, temp); + } } + return value; } - return value; } -} -class FloatValueInFloatValueOutFunction extends AbstractFloatValue { - private final FloatValue param; - private final FloatInFloatOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - public FloatValueInFloatValueOutFunction(String name, FloatInFloatOutLambda lambda, FloatValue param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); - } + static class LongValueInLongValueOutFunction extends AbstractLongValue { + private final LongValue param; + private final LongInLongOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; - private boolean exists = false; + public LongValueInLongValueOutFunction(String name, LongInLongOutLambda lambda, LongValue param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } - @Override - public float getFloat() { - float value = lambda.apply(param.getFloat()); - exists = param.exists(); - return value; - } - @Override - public boolean exists() { - return exists; - } + private boolean exists = false; - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class FloatStreamInFloatStreamOutFunction extends AbstractFloatValueStream { - private final FloatValueStream param; - private final FloatInFloatOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + @Override + public long getLong() { + long value = lambda.apply(param.getLong()); + exists = param.exists(); + return value; + } + @Override + public boolean exists() { + return exists; + } - public FloatStreamInFloatStreamOutFunction(String name, FloatInFloatOutLambda lambda, FloatValueStream param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public void streamFloats(FloatConsumer cons) { - param.streamFloats(value -> cons.accept(lambda.apply(value))); - } + static class LongStreamInLongStreamOutFunction extends AbstractLongValueStream { + private final LongValueStream param; + private final LongInLongOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public String getName() { - return name; - 
} - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class FloatStreamInFloatValueOutFunction extends AbstractFloatValue implements FloatConsumer { - private final FloatValueStream param; - private final TwoFloatInFloatOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - - public FloatStreamInFloatValueOutFunction(String name, TwoFloatInFloatOutLambda lambda, FloatValueStream param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); - } - - private boolean exists = false; - private float value; - - @Override - public float getFloat() { - exists = false; - param.streamFloats(this); - return value; - } - @Override - public boolean exists() { - return exists; - } - public void accept(float paramValue) { - if (!exists) { - exists = true; - value = paramValue; - } else { - value = lambda.apply(value, paramValue); + public LongStreamInLongStreamOutFunction(String name, LongInLongOutLambda lambda, LongValueStream param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class TwoFloatValueInFloatValueOutFunction extends AbstractFloatValue { - private final FloatValue param1; - private final FloatValue param2; - private final TwoFloatInFloatOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + @Override + public void streamLongs(LongConsumer cons) { + param.streamLongs(value -> cons.accept(lambda.apply(value))); + } - public TwoFloatValueInFloatValueOutFunction(String name, TwoFloatInFloatOutLambda lambda, FloatValue param1, FloatValue param2) { - this.name = name; - this.lambda = lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - private boolean exists = false; + static class LongStreamInLongValueOutFunction extends AbstractLongValue implements LongConsumer { + private final LongValueStream param; + private final TwoLongInLongOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public float getFloat() { - float value = lambda.apply(param1.getFloat(), param2.getFloat()); - exists = param1.exists() && param2.exists(); - return value; - } - @Override - public boolean exists() { - return exists; - } + public LongStreamInLongValueOutFunction(String name, TwoLongInLongOutLambda lambda, LongValueStream param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + 
this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class FloatValueFloatStreamInFloatStreamOutFunction extends AbstractFloatValueStream { - private final FloatValue param1; - private final FloatValueStream param2; - private final TwoFloatInFloatOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + private boolean exists = false; + private long value; - public FloatValueFloatStreamInFloatStreamOutFunction(String name, TwoFloatInFloatOutLambda lambda, FloatValue param1, FloatValueStream param2) { - this.name = name; - this.lambda = lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); - } + @Override + public long getLong() { + exists = false; + param.streamLongs(this); + return value; + } + @Override + public boolean exists() { + return exists; + } + public void accept(long paramValue) { + if (!exists) { + exists = true; + value = paramValue; + } else { + value = lambda.apply(value, paramValue); + } + } - @Override - public void streamFloats(FloatConsumer cons) { - float value1 = param1.getFloat(); - if (param1.exists()) { - param2.streamFloats(value2 -> cons.accept(lambda.apply(value1,value2))); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class FloatStreamFloatValueInFloatStreamOutFunction extends AbstractFloatValueStream { - private final FloatValueStream param1; - private final FloatValue param2; - private final TwoFloatInFloatOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + static class TwoLongValueInLongValueOutFunction extends AbstractLongValue { + private final LongValue param1; + private final LongValue param2; + private final TwoLongInLongOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public TwoLongValueInLongValueOutFunction(String name, TwoLongInLongOutLambda lambda, LongValue param1, LongValue param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } - public FloatStreamFloatValueInFloatStreamOutFunction(String name, TwoFloatInFloatOutLambda lambda, FloatValueStream param1, FloatValue param2) { - this.name = name; - this.lambda = lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); - } + private boolean exists = false; - @Override - public void streamFloats(FloatConsumer cons) { - float value2 = 
param2.getFloat(); - if (param2.exists()) { - param1.streamFloats(value1 -> cons.accept(lambda.apply(value1,value2))); + @Override + public long getLong() { + long value = lambda.apply(param1.getLong(), param2.getLong()); + exists = param1.exists() && param2.exists(); + return value; + } + @Override + public boolean exists() { + return exists; } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -abstract class MultiFloatValueInFloatValueOutFunction extends AbstractFloatValue { - protected final FloatValue[] params; - protected final TwoFloatInFloatOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - public MultiFloatValueInFloatValueOutFunction(String name, TwoFloatInFloatOutLambda lambda, FloatValue[] params) { - this.name = name; - this.lambda = lambda; - this.params = params; - this.exprStr = AnalyticsValueStream.createExpressionString(name,params); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,params); - } + static class LongValueLongStreamInLongStreamOutFunction extends AbstractLongValueStream { + private final LongValue param1; + private final LongValueStream param2; + private final TwoLongInLongOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public LongValueLongStreamInLongStreamOutFunction(String name, TwoLongInLongOutLambda lambda, LongValue param1, LongValueStream param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } - protected boolean exists = false; - protected float temp; - @Override - public boolean exists() { - return exists; - } + @Override + public void streamLongs(LongConsumer cons) { + long value1 = param1.getLong(); + if (param1.exists()) { + param2.streamLongs(value2 -> cons.accept(lambda.apply(value1,value2))); + } + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class MultiFloatValueInFloatValueOutRequireAllFunction extends MultiFloatValueInFloatValueOutFunction { - public MultiFloatValueInFloatValueOutRequireAllFunction(String name, TwoFloatInFloatOutLambda lambda, FloatValue[] params) { - super(name, lambda, params); + static class LongStreamLongValueInLongStreamOutFunction extends AbstractLongValueStream { + private final LongValueStream param1; + private final LongValue param2; + private final TwoLongInLongOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public LongStreamLongValueInLongStreamOutFunction(String name, TwoLongInLongOutLambda lambda, LongValueStream 
param1, LongValue param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } + + @Override + public void streamLongs(LongConsumer cons) { + long value2 = param2.getLong(); + if (param2.exists()) { + param1.streamLongs(value1 -> cons.accept(lambda.apply(value1,value2))); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public float getFloat() { - float value = params[0].getFloat(); - exists = params[0].exists(); - for (int i = 1; i < params.length && exists; ++i) { - value = lambda.apply(value, params[i].getFloat()); - exists = params[i].exists(); + abstract static class MultiLongValueInLongValueOutFunction extends AbstractLongValue { + protected final LongValue[] params; + protected final TwoLongInLongOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public MultiLongValueInLongValueOutFunction(String name, TwoLongInLongOutLambda lambda, LongValue[] params) { + this.name = name; + this.lambda = lambda; + this.params = params; + this.exprStr = AnalyticsValueStream.createExpressionString(name,params); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,params); + } + + protected boolean exists = false; + protected long temp; + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } - return value; } -} -class MultiFloatValueInFloatValueOutRequireOneFunction extends MultiFloatValueInFloatValueOutFunction { - - public MultiFloatValueInFloatValueOutRequireOneFunction(String name, TwoFloatInFloatOutLambda lambda, FloatValue[] params) { - super(name, lambda, params); - } - - @Override - public float getFloat() { - int i = -1; - float value = 0; - exists = false; - while (++i < params.length) { - value = params[i].getFloat(); - exists = params[i].exists(); - if (exists) { - break; - } + + static class MultiLongValueInLongValueOutRequireAllFunction extends MultiLongValueInLongValueOutFunction { + + public MultiLongValueInLongValueOutRequireAllFunction(String name, TwoLongInLongOutLambda lambda, LongValue[] params) { + super(name, lambda, params); } - while (++i < params.length) { - temp = params[i].getFloat(); - if (params[i].exists()) { - value = lambda.apply(value, temp); + + @Override + public long getLong() { + long value = params[0].getLong(); + exists = params[0].exists(); + for (int i = 1; i < params.length && exists; ++i) { + value = lambda.apply(value, params[i].getLong()); + exists = params[i].exists(); } + return value; } - return value; } -} -class DoubleValueInDoubleValueOutFunction extends AbstractDoubleValue { - private final DoubleValue param; - private final DoubleInDoubleOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - public DoubleValueInDoubleValueOutFunction(String name, DoubleInDoubleOutLambda lambda, DoubleValue param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = 
AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); - } + static class MultiLongValueInLongValueOutRequireOneFunction extends MultiLongValueInLongValueOutFunction { - private boolean exists = false; + public MultiLongValueInLongValueOutRequireOneFunction(String name, TwoLongInLongOutLambda lambda, LongValue[] params) { + super(name, lambda, params); + } - @Override - public double getDouble() { - double value = lambda.apply(param.getDouble()); - exists = param.exists(); - return value; - } - @Override - public boolean exists() { - return exists; + @Override + public long getLong() { + int i = -1; + long value = 0; + exists = false; + while (++i < params.length) { + value = params[i].getLong(); + exists = params[i].exists(); + if (exists) { + break; + } + } + while (++i < params.length) { + temp = params[i].getLong(); + if (params[i].exists()) { + value = lambda.apply(value, temp); + } + } + return value; + } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DoubleStreamInDoubleStreamOutFunction extends AbstractDoubleValueStream { - private final DoubleValueStream param; - private final DoubleInDoubleOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + static class FloatValueInFloatValueOutFunction extends AbstractFloatValue { + private final FloatValue param; + private final FloatInFloatOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; - public DoubleStreamInDoubleStreamOutFunction(String name, DoubleInDoubleOutLambda lambda, DoubleValueStream param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); - } + public FloatValueInFloatValueOutFunction(String name, FloatInFloatOutLambda lambda, FloatValue param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } - @Override - public void streamDoubles(DoubleConsumer cons) { - param.streamDoubles(value -> cons.accept(lambda.apply(value))); - } + private boolean exists = false; - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DoubleStreamInDoubleValueOutFunction extends AbstractDoubleValue implements DoubleConsumer { - private final DoubleValueStream param; - private final TwoDoubleInDoubleOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - - public DoubleStreamInDoubleValueOutFunction(String name, TwoDoubleInDoubleOutLambda lambda, DoubleValueStream param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); - } - - private boolean exists = false; - private double value; - - @Override - public double getDouble() { - exists = 
false; - param.streamDoubles(this); - return value; - } - @Override - public boolean exists() { - return exists; - } - public void accept(double paramValue) { - if (!exists) { - exists = true; - value = paramValue; - } else { - value = lambda.apply(value, paramValue); + @Override + public float getFloat() { + float value = lambda.apply(param.getFloat()); + exists = param.exists(); + return value; + } + @Override + public boolean exists() { + return exists; } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class TwoDoubleValueInDoubleValueOutFunction extends AbstractDoubleValue { - private final DoubleValue param1; - private final DoubleValue param2; - private final TwoDoubleInDoubleOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - public TwoDoubleValueInDoubleValueOutFunction(String name, TwoDoubleInDoubleOutLambda lambda, DoubleValue param1, DoubleValue param2) { - this.name = name; - this.lambda = lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); - } + static class FloatStreamInFloatStreamOutFunction extends AbstractFloatValueStream { + private final FloatValueStream param; + private final FloatInFloatOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; - private boolean exists = false; + public FloatStreamInFloatStreamOutFunction(String name, FloatInFloatOutLambda lambda, FloatValueStream param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } - @Override - public double getDouble() { - double value = lambda.apply(param1.getDouble(), param2.getDouble()); - exists = param1.exists() && param2.exists(); - return value; - } - @Override - public boolean exists() { - return exists; - } + @Override + public void streamFloats(FloatConsumer cons) { + param.streamFloats(value -> cons.accept(lambda.apply(value))); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class DoubleValueDoubleStreamInDoubleStreamOutFunction extends AbstractDoubleValueStream { - private final DoubleValue param1; - private final DoubleValueStream param2; - private final TwoDoubleInDoubleOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - public DoubleValueDoubleStreamInDoubleStreamOutFunction(String name, TwoDoubleInDoubleOutLambda lambda, DoubleValue param1, DoubleValueStream param2) { - this.name = name; - this.lambda = lambda; - 
this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); - } + static class FloatStreamInFloatValueOutFunction extends AbstractFloatValue implements FloatConsumer { + private final FloatValueStream param; + private final TwoFloatInFloatOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public void streamDoubles(DoubleConsumer cons) { - double value1 = param1.getDouble(); - if (param1.exists()) { - param2.streamDoubles(value2 -> cons.accept(lambda.apply(value1,value2))); + public FloatStreamInFloatValueOutFunction(String name, TwoFloatInFloatOutLambda lambda, FloatValueStream param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DoubleStreamDoubleValueInDoubleStreamOutFunction extends AbstractDoubleValueStream { - private final DoubleValueStream param1; - private final DoubleValue param2; - private final TwoDoubleInDoubleOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + private boolean exists = false; + private float value; - public DoubleStreamDoubleValueInDoubleStreamOutFunction(String name, TwoDoubleInDoubleOutLambda lambda, DoubleValueStream param1, DoubleValue param2) { - this.name = name; - this.lambda = lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); - } + @Override + public float getFloat() { + exists = false; + param.streamFloats(this); + return value; + } + @Override + public boolean exists() { + return exists; + } + public void accept(float paramValue) { + if (!exists) { + exists = true; + value = paramValue; + } else { + value = lambda.apply(value, paramValue); + } + } - @Override - public void streamDoubles(DoubleConsumer cons) { - double value2 = param2.getDouble(); - if (param2.exists()) { - param1.streamDoubles(value1 -> cons.accept(lambda.apply(value1,value2))); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -abstract class MultiDoubleValueInDoubleValueOutFunction extends AbstractDoubleValue { - protected final DoubleValue[] params; - protected final TwoDoubleInDoubleOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + static class TwoFloatValueInFloatValueOutFunction extends AbstractFloatValue { + private final FloatValue param1; + private final FloatValue param2; + private final TwoFloatInFloatOutLambda lambda; + private final String name; + private final String 
exprStr; + private final ExpressionType funcType; + + public TwoFloatValueInFloatValueOutFunction(String name, TwoFloatInFloatOutLambda lambda, FloatValue param1, FloatValue param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } - public MultiDoubleValueInDoubleValueOutFunction(String name, TwoDoubleInDoubleOutLambda lambda, DoubleValue[] params) { - this.name = name; - this.lambda = lambda; - this.params = params; - this.exprStr = AnalyticsValueStream.createExpressionString(name,params); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,params); - } + private boolean exists = false; - protected boolean exists = false; - protected double temp; - @Override - public boolean exists() { - return exists; - } + @Override + public float getFloat() { + float value = lambda.apply(param1.getFloat(), param2.getFloat()); + exists = param1.exists() && param2.exists(); + return value; + } + @Override + public boolean exists() { + return exists; + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class MultiDoubleValueInDoubleValueOutRequireAllFunction extends MultiDoubleValueInDoubleValueOutFunction { - public MultiDoubleValueInDoubleValueOutRequireAllFunction(String name, TwoDoubleInDoubleOutLambda lambda, DoubleValue[] params) { - super(name, lambda, params); - } + static class FloatValueFloatStreamInFloatStreamOutFunction extends AbstractFloatValueStream { + private final FloatValue param1; + private final FloatValueStream param2; + private final TwoFloatInFloatOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public FloatValueFloatStreamInFloatStreamOutFunction(String name, TwoFloatInFloatOutLambda lambda, FloatValue param1, FloatValueStream param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } + + @Override + public void streamFloats(FloatConsumer cons) { + float value1 = param1.getFloat(); + if (param1.exists()) { + param2.streamFloats(value2 -> cons.accept(lambda.apply(value1,value2))); + } + } - @Override - public double getDouble() { - double value = params[0].getDouble(); - exists = params[0].exists(); - for (int i = 1; i < params.length && exists; ++i) { - value = lambda.apply(value, params[i].getDouble()); - exists = params[i].exists(); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } - return value; } -} -class MultiDoubleValueInDoubleValueOutRequireOneFunction extends MultiDoubleValueInDoubleValueOutFunction { - - public MultiDoubleValueInDoubleValueOutRequireOneFunction(String name, TwoDoubleInDoubleOutLambda lambda, DoubleValue[] params) { - 
super(name, lambda, params); - } - - @Override - public double getDouble() { - int i = -1; - double value = 0; - exists = false; - while (++i < params.length) { - value = params[i].getDouble(); - exists = params[i].exists(); - if (exists) { - break; - } + + static class FloatStreamFloatValueInFloatStreamOutFunction extends AbstractFloatValueStream { + private final FloatValueStream param1; + private final FloatValue param2; + private final TwoFloatInFloatOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public FloatStreamFloatValueInFloatStreamOutFunction(String name, TwoFloatInFloatOutLambda lambda, FloatValueStream param1, FloatValue param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); } - while (++i < params.length) { - temp = params[i].getDouble(); - if (params[i].exists()) { - value = lambda.apply(value, temp); + + @Override + public void streamFloats(FloatConsumer cons) { + float value2 = param2.getFloat(); + if (param2.exists()) { + param1.streamFloats(value1 -> cons.accept(lambda.apply(value1,value2))); } } - return value; - } -} -class DateValueInDateValueOutFunction extends AbstractDateValue { - private final DateValue param; - private final LongInLongOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - public DateValueInDateValueOutFunction(String name, LongInLongOutLambda lambda, DateValue param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - private boolean exists = false; + abstract static class MultiFloatValueInFloatValueOutFunction extends AbstractFloatValue { + protected final FloatValue[] params; + protected final TwoFloatInFloatOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public long getLong() { - long value = lambda.apply(param.getLong()); - exists = param.exists(); - return value; - } - @Override - public boolean exists() { - return exists; - } + public MultiFloatValueInFloatValueOutFunction(String name, TwoFloatInFloatOutLambda lambda, FloatValue[] params) { + this.name = name; + this.lambda = lambda; + this.params = params; + this.exprStr = AnalyticsValueStream.createExpressionString(name,params); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,params); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DateStreamInDateStreamOutFunction extends AbstractDateValueStream { - private final DateValueStream param; - private final LongInLongOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + protected boolean exists = false; + protected float temp; + @Override + public boolean exists() { + 
return exists; + } - public DateStreamInDateStreamOutFunction(String name, LongInLongOutLambda lambda, DateValueStream param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public void streamLongs(LongConsumer cons) { - param.streamLongs(value -> cons.accept(lambda.apply(value))); - } + static class MultiFloatValueInFloatValueOutRequireAllFunction extends MultiFloatValueInFloatValueOutFunction { - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DateStreamInDateValueOutFunction extends AbstractDateValue implements LongConsumer { - private final DateValueStream param; - private final TwoLongInLongOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - - public DateStreamInDateValueOutFunction(String name, TwoLongInLongOutLambda lambda, DateValueStream param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); - } - - private boolean exists = false; - private long value; - - @Override - public long getLong() { - exists = false; - param.streamLongs(this); - return value; - } - @Override - public boolean exists() { - return exists; - } - public void accept(long paramValue) { - if (!exists) { - exists = true; - value = paramValue; - } else { - value = lambda.apply(value, paramValue); + public MultiFloatValueInFloatValueOutRequireAllFunction(String name, TwoFloatInFloatOutLambda lambda, FloatValue[] params) { + super(name, lambda, params); } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public float getFloat() { + float value = params[0].getFloat(); + exists = params[0].exists(); + for (int i = 1; i < params.length && exists; ++i) { + value = lambda.apply(value, params[i].getFloat()); + exists = params[i].exists(); + } + return value; + } } -} -class TwoDateValueInDateValueOutFunction extends AbstractDateValue { - private final DateValue param1; - private final DateValue param2; - private final TwoLongInLongOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - public TwoDateValueInDateValueOutFunction(String name, TwoLongInLongOutLambda lambda, DateValue param1, DateValue param2) { - this.name = name; - this.lambda = lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); - } + static class MultiFloatValueInFloatValueOutRequireOneFunction extends MultiFloatValueInFloatValueOutFunction { - private boolean exists = false; + public MultiFloatValueInFloatValueOutRequireOneFunction(String name, 
TwoFloatInFloatOutLambda lambda, FloatValue[] params) { + super(name, lambda, params); + } - @Override - public long getLong() { - long value = lambda.apply(param1.getLong(), param2.getLong()); - exists = param1.exists() && param2.exists(); - return value; - } - @Override - public boolean exists() { - return exists; + @Override + public float getFloat() { + int i = -1; + float value = 0; + exists = false; + while (++i < params.length) { + value = params[i].getFloat(); + exists = params[i].exists(); + if (exists) { + break; + } + } + while (++i < params.length) { + temp = params[i].getFloat(); + if (params[i].exists()) { + value = lambda.apply(value, temp); + } + } + return value; + } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DateValueDateStreamInDateStreamOutFunction extends AbstractDateValueStream { - private final DateValue param1; - private final DateValueStream param2; - private final TwoLongInLongOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + static class DoubleValueInDoubleValueOutFunction extends AbstractDoubleValue { + private final DoubleValue param; + private final DoubleInDoubleOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; - public DateValueDateStreamInDateStreamOutFunction(String name, TwoLongInLongOutLambda lambda, DateValue param1, DateValueStream param2) { - this.name = name; - this.lambda = lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); - } + public DoubleValueInDoubleValueOutFunction(String name, DoubleInDoubleOutLambda lambda, DoubleValue param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } + + private boolean exists = false; - @Override - public void streamLongs(LongConsumer cons) { - long value1 = param1.getLong(); - if (param1.exists()) { - param2.streamLongs(value2 -> cons.accept(lambda.apply(value1,value2))); + @Override + public double getDouble() { + double value = lambda.apply(param.getDouble()); + exists = param.exists(); + return value; + } + @Override + public boolean exists() { + return exists; } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class DateStreamDateValueInDateStreamOutFunction extends AbstractDateValueStream { - private final DateValueStream param1; - private final DateValue param2; - private final TwoLongInLongOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - public DateStreamDateValueInDateStreamOutFunction(String name, TwoLongInLongOutLambda lambda, DateValueStream param1, DateValue param2) { - this.name = name; - this.lambda = 
lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); - } + static class DoubleStreamInDoubleStreamOutFunction extends AbstractDoubleValueStream { + private final DoubleValueStream param; + private final DoubleInDoubleOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public void streamLongs(LongConsumer cons) { - long value2 = param2.getLong(); - if (param2.exists()) { - param1.streamLongs(value1 -> cons.accept(lambda.apply(value1,value2))); + public DoubleStreamInDoubleStreamOutFunction(String name, DoubleInDoubleOutLambda lambda, DoubleValueStream param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -abstract class MultiDateValueInDateValueOutFunction extends AbstractDateValue { - protected final DateValue[] params; - protected final TwoLongInLongOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + @Override + public void streamDoubles(DoubleConsumer cons) { + param.streamDoubles(value -> cons.accept(lambda.apply(value))); + } - public MultiDateValueInDateValueOutFunction(String name, TwoLongInLongOutLambda lambda, DateValue[] params) { - this.name = name; - this.lambda = lambda; - this.params = params; - this.exprStr = AnalyticsValueStream.createExpressionString(name,params); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,params); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - protected boolean exists = false; - protected long temp; - @Override - public boolean exists() { - return exists; - } + static class DoubleStreamInDoubleValueOutFunction extends AbstractDoubleValue implements DoubleConsumer { + private final DoubleValueStream param; + private final TwoDoubleInDoubleOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class MultiDateValueInDateValueOutRequireAllFunction extends MultiDateValueInDateValueOutFunction { + public DoubleStreamInDoubleValueOutFunction(String name, TwoDoubleInDoubleOutLambda lambda, DoubleValueStream param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } - public MultiDateValueInDateValueOutRequireAllFunction(String name, TwoLongInLongOutLambda lambda, DateValue[] params) { - super(name, lambda, params); - } + private boolean exists = false; + private double value; - @Override - public long getLong() { - long value = 
params[0].getLong(); - exists = params[0].exists(); - for (int i = 1; i < params.length && exists; ++i) { - value = lambda.apply(value, params[i].getLong()); - exists = params[i].exists(); + @Override + public double getDouble() { + exists = false; + param.streamDoubles(this); + return value; } - return value; - } -} -class MultiDateValueInDateValueOutRequireOneFunction extends MultiDateValueInDateValueOutFunction { - - public MultiDateValueInDateValueOutRequireOneFunction(String name, TwoLongInLongOutLambda lambda, DateValue[] params) { - super(name, lambda, params); - } - - @Override - public long getLong() { - int i = -1; - long value = 0; - exists = false; - while (++i < params.length) { - value = params[i].getLong(); - exists = params[i].exists(); - if (exists) { - break; - } + @Override + public boolean exists() { + return exists; } - while (++i < params.length) { - temp = params[i].getLong(); - if (params[i].exists()) { - value = lambda.apply(value, temp); + public void accept(double paramValue) { + if (!exists) { + exists = true; + value = paramValue; + } else { + value = lambda.apply(value, paramValue); } } - return value; - } -} -class StringValueInStringValueOutFunction extends AbstractStringValue { - private final StringValue param; - private final StringInStringOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - public StringValueInStringValueOutFunction(String name, StringInStringOutLambda lambda, StringValue param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - private boolean exists = false; + static class TwoDoubleValueInDoubleValueOutFunction extends AbstractDoubleValue { + private final DoubleValue param1; + private final DoubleValue param2; + private final TwoDoubleInDoubleOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public TwoDoubleValueInDoubleValueOutFunction(String name, TwoDoubleInDoubleOutLambda lambda, DoubleValue param1, DoubleValue param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } - @Override - public String getString() { - String value = lambda.apply(param.getString()); - exists = param.exists(); - return value; - } - @Override - public boolean exists() { - return exists; - } + private boolean exists = false; - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class StringStreamInStringStreamOutFunction extends AbstractStringValueStream { - private final StringValueStream param; - private final StringInStringOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + @Override + public double getDouble() { + double value = lambda.apply(param1.getDouble(), param2.getDouble()); + exists = 
param1.exists() && param2.exists(); + return value; + } + @Override + public boolean exists() { + return exists; + } - public StringStreamInStringStreamOutFunction(String name, StringInStringOutLambda lambda, StringValueStream param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public void streamStrings(Consumer<String> cons) { - param.streamStrings(value -> cons.accept(lambda.apply(value))); - } + static class DoubleValueDoubleStreamInDoubleStreamOutFunction extends AbstractDoubleValueStream { + private final DoubleValue param1; + private final DoubleValueStream param2; + private final TwoDoubleInDoubleOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public DoubleValueDoubleStreamInDoubleStreamOutFunction(String name, TwoDoubleInDoubleOutLambda lambda, DoubleValue param1, DoubleValueStream param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class StringStreamInStringValueOutFunction extends AbstractStringValue implements Consumer<String> { - private final StringValueStream param; - private final TwoStringInStringOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - - public StringStreamInStringValueOutFunction(String name, TwoStringInStringOutLambda lambda, StringValueStream param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); - } - - private boolean exists = false; - private String value; - - @Override - public String getString() { - exists = false; - param.streamStrings(this); - return value; - } - @Override - public boolean exists() { - return exists; - } - public void accept(String paramValue) { - if (!exists) { - exists = true; - value = paramValue; - } else { - value = lambda.apply(value, paramValue); + @Override + public void streamDoubles(DoubleConsumer cons) { + double value1 = param1.getDouble(); + if (param1.exists()) { + param2.streamDoubles(value2 -> cons.accept(lambda.apply(value1,value2))); + } } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class TwoStringValueInStringValueOutFunction extends AbstractStringValue { - private final StringValue param1; - private final StringValue
param2; - private final TwoStringInStringOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; - public TwoStringValueInStringValueOutFunction(String name, TwoStringInStringOutLambda lambda, StringValue param1, StringValue param2) { - this.name = name; - this.lambda = lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); - } + static class DoubleStreamDoubleValueInDoubleStreamOutFunction extends AbstractDoubleValueStream { + private final DoubleValueStream param1; + private final DoubleValue param2; + private final TwoDoubleInDoubleOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public DoubleStreamDoubleValueInDoubleStreamOutFunction(String name, TwoDoubleInDoubleOutLambda lambda, DoubleValueStream param1, DoubleValue param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } - private boolean exists = false; + @Override + public void streamDoubles(DoubleConsumer cons) { + double value2 = param2.getDouble(); + if (param2.exists()) { + param1.streamDoubles(value1 -> cons.accept(lambda.apply(value1,value2))); + } + } - @Override - public String getString() { - String value = lambda.apply(param1.getString(), param2.getString()); - exists = param1.exists() && param2.exists(); - return value; - } - @Override - public boolean exists() { - return exists; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public String getName() { - return name; + abstract static class MultiDoubleValueInDoubleValueOutFunction extends AbstractDoubleValue { + protected final DoubleValue[] params; + protected final TwoDoubleInDoubleOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public MultiDoubleValueInDoubleValueOutFunction(String name, TwoDoubleInDoubleOutLambda lambda, DoubleValue[] params) { + this.name = name; + this.lambda = lambda; + this.params = params; + this.exprStr = AnalyticsValueStream.createExpressionString(name,params); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,params); + } + + protected boolean exists = false; + protected double temp; + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public String getExpressionStr() { - return exprStr; + + static class MultiDoubleValueInDoubleValueOutRequireAllFunction extends MultiDoubleValueInDoubleValueOutFunction { + + public MultiDoubleValueInDoubleValueOutRequireAllFunction(String name, TwoDoubleInDoubleOutLambda lambda, DoubleValue[] params) { + super(name, lambda, params); + } + + @Override + public double getDouble() { + double value = params[0].getDouble(); + exists = params[0].exists(); + for (int i = 1; i < params.length && 
exists; ++i) { + value = lambda.apply(value, params[i].getDouble()); + exists = params[i].exists(); + } + return value; + } } - @Override - public ExpressionType getExpressionType() { - return funcType; + + static class MultiDoubleValueInDoubleValueOutRequireOneFunction extends MultiDoubleValueInDoubleValueOutFunction { + + public MultiDoubleValueInDoubleValueOutRequireOneFunction(String name, TwoDoubleInDoubleOutLambda lambda, DoubleValue[] params) { + super(name, lambda, params); + } + + @Override + public double getDouble() { + int i = -1; + double value = 0; + exists = false; + while (++i < params.length) { + value = params[i].getDouble(); + exists = params[i].exists(); + if (exists) { + break; + } + } + while (++i < params.length) { + temp = params[i].getDouble(); + if (params[i].exists()) { + value = lambda.apply(value, temp); + } + } + return value; + } } -} -class DateValueInDateValueOutFunction extends AbstractDateValue { - private final DateValue param; - private final LongInLongOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + static class DateValueInDateValueOutFunction extends AbstractDateValue { + private final DateValue param; + private final LongInLongOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public DateValueInDateValueOutFunction(String name, LongInLongOutLambda lambda, DateValue param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } - public DateValueInDateValueOutFunction(String name, LongInLongOutLambda lambda, DateValue param) { - this.name = name; - this.lambda = lambda; - this.param = param; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + private boolean exists = false; + + @Override + public long getLong() { + long value = lambda.apply(param.getLong()); + exists = param.exists(); + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - private boolean exists = false; + static class DateStreamInDateStreamOutFunction extends AbstractDateValueStream { + private final DateValueStream param; + private final LongInLongOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public DateStreamInDateStreamOutFunction(String name, LongInLongOutLambda lambda, DateValueStream param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } + + @Override + public void streamLongs(LongConsumer cons) { + param.streamLongs(value -> cons.accept(lambda.apply(value))); + } - @Override - public void streamStrings(Consumer<String> cons) { - String value1 = param1.getString(); - if (param1.exists()) { - param2.streamStrings(value2 ->
cons.accept(lambda.apply(value1,value2))); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } } + static class DateStreamInDateValueOutFunction extends AbstractDateValue implements LongConsumer { + private final DateValueStream param; + private final TwoLongInLongOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public DateStreamInDateValueOutFunction(String name, TwoLongInLongOutLambda lambda, DateValueStream param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } + + private boolean exists = false; + private long value; - @Override - public String getName() { - return name; + @Override + public long getLong() { + exists = false; + param.streamLongs(this); + return value; + } + @Override + public boolean exists() { + return exists; + } + public void accept(long paramValue) { + if (!exists) { + exists = true; + value = paramValue; + } else { + value = lambda.apply(value, paramValue); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public String getExpressionStr() { - return exprStr; + static class TwoDateValueInDateValueOutFunction extends AbstractDateValue { + private final DateValue param1; + private final DateValue param2; + private final TwoLongInLongOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public TwoDateValueInDateValueOutFunction(String name, TwoLongInLongOutLambda lambda, DateValue param1, DateValue param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } + + private boolean exists = false; + + @Override + public long getLong() { + long value = lambda.apply(param1.getLong(), param2.getLong()); + exists = param1.exists() && param2.exists(); + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public ExpressionType getExpressionType() { - return funcType; + static class DateValueDateStreamInDateStreamOutFunction extends AbstractDateValueStream { + private final DateValue param1; + private final DateValueStream param2; + private final TwoLongInLongOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public DateValueDateStreamInDateStreamOutFunction(String name, TwoLongInLongOutLambda lambda, DateValue param1, DateValueStream param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = 
AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } + + @Override + public void streamLongs(LongConsumer cons) { + long value1 = param1.getLong(); + if (param1.exists()) { + param2.streamLongs(value2 -> cons.accept(lambda.apply(value1,value2))); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class StringStreamStringValueInStringStreamOutFunction extends AbstractStringValueStream { - private final StringValueStream param1; - private final StringValue param2; - private final TwoStringInStringOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + static class DateStreamDateValueInDateStreamOutFunction extends AbstractDateValueStream { + private final DateValueStream param1; + private final DateValue param2; + private final TwoLongInLongOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public DateStreamDateValueInDateStreamOutFunction(String name, TwoLongInLongOutLambda lambda, DateValueStream param1, DateValue param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } - public StringStreamStringValueInStringStreamOutFunction(String name, TwoStringInStringOutLambda lambda, StringValueStream param1, StringValue param2) { - this.name = name; - this.lambda = lambda; - this.param1 = param1; - this.param2 = param2; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + @Override + public void streamLongs(LongConsumer cons) { + long value2 = param2.getLong(); + if (param2.exists()) { + param1.streamLongs(value1 -> cons.accept(lambda.apply(value1,value2))); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public void streamStrings(Consumer<String> cons) { - String value2 = param2.getString(); - if (param2.exists()) { - param1.streamStrings(value1 -> cons.accept(lambda.apply(value1,value2))); + abstract static class MultiDateValueInDateValueOutFunction extends AbstractDateValue { + protected final DateValue[] params; + protected final TwoLongInLongOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public MultiDateValueInDateValueOutFunction(String name, TwoLongInLongOutLambda lambda, DateValue[] params) { + this.name = name; + this.lambda = lambda; + this.params = params; + this.exprStr = AnalyticsValueStream.createExpressionString(name,params); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,params); + } + + protected boolean exists = false; + protected long temp; + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } } + static class
MultiDateValueInDateValueOutRequireAllFunction extends LambdaFunction.MultiDateValueInDateValueOutFunction { + + public MultiDateValueInDateValueOutRequireAllFunction(String name, TwoLongInLongOutLambda lambda, DateValue[] params) { + super(name, lambda, params); + } - @Override - public String getName() { - return name; + @Override + public long getLong() { + long value = params[0].getLong(); + exists = params[0].exists(); + for (int i = 1; i < params.length && exists; ++i) { + value = lambda.apply(value, params[i].getLong()); + exists = params[i].exists(); + } + return value; + } } - @Override - public String getExpressionStr() { - return exprStr; + static class MultiDateValueInDateValueOutRequireOneFunction extends LambdaFunction.MultiDateValueInDateValueOutFunction { + + public MultiDateValueInDateValueOutRequireOneFunction(String name, TwoLongInLongOutLambda lambda, DateValue[] params) { + super(name, lambda, params); + } + + @Override + public long getLong() { + int i = -1; + long value = 0; + exists = false; + while (++i < params.length) { + value = params[i].getLong(); + exists = params[i].exists(); + if (exists) { + break; + } + } + while (++i < params.length) { + temp = params[i].getLong(); + if (params[i].exists()) { + value = lambda.apply(value, temp); + } + } + return value; + } } - @Override - public ExpressionType getExpressionType() { - return funcType; + static class StringValueInStringValueOutFunction extends AbstractStringValue { + private final StringValue param; + private final StringInStringOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public StringValueInStringValueOutFunction(String name, StringInStringOutLambda lambda, StringValue param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } + + private boolean exists = false; + + @Override + public String getString() { + String value = lambda.apply(param.getString()); + exists = param.exists(); + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -abstract class MultiStringValueInStringValueOutFunction extends AbstractStringValue { - protected final StringValue[] params; - protected final TwoStringInStringOutLambda lambda; - private final String name; - private final String exprStr; - private final ExpressionType funcType; + static class StringStreamInStringStreamOutFunction extends AbstractStringValueStream { + private final StringValueStream param; + private final StringInStringOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public StringStreamInStringStreamOutFunction(String name, StringInStringOutLambda lambda, StringValueStream param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } + + @Override + public void streamStrings(Consumer<String> cons) { + param.streamStrings(value -> cons.accept(lambda.apply(value))); + } - public MultiStringValueInStringValueOutFunction(String name,
TwoStringInStringOutLambda lambda, StringValue[] params) { - this.name = name; - this.lambda = lambda; - this.params = params; - this.exprStr = AnalyticsValueStream.createExpressionString(name,params); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,params); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } + static class StringStreamInStringValueOutFunction extends AbstractStringValue implements Consumer<String> { + private final StringValueStream param; + private final TwoStringInStringOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public StringStreamInStringValueOutFunction(String name, TwoStringInStringOutLambda lambda, StringValueStream param) { + this.name = name; + this.lambda = lambda; + this.param = param; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param); + } + + private boolean exists = false; + private String value; + + @Override + public String getString() { + exists = false; + param.streamStrings(this); + return value; + } + @Override + public boolean exists() { + return exists; + } + public void accept(String paramValue) { + if (!exists) { + exists = true; + value = paramValue; + } else { + value = lambda.apply(value, paramValue); + } + } - protected boolean exists = false; - protected String temp = null; - @Override - public boolean exists() { - return exists; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } + static class TwoStringValueInStringValueOutFunction extends AbstractStringValue { + private final StringValue param1; + private final StringValue param2; + private final TwoStringInStringOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public TwoStringValueInStringValueOutFunction(String name, TwoStringInStringOutLambda lambda, StringValue param1, StringValue param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } + + private boolean exists = false; - @Override - public String getName() { - return name; + @Override + public String getString() { + String value = lambda.apply(param1.getString(), param2.getString()); + exists = param1.exists() && param2.exists(); + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public String getExpressionStr() { - return exprStr; + static class StringValueStringStreamInStringStreamOutFunction extends AbstractStringValueStream { + private final StringValue param1; + private final StringValueStream param2; + private final TwoStringInStringOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public
StringValueStringStreamInStringStreamOutFunction(String name, TwoStringInStringOutLambda lambda, StringValue param1, StringValueStream param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } + + @Override + public void streamStrings(Consumer<String> cons) { + String value1 = param1.getString(); + if (param1.exists()) { + param2.streamStrings(value2 -> cons.accept(lambda.apply(value1,value2))); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public ExpressionType getExpressionType() { - return funcType; + static class StringStreamStringValueInStringStreamOutFunction extends AbstractStringValueStream { + private final StringValueStream param1; + private final StringValue param2; + private final TwoStringInStringOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public StringStreamStringValueInStringStreamOutFunction(String name, TwoStringInStringOutLambda lambda, StringValueStream param1, StringValue param2) { + this.name = name; + this.lambda = lambda; + this.param1 = param1; + this.param2 = param2; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param1,param2); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,param1,param2); + } + + @Override + public void streamStrings(Consumer<String> cons) { + String value2 = param2.getString(); + if (param2.exists()) { + param1.streamStrings(value1 -> cons.accept(lambda.apply(value1,value2))); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class MultiStringValueInStringValueOutRequireAllFunction extends MultiStringValueInStringValueOutFunction { - public MultiStringValueInStringValueOutRequireAllFunction(String name, TwoStringInStringOutLambda lambda, StringValue[] params) { - super(name, lambda, params); + abstract static class MultiStringValueInStringValueOutFunction extends AbstractStringValue { + protected final StringValue[] params; + protected final TwoStringInStringOutLambda lambda; + private final String name; + private final String exprStr; + private final ExpressionType funcType; + + public MultiStringValueInStringValueOutFunction(String name, TwoStringInStringOutLambda lambda, StringValue[] params) { + this.name = name; + this.lambda = lambda; + this.params = params; + this.exprStr = AnalyticsValueStream.createExpressionString(name,params); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,params); + } + + protected boolean exists = false; + protected String temp = null; + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public String getString() { - String value = params[0].getString(); - exists = params[0].exists(); - for (int i = 1; i < params.length && exists; ++i) { - temp = params[i].getString(); - if
(params[i].exists()) { - value = lambda.apply(value, temp); - } else { - exists = false; - value = null; + static class MultiStringValueInStringValueOutRequireAllFunction extends MultiStringValueInStringValueOutFunction { + + public MultiStringValueInStringValueOutRequireAllFunction(String name, TwoStringInStringOutLambda lambda, StringValue[] params) { + super(name, lambda, params); + } + + @Override + public String getString() { + String value = params[0].getString(); + exists = params[0].exists(); + for (int i = 1; i < params.length && exists; ++i) { + temp = params[i].getString(); + if (params[i].exists()) { + value = lambda.apply(value, temp); + } else { + exists = false; + value = null; + } } + return value; } - return value; - } -} -class MultiStringValueInStringValueOutRequireOneFunction extends MultiStringValueInStringValueOutFunction { - - public MultiStringValueInStringValueOutRequireOneFunction(String name, TwoStringInStringOutLambda lambda, StringValue[] params) { - super(name, lambda, params); - } - - @Override - public String getString() { - int i = -1; - String value = null; - exists = false; - while (++i < params.length) { - value = params[i].getString(); - exists = params[i].exists(); - if (exists) { - break; - } + + static class MultiStringValueInStringValueOutRequireOneFunction extends MultiStringValueInStringValueOutFunction { + + public MultiStringValueInStringValueOutRequireOneFunction(String name, TwoStringInStringOutLambda lambda, StringValue[] params) { + super(name, lambda, params); } - while (++i < params.length) { - temp = params[i].getString(); - if (params[i].exists()) { - value = lambda.apply(value, temp); + + @Override + public String getString() { + int i = -1; + String value = null; + exists = false; + while (++i < params.length) { + value = params[i].getString(); + exists = params[i].exists(); + if (exists) { + break; + } } + while (++i < params.length) { + temp = params[i].getString(); + if (params[i].exists()) { + value = lambda.apply(value, temp); + } + } + return value; } - return value; } -} \ No newline at end of file +} + diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/RemoveFunction.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/RemoveFunction.java index 76605d675316..bda118097d07 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/RemoveFunction.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/RemoveFunction.java @@ -131,667 +131,684 @@ public class RemoveFunction { } return new StreamRemoveFunction(baseExpr,removeExpr); }); -} -class StreamRemoveFunction extends AbstractAnalyticsValueStream { - private final AnalyticsValueStream baseExpr; - private final AnalyticsValue removeExpr; - public static final String name = RemoveFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public StreamRemoveFunction(AnalyticsValueStream baseExpr, AnalyticsValue removeExpr) throws SolrException { - this.baseExpr = baseExpr; - this.removeExpr = removeExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); - } - @Override - public void streamObjects(Consumer<Object> cons) { - Object removeValue = removeExpr.getObject(); - if (removeExpr.exists()) { - baseExpr.streamObjects(value -> { - if (!removeValue.equals(value)) cons.accept(value); - }); - } else { -
baseExpr.streamObjects(cons); + static class StreamRemoveFunction extends AbstractAnalyticsValueStream { + private final AnalyticsValueStream baseExpr; + private final AnalyticsValue removeExpr; + public static final String name = RemoveFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public StreamRemoveFunction(AnalyticsValueStream baseExpr, AnalyticsValue removeExpr) throws SolrException { + this.baseExpr = baseExpr; + this.removeExpr = removeExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); } - } - @Override - public String getName() { - return name; - } + @Override + public void streamObjects(Consumer<Object> cons) { + Object removeValue = removeExpr.getObject(); + if (removeExpr.exists()) { + baseExpr.streamObjects(value -> { + if (!removeValue.equals(value)) cons.accept(value); + }); + } else { + baseExpr.streamObjects(cons); + } + } - @Override - public String getExpressionStr() { - return exprStr; - } + @Override + public String getName() { + return name; + } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class ValueRemoveFunction extends AbstractAnalyticsValue { - private final AnalyticsValue baseExpr; - private final AnalyticsValue removeExpr; - public static final String name = RemoveFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public ValueRemoveFunction(AnalyticsValue baseExpr, AnalyticsValue removeExpr) throws SolrException { - this.baseExpr = baseExpr; - this.removeExpr = removeExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); + @Override + public String getExpressionStr() { + return exprStr; + } + + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - boolean exists = false; + static class ValueRemoveFunction extends AbstractAnalyticsValue { + private final AnalyticsValue baseExpr; + private final AnalyticsValue removeExpr; + public static final String name = RemoveFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public Object getObject() { - Object value = baseExpr.getObject(); - exists = false; - if (baseExpr.exists()) { - exists = value.equals(removeExpr.getObject()) ? (removeExpr.exists() ? false : true) : true; + public ValueRemoveFunction(AnalyticsValue baseExpr, AnalyticsValue removeExpr) throws SolrException { + this.baseExpr = baseExpr; + this.removeExpr = removeExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); } - return value; - } + boolean exists = false; - @Override - public boolean exists() { - return exists; - } + @Override + public Object getObject() { + Object value = baseExpr.getObject(); + exists = false; + if (baseExpr.exists()) { + exists = value.equals(removeExpr.getObject()) ? (removeExpr.exists() ?
false : true) : true; + } + return value; + } - @Override - public String getExpressionStr() { - return exprStr; - } + @Override + public boolean exists() { + return exists; + } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class BooleanStreamRemoveFunction extends AbstractBooleanValueStream { - private final BooleanValueStream baseExpr; - private final BooleanValue removeExpr; - public static final String name = RemoveFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public BooleanStreamRemoveFunction(BooleanValueStream baseExpr, BooleanValue removeExpr) throws SolrException { - this.baseExpr = baseExpr; - this.removeExpr = removeExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); - } + @Override + public String getName() { + return name; + } - @Override - public void streamBooleans(BooleanConsumer cons) { - boolean removeValue = removeExpr.getBoolean(); - if (removeExpr.exists()) { - baseExpr.streamBooleans(value -> { - if (removeValue != value) cons.accept(value); - }); - } else { - baseExpr.streamBooleans(cons); + @Override + public String getExpressionStr() { + return exprStr; } - } - @Override - public String getName() { - return name; + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public String getExpressionStr() { - return exprStr; - } + static class BooleanStreamRemoveFunction extends AbstractBooleanValueStream { + private final BooleanValueStream baseExpr; + private final BooleanValue removeExpr; + public static final String name = RemoveFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class BooleanRemoveFunction extends AbstractBooleanValue { - private final BooleanValue baseExpr; - private final BooleanValue removeExpr; - public static final String name = RemoveFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public BooleanRemoveFunction(BooleanValue baseExpr, BooleanValue removeExpr) throws SolrException { - this.baseExpr = baseExpr; - this.removeExpr = removeExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); - } + public BooleanStreamRemoveFunction(BooleanValueStream baseExpr, BooleanValue removeExpr) throws SolrException { + this.baseExpr = baseExpr; + this.removeExpr = removeExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); + } - boolean exists = false; + @Override + public void streamBooleans(BooleanConsumer cons) { + boolean removeValue = removeExpr.getBoolean(); + if (removeExpr.exists()) { + baseExpr.streamBooleans(value -> { + if (removeValue != value) cons.accept(value); + }); + } else { + baseExpr.streamBooleans(cons); + } + } - @Override - public boolean getBoolean() { - boolean value = baseExpr.getBoolean(); - exists = false; - if (baseExpr.exists()) { - exists = value==removeExpr.getBoolean() ? (removeExpr.exists() ? 
false : true) : true; + @Override + public String getName() { + return name; } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class IntStreamRemoveFunction extends AbstractIntValueStream { - private final IntValueStream baseExpr; - private final IntValue removeExpr; - public static final String name = RemoveFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public IntStreamRemoveFunction(IntValueStream baseExpr, IntValue removeExpr) throws SolrException { - this.baseExpr = baseExpr; - this.removeExpr = removeExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); - } + @Override + public String getExpressionStr() { + return exprStr; + } - @Override - public void streamInts(IntConsumer cons) { - int removeValue = removeExpr.getInt(); - if (removeExpr.exists()) { - baseExpr.streamInts(value -> { - if (removeValue != value) cons.accept(value); - }); - } else { - baseExpr.streamInts(cons); + @Override + public ExpressionType getExpressionType() { + return funcType; } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class IntRemoveFunction extends AbstractIntValue { - private final IntValue baseExpr; - private final IntValue removeExpr; - public static final String name = RemoveFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public IntRemoveFunction(IntValue baseExpr, IntValue removeExpr) throws SolrException { - this.baseExpr = baseExpr; - this.removeExpr = removeExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); - } + static class BooleanRemoveFunction extends AbstractBooleanValue { + private final BooleanValue baseExpr; + private final BooleanValue removeExpr; + public static final String name = RemoveFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public BooleanRemoveFunction(BooleanValue baseExpr, BooleanValue removeExpr) throws SolrException { + this.baseExpr = baseExpr; + this.removeExpr = removeExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); + } - boolean exists = false; + boolean exists = false; - @Override - public int getInt() { - int value = baseExpr.getInt(); - exists = false; - if (baseExpr.exists()) { - exists = value==removeExpr.getInt() ? (removeExpr.exists() ? false : true) : true; + @Override + public boolean getBoolean() { + boolean value = baseExpr.getBoolean(); + exists = false; + if (baseExpr.exists()) { + exists = value==removeExpr.getBoolean() ? (removeExpr.exists() ? 
false : true) : true; + } + return value; + } + @Override + public boolean exists() { + return exists; } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class LongStreamRemoveFunction extends AbstractLongValueStream { - private final LongValueStream baseExpr; - private final LongValue removeExpr; - public static final String name = RemoveFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public LongStreamRemoveFunction(LongValueStream baseExpr, LongValue removeExpr) throws SolrException { - this.baseExpr = baseExpr; - this.removeExpr = removeExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public void streamLongs(LongConsumer cons) { - long removeValue = removeExpr.getLong(); - if (removeExpr.exists()) { - baseExpr.streamLongs(value -> { - if (removeValue != value) cons.accept(value); - }); - } else { - baseExpr.streamLongs(cons); + static class IntStreamRemoveFunction extends AbstractIntValueStream { + private final IntValueStream baseExpr; + private final IntValue removeExpr; + public static final String name = RemoveFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public IntStreamRemoveFunction(IntValueStream baseExpr, IntValue removeExpr) throws SolrException { + this.baseExpr = baseExpr; + this.removeExpr = removeExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class LongRemoveFunction extends AbstractLongValue { - private final LongValue baseExpr; - private final LongValue removeExpr; - public static final String name = RemoveFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public LongRemoveFunction(LongValue baseExpr, LongValue removeExpr) throws SolrException { - this.baseExpr = baseExpr; - this.removeExpr = removeExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); + @Override + public void streamInts(IntConsumer cons) { + int removeValue = removeExpr.getInt(); + if (removeExpr.exists()) { + baseExpr.streamInts(value -> { + if (removeValue != value) cons.accept(value); + }); + } else { + baseExpr.streamInts(cons); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - boolean exists = false; + static class IntRemoveFunction extends AbstractIntValue { + private final IntValue baseExpr; + private final IntValue 
removeExpr; + public static final String name = RemoveFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public long getLong() { - long value = baseExpr.getLong(); - exists = false; - if (baseExpr.exists()) { - exists = value==removeExpr.getLong() ? (removeExpr.exists() ? false : true) : true; + public IntRemoveFunction(IntValue baseExpr, IntValue removeExpr) throws SolrException { + this.baseExpr = baseExpr; + this.removeExpr = removeExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class FloatStreamRemoveFunction extends AbstractFloatValueStream { - private final FloatValueStream baseExpr; - private final FloatValue removeExpr; - public static final String name = RemoveFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public FloatStreamRemoveFunction(FloatValueStream baseExpr, FloatValue removeExpr) throws SolrException { - this.baseExpr = baseExpr; - this.removeExpr = removeExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); - } + boolean exists = false; - @Override - public void streamFloats(FloatConsumer cons) { - float removeValue = removeExpr.getFloat(); - if (removeExpr.exists()) { - baseExpr.streamFloats(value -> { - if (removeValue != value) cons.accept(value); - }); - } else { - baseExpr.streamFloats(cons); + @Override + public int getInt() { + int value = baseExpr.getInt(); + exists = false; + if (baseExpr.exists()) { + exists = value==removeExpr.getInt() ? (removeExpr.exists() ? 
false : true) : true; + } + return value; + } + @Override + public boolean exists() { + return exists; } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class FloatRemoveFunction extends AbstractFloatValue { - private final FloatValue baseExpr; - private final FloatValue removeExpr; - public static final String name = RemoveFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public FloatRemoveFunction(FloatValue baseExpr, FloatValue removeExpr) throws SolrException { - this.baseExpr = baseExpr; - this.removeExpr = removeExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - boolean exists = false; + static class LongStreamRemoveFunction extends AbstractLongValueStream { + private final LongValueStream baseExpr; + private final LongValue removeExpr; + public static final String name = RemoveFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public float getFloat() { - float value = baseExpr.getFloat(); - exists = false; - if (baseExpr.exists()) { - exists = value==removeExpr.getFloat() ? (removeExpr.exists() ? false : true) : true; + public LongStreamRemoveFunction(LongValueStream baseExpr, LongValue removeExpr) throws SolrException { + this.baseExpr = baseExpr; + this.removeExpr = removeExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DoubleStreamRemoveFunction extends AbstractDoubleValueStream { - private final DoubleValueStream baseExpr; - private final DoubleValue removeExpr; - public static final String name = RemoveFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public DoubleStreamRemoveFunction(DoubleValueStream baseExpr, DoubleValue removeExpr) throws SolrException { - this.baseExpr = baseExpr; - this.removeExpr = removeExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); - } + @Override + public void streamLongs(LongConsumer cons) { + long removeValue = removeExpr.getLong(); + if (removeExpr.exists()) { + baseExpr.streamLongs(value -> { + if (removeValue != value) cons.accept(value); + }); + } else { + baseExpr.streamLongs(cons); + } + } - @Override - public void streamDoubles(DoubleConsumer cons) { - double removeValue = removeExpr.getDouble(); - if (removeExpr.exists()) { - baseExpr.streamDoubles(value -> { - if (removeValue != value) cons.accept(value); - }); - } else { - baseExpr.streamDoubles(cons); + @Override + public String getName() { + return name; + } + @Override + public 
String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DoubleRemoveFunction extends AbstractDoubleValue { - private final DoubleValue baseExpr; - private final DoubleValue removeExpr; - public static final String name = RemoveFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public DoubleRemoveFunction(DoubleValue baseExpr, DoubleValue removeExpr) throws SolrException { - this.baseExpr = baseExpr; - this.removeExpr = removeExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); - } + static class LongRemoveFunction extends AbstractLongValue { + private final LongValue baseExpr; + private final LongValue removeExpr; + public static final String name = RemoveFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public LongRemoveFunction(LongValue baseExpr, LongValue removeExpr) throws SolrException { + this.baseExpr = baseExpr; + this.removeExpr = removeExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); + } - boolean exists = false; + boolean exists = false; - @Override - public double getDouble() { - double value = baseExpr.getDouble(); - exists = false; - if (baseExpr.exists()) { - exists = value==removeExpr.getDouble() ? (removeExpr.exists() ? false : true) : true; + @Override + public long getLong() { + long value = baseExpr.getLong(); + exists = false; + if (baseExpr.exists()) { + exists = value==removeExpr.getLong() ? (removeExpr.exists() ? 
false : true) : true; + } + return value; + } + @Override + public boolean exists() { + return exists; } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DateStreamRemoveFunction extends AbstractDateValueStream { - private final DateValueStream baseExpr; - private final DateValue removeExpr; - public static final String name = RemoveFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public DateStreamRemoveFunction(DateValueStream baseExpr, DateValue removeExpr) throws SolrException { - this.baseExpr = baseExpr; - this.removeExpr = removeExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public void streamLongs(LongConsumer cons) { - long removeValue = removeExpr.getLong(); - if (removeExpr.exists()) { - baseExpr.streamLongs(value -> { - if (removeValue != value) cons.accept(value); - }); - } else { - baseExpr.streamLongs(cons); + static class FloatStreamRemoveFunction extends AbstractFloatValueStream { + private final FloatValueStream baseExpr; + private final FloatValue removeExpr; + public static final String name = RemoveFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public FloatStreamRemoveFunction(FloatValueStream baseExpr, FloatValue removeExpr) throws SolrException { + this.baseExpr = baseExpr; + this.removeExpr = removeExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DateRemoveFunction extends AbstractDateValue { - private final DateValue baseExpr; - private final DateValue removeExpr; - public static final String name = RemoveFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public DateRemoveFunction(DateValue baseExpr, DateValue removeExpr) throws SolrException { - this.baseExpr = baseExpr; - this.removeExpr = removeExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); + @Override + public void streamFloats(FloatConsumer cons) { + float removeValue = removeExpr.getFloat(); + if (removeExpr.exists()) { + baseExpr.streamFloats(value -> { + if (removeValue != value) cons.accept(value); + }); + } else { + baseExpr.streamFloats(cons); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - boolean exists = false; + static class FloatRemoveFunction extends AbstractFloatValue { + private final FloatValue 
baseExpr; + private final FloatValue removeExpr; + public static final String name = RemoveFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public long getLong() { - long value = baseExpr.getLong(); - exists = false; - if (baseExpr.exists()) { - exists = value==removeExpr.getLong() ? (removeExpr.exists() ? false : true) : true; + public FloatRemoveFunction(FloatValue baseExpr, FloatValue removeExpr) throws SolrException { + this.baseExpr = baseExpr; + this.removeExpr = removeExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class StringStreamRemoveFunction extends AbstractStringValueStream { - private final StringValueStream baseExpr; - private final StringValue removeExpr; - public static final String name = RemoveFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public StringStreamRemoveFunction(StringValueStream baseExpr, StringValue removeExpr) throws SolrException { - this.baseExpr = baseExpr; - this.removeExpr = removeExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); - } + boolean exists = false; - @Override - public void streamStrings(Consumer cons) { - String removeValue = removeExpr.getString(); - if (removeExpr.exists()) { - baseExpr.streamStrings(value -> { - if (!removeValue.equals(value)) cons.accept(value); - }); - } else { - baseExpr.streamStrings(cons); + @Override + public float getFloat() { + float value = baseExpr.getFloat(); + exists = false; + if (baseExpr.exists()) { + exists = value==removeExpr.getFloat() ? (removeExpr.exists() ? 
false : true) : true; + } + return value; + } + @Override + public boolean exists() { + return exists; } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } -} -class StringRemoveFunction extends AbstractStringValue { - private final StringValue baseExpr; - private final StringValue removeExpr; - public static final String name = RemoveFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public StringRemoveFunction(StringValue baseExpr, StringValue removeExpr) throws SolrException { - this.baseExpr = baseExpr; - this.removeExpr = removeExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); + + static class DoubleStreamRemoveFunction extends AbstractDoubleValueStream { + private final DoubleValueStream baseExpr; + private final DoubleValue removeExpr; + public static final String name = RemoveFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public DoubleStreamRemoveFunction(DoubleValueStream baseExpr, DoubleValue removeExpr) throws SolrException { + this.baseExpr = baseExpr; + this.removeExpr = removeExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); + } + + @Override + public void streamDoubles(DoubleConsumer cons) { + double removeValue = removeExpr.getDouble(); + if (removeExpr.exists()) { + baseExpr.streamDoubles(value -> { + if (removeValue != value) cons.accept(value); + }); + } else { + baseExpr.streamDoubles(cons); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - boolean exists = false; + static class DoubleRemoveFunction extends AbstractDoubleValue { + private final DoubleValue baseExpr; + private final DoubleValue removeExpr; + public static final String name = RemoveFunction.name; + private final String exprStr; + private final ExpressionType funcType; - @Override - public String getString() { - String value = baseExpr.getString(); - exists = false; - if (baseExpr.exists()) { - exists = value.equals(removeExpr.getString()) ? (removeExpr.exists() ? false : true) : true; + public DoubleRemoveFunction(DoubleValue baseExpr, DoubleValue removeExpr) throws SolrException { + this.baseExpr = baseExpr; + this.removeExpr = removeExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); + } + + boolean exists = false; + + @Override + public double getDouble() { + double value = baseExpr.getDouble(); + exists = false; + if (baseExpr.exists()) { + exists = value==removeExpr.getDouble() ? (removeExpr.exists() ? 
false : true) : true; + } + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; } - return value; } - @Override - public boolean exists() { - return exists; + + static class DateStreamRemoveFunction extends AbstractDateValueStream { + private final DateValueStream baseExpr; + private final DateValue removeExpr; + public static final String name = RemoveFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public DateStreamRemoveFunction(DateValueStream baseExpr, DateValue removeExpr) throws SolrException { + this.baseExpr = baseExpr; + this.removeExpr = removeExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); + } + + @Override + public void streamLongs(LongConsumer cons) { + long removeValue = removeExpr.getLong(); + if (removeExpr.exists()) { + baseExpr.streamLongs(value -> { + if (removeValue != value) cons.accept(value); + }); + } else { + baseExpr.streamLongs(cons); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public String getName() { - return name; + static class DateRemoveFunction extends AbstractDateValue { + private final DateValue baseExpr; + private final DateValue removeExpr; + public static final String name = RemoveFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public DateRemoveFunction(DateValue baseExpr, DateValue removeExpr) throws SolrException { + this.baseExpr = baseExpr; + this.removeExpr = removeExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); + } + + boolean exists = false; + + @Override + public long getLong() { + long value = baseExpr.getLong(); + exists = false; + if (baseExpr.exists()) { + exists = value==removeExpr.getLong() ? (removeExpr.exists() ? 
false : true) : true; + } + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public String getExpressionStr() { - return exprStr; + + static class StringStreamRemoveFunction extends AbstractStringValueStream { + private final StringValueStream baseExpr; + private final StringValue removeExpr; + public static final String name = RemoveFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public StringStreamRemoveFunction(StringValueStream baseExpr, StringValue removeExpr) throws SolrException { + this.baseExpr = baseExpr; + this.removeExpr = removeExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); + } + + @Override + public void streamStrings(Consumer cons) { + String removeValue = removeExpr.getString(); + if (removeExpr.exists()) { + baseExpr.streamStrings(value -> { + if (!removeValue.equals(value)) cons.accept(value); + }); + } else { + baseExpr.streamStrings(cons); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } } - @Override - public ExpressionType getExpressionType() { - return funcType; + + static class StringRemoveFunction extends AbstractStringValue { + private final StringValue baseExpr; + private final StringValue removeExpr; + public static final String name = RemoveFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public StringRemoveFunction(StringValue baseExpr, StringValue removeExpr) throws SolrException { + this.baseExpr = baseExpr; + this.removeExpr = removeExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,removeExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,removeExpr); + } + + boolean exists = false; + + @Override + public String getString() { + String value = baseExpr.getString(); + exists = false; + if (baseExpr.exists()) { + exists = value.equals(removeExpr.getString()) ? (removeExpr.exists() ? 
false : true) : true;
+      }
+      return value;
+    }
+    @Override
+    public boolean exists() {
+      return exists;
+    }
+
+    @Override
+    public String getName() {
+      return name;
+    }
+    @Override
+    public String getExpressionStr() {
+      return exprStr;
+    }
+    @Override
+    public ExpressionType getExpressionType() {
+      return funcType;
+    }
   }
-}
\ No newline at end of file
+}
+
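Both files touched by this part of the patch repeat one mapping pattern per value type: a stream variant that filters or substitutes values as they are consumed, and a single-value variant that must also track whether its result "exists". The dense ternary used throughout, `value==removeExpr.getX() ? (removeExpr.exists() ? false : true) : true`, just means the result exists unless the base value equals a removal target that itself exists. A minimal sketch of that semantics, using plain `Optional` stand-ins for the Solr value/exists pair (illustrative only, not part of the patch; all names below are hypothetical):

    import java.util.Optional;

    public class RemoveSemanticsDemo {
      // remove(base, target): drop the base value only when it equals a target
      // value that itself exists; otherwise pass the base value through.
      static Optional<Integer> remove(Optional<Integer> base, Optional<Integer> target) {
        if (base.isPresent() && target.isPresent() && base.get().equals(target.get())) {
          return Optional.empty(); // matched an existing removal target -> no result
        }
        return base; // a missing base stays missing; everything else passes through
      }

      public static void main(String[] args) {
        System.out.println(remove(Optional.of(3), Optional.of(3)));   // Optional.empty
        System.out.println(remove(Optional.of(3), Optional.of(4)));   // Optional[3]
        System.out.println(remove(Optional.of(3), Optional.empty())); // Optional[3]
      }
    }

The ReplaceFunction diff that follows applies the same exists-tracking idea, except that a matched value is substituted with a fill value instead of being dropped.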
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/ReplaceFunction.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/ReplaceFunction.java
index 9c4204f5bd97..d7e853554ed9 100644
--- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/ReplaceFunction.java
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/mapping/ReplaceFunction.java
@@ -131,785 +131,802 @@ public class ReplaceFunction {
       return new StringStreamReplaceFunction((StringValueStream)baseExpr,(StringValue)compExpr,(StringValue)fillExpr);
     }
     if (baseExpr instanceof AnalyticsValue) {
-      return new ValueReplaceFunction((AnalyticsValue)baseExpr,(AnalyticsValue)compExpr,(AnalyticsValue)fillExpr);
+      return new ValueReplaceFunction((AnalyticsValue)baseExpr,compExpr,fillExpr);
     }
     return new StreamReplaceFunction(baseExpr,compExpr,fillExpr);
   });
-}
-class StreamReplaceFunction extends AbstractAnalyticsValueStream {
-  private final AnalyticsValueStream baseExpr;
-  private final AnalyticsValue compExpr;
-  private final AnalyticsValue fillExpr;
-  public static final String name = ReplaceFunction.name;
-  private final String exprStr;
-  private final ExpressionType funcType;
-
-  public StreamReplaceFunction(AnalyticsValueStream baseExpr, AnalyticsValue compExpr, AnalyticsValue fillExpr) throws SolrException {
-    this.baseExpr = baseExpr;
-    this.compExpr = compExpr;
-    this.fillExpr = fillExpr;
-    this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr);
-    this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr);
-  }
-  @Override
-  public void streamObjects(Consumer<Object> cons) {
-    Object compValue = compExpr.getObject();
-    if (compExpr.exists()) {
-      final Object fillValue = fillExpr.getObject();
-      final boolean fillExists = fillExpr.exists();
-      baseExpr.streamObjects(value -> {
-        if (value.equals(compValue)) {
-          if (fillExists) {
-            cons.accept(fillValue);
+  static class StreamReplaceFunction extends AbstractAnalyticsValueStream {
+    private final AnalyticsValueStream baseExpr;
+    private final AnalyticsValue compExpr;
+    private final AnalyticsValue fillExpr;
+    public static final String name = ReplaceFunction.name;
+    private final String exprStr;
+    private final ExpressionType funcType;
+
+    public StreamReplaceFunction(AnalyticsValueStream baseExpr, AnalyticsValue compExpr, AnalyticsValue fillExpr) throws SolrException {
+      this.baseExpr = baseExpr;
+      this.compExpr = compExpr;
+      this.fillExpr = fillExpr;
+      this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr);
+      this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr);
+    }
+
+    @Override
+    public void streamObjects(Consumer<Object> cons) {
+      Object compValue = compExpr.getObject();
+      if (compExpr.exists()) {
+        final Object fillValue = fillExpr.getObject();
+        final boolean fillExists = fillExpr.exists();
+        baseExpr.streamObjects(value -> {
+          if (value.equals(compValue)) {
+            if (fillExists) {
+              cons.accept(fillValue);
+            }
+          } else {
+            cons.accept(value);
           }
-        } else {
-          cons.accept(value);
-        }
-      });
-    }
-    else {
-      baseExpr.streamObjects(cons);
-    }
-  }
-
-  @Override
-  public String getName() {
-    return name;
-  }
-  @Override
-  public String getExpressionStr() {
-    return exprStr;
-  }
-  @Override
-  public ExpressionType getExpressionType() {
-    return funcType;
-  }
-}
-class ValueReplaceFunction extends AbstractAnalyticsValue {
-  private final AnalyticsValue baseExpr;
-  private final AnalyticsValue compExpr;
-  private final AnalyticsValue fillExpr;
-  public static final String name = ReplaceFunction.name;
-  private final String exprStr;
-  private final ExpressionType funcType;
-
-  public ValueReplaceFunction(AnalyticsValue baseExpr, AnalyticsValue compExpr, AnalyticsValue fillExpr) throws SolrException {
-    this.baseExpr = baseExpr;
-    this.compExpr = compExpr;
-    this.fillExpr = fillExpr;
-    this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr);
-    this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr);
-  }
-
-  boolean exists = false;
-
-  @Override
-  public Object getObject() {
-    Object value = baseExpr.getObject();
-    exists = baseExpr.exists();
-    Object comp = compExpr.getObject();
-    if (exists && compExpr.exists() && value.equals(comp)) {
-      value = fillExpr.getObject();
-      exists = fillExpr.exists();
-    }
-    return value;
-  }
-  @Override
-  public boolean exists() {
-    return exists;
-  }
-
-  @Override
-  public String getName() {
-    return name;
-  }
-  @Override
-  public String getExpressionStr() {
-    return exprStr;
-  }
-  @Override
-  public ExpressionType getExpressionType() {
-    return funcType;
-  }
-}
-class BooleanStreamReplaceFunction extends AbstractBooleanValueStream {
-  private final BooleanValueStream baseExpr;
-  private final BooleanValue compExpr;
-  private final BooleanValue fillExpr;
-  public static final String name = ReplaceFunction.name;
-  private final String exprStr;
-  private final ExpressionType funcType;
-
-  public BooleanStreamReplaceFunction(BooleanValueStream baseExpr, BooleanValue compExpr, BooleanValue fillExpr) throws SolrException {
-    this.baseExpr = baseExpr;
-    this.compExpr = compExpr;
-    this.fillExpr = fillExpr;
-    this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr);
-    this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr);
-  }
-
-  @Override
-  public void streamBooleans(BooleanConsumer cons) {
-    boolean compValue = compExpr.getBoolean();
-    if (compExpr.exists()) {
-      final boolean fillValue = fillExpr.getBoolean();
-      final boolean fillExists = fillExpr.exists();
-      baseExpr.streamBooleans(value -> {
-        if (value == compValue) {
-          if (fillExists) {
-            cons.accept(fillValue);
+        });
+      }
+      else {
+        baseExpr.streamObjects(cons);
+      }
+    }
+
+    @Override
+    public String getName() {
+      return name;
+    }
+    @Override
+    public String getExpressionStr() {
+      return exprStr;
+    }
+    @Override
+    public ExpressionType getExpressionType() {
+      return funcType;
+    }
+  }
+
+  static class ValueReplaceFunction extends AbstractAnalyticsValue {
+    private final AnalyticsValue baseExpr;
+    private final AnalyticsValue compExpr;
+    private final AnalyticsValue fillExpr;
+    public static final String name = ReplaceFunction.name;
+    private final String exprStr;
+    private final ExpressionType funcType;
+
+    public ValueReplaceFunction(AnalyticsValue baseExpr, AnalyticsValue compExpr, AnalyticsValue fillExpr) throws SolrException {
+      this.baseExpr = baseExpr;
+      this.compExpr = compExpr;
+      this.fillExpr = fillExpr;
+      this.exprStr = 
AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); + } + + boolean exists = false; + + @Override + public Object getObject() { + Object value = baseExpr.getObject(); + exists = baseExpr.exists(); + Object comp = compExpr.getObject(); + if (exists && compExpr.exists() && value.equals(comp)) { + value = fillExpr.getObject(); + exists = fillExpr.exists(); + } + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } + } + + static class BooleanStreamReplaceFunction extends AbstractBooleanValueStream { + private final BooleanValueStream baseExpr; + private final BooleanValue compExpr; + private final BooleanValue fillExpr; + public static final String name = ReplaceFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public BooleanStreamReplaceFunction(BooleanValueStream baseExpr, BooleanValue compExpr, BooleanValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.compExpr = compExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); + } + + @Override + public void streamBooleans(BooleanConsumer cons) { + boolean compValue = compExpr.getBoolean(); + if (compExpr.exists()) { + final boolean fillValue = fillExpr.getBoolean(); + final boolean fillExists = fillExpr.exists(); + baseExpr.streamBooleans(value -> { + if (value == compValue) { + if (fillExists) { + cons.accept(fillValue); + } + } else { + cons.accept(value); } - } else { - cons.accept(value); - } - }); - } - else { - baseExpr.streamBooleans(cons); - } - } - - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class BooleanReplaceFunction extends AbstractBooleanValue { - private final BooleanValue baseExpr; - private final BooleanValue compExpr; - private final BooleanValue fillExpr; - public static final String name = ReplaceFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public BooleanReplaceFunction(BooleanValue baseExpr, BooleanValue compExpr, BooleanValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.compExpr = compExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); - } - - boolean exists = false; - - @Override - public boolean getBoolean() { - boolean value = baseExpr.getBoolean(); - exists = baseExpr.exists(); - boolean comp = compExpr.getBoolean(); - if (exists && compExpr.exists() && value == comp) { - value = fillExpr.getBoolean(); - exists = fillExpr.exists(); - } - return value; - } - @Override - public boolean exists() { - return exists; - } - - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } 
-} -class IntStreamReplaceFunction extends AbstractIntValueStream { - private final IntValueStream baseExpr; - private final IntValue compExpr; - private final IntValue fillExpr; - public static final String name = ReplaceFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public IntStreamReplaceFunction(IntValueStream baseExpr, IntValue compExpr, IntValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.compExpr = compExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); - } - - @Override - public void streamInts(IntConsumer cons) { - int compValue = compExpr.getInt(); - if (compExpr.exists()) { - final int fillValue = fillExpr.getInt(); - final boolean fillExists = fillExpr.exists(); - baseExpr.streamInts(value -> { - if (value == compValue) { - if (fillExists) { - cons.accept(fillValue); + }); + } + else { + baseExpr.streamBooleans(cons); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } + } + + static class BooleanReplaceFunction extends AbstractBooleanValue { + private final BooleanValue baseExpr; + private final BooleanValue compExpr; + private final BooleanValue fillExpr; + public static final String name = ReplaceFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public BooleanReplaceFunction(BooleanValue baseExpr, BooleanValue compExpr, BooleanValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.compExpr = compExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); + } + + boolean exists = false; + + @Override + public boolean getBoolean() { + boolean value = baseExpr.getBoolean(); + exists = baseExpr.exists(); + boolean comp = compExpr.getBoolean(); + if (exists && compExpr.exists() && value == comp) { + value = fillExpr.getBoolean(); + exists = fillExpr.exists(); + } + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } + } + + static class IntStreamReplaceFunction extends AbstractIntValueStream { + private final IntValueStream baseExpr; + private final IntValue compExpr; + private final IntValue fillExpr; + public static final String name = ReplaceFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public IntStreamReplaceFunction(IntValueStream baseExpr, IntValue compExpr, IntValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.compExpr = compExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); + } + + @Override + public void streamInts(IntConsumer cons) { + int compValue = compExpr.getInt(); + if (compExpr.exists()) { + final int fillValue = fillExpr.getInt(); + final boolean fillExists 
= fillExpr.exists(); + baseExpr.streamInts(value -> { + if (value == compValue) { + if (fillExists) { + cons.accept(fillValue); + } + } else { + cons.accept(value); } - } else { - cons.accept(value); - } - }); - } - else { - baseExpr.streamInts(cons); - } - } - - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class IntReplaceFunction extends AbstractIntValue { - private final IntValue baseExpr; - private final IntValue compExpr; - private final IntValue fillExpr; - public static final String name = ReplaceFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public IntReplaceFunction(IntValue baseExpr, IntValue compExpr, IntValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.compExpr = compExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); - } - - boolean exists = false; - - @Override - public int getInt() { - int value = baseExpr.getInt(); - exists = baseExpr.exists(); - int comp = compExpr.getInt(); - if (exists && compExpr.exists() && value == comp) { - value = fillExpr.getInt(); - exists = fillExpr.exists(); - } - return value; - } - @Override - public boolean exists() { - return exists; - } - - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class LongStreamReplaceFunction extends AbstractLongValueStream { - private final LongValueStream baseExpr; - private final LongValue compExpr; - private final LongValue fillExpr; - public static final String name = ReplaceFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public LongStreamReplaceFunction(LongValueStream baseExpr, LongValue compExpr, LongValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.compExpr = compExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); - } - - @Override - public void streamLongs(LongConsumer cons) { - long compValue = compExpr.getLong(); - if (compExpr.exists()) { - final long fillValue = fillExpr.getLong(); - final boolean fillExists = fillExpr.exists(); - baseExpr.streamLongs(value -> { - if (value == compValue) { - if (fillExists) { - cons.accept(fillValue); + }); + } + else { + baseExpr.streamInts(cons); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } + } + + static class IntReplaceFunction extends AbstractIntValue { + private final IntValue baseExpr; + private final IntValue compExpr; + private final IntValue fillExpr; + public static final String name = ReplaceFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public IntReplaceFunction(IntValue baseExpr, IntValue compExpr, IntValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.compExpr = compExpr; + this.fillExpr = fillExpr; + 
this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); + } + + boolean exists = false; + + @Override + public int getInt() { + int value = baseExpr.getInt(); + exists = baseExpr.exists(); + int comp = compExpr.getInt(); + if (exists && compExpr.exists() && value == comp) { + value = fillExpr.getInt(); + exists = fillExpr.exists(); + } + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } + } + + static class LongStreamReplaceFunction extends AbstractLongValueStream { + private final LongValueStream baseExpr; + private final LongValue compExpr; + private final LongValue fillExpr; + public static final String name = ReplaceFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public LongStreamReplaceFunction(LongValueStream baseExpr, LongValue compExpr, LongValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.compExpr = compExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); + } + + @Override + public void streamLongs(LongConsumer cons) { + long compValue = compExpr.getLong(); + if (compExpr.exists()) { + final long fillValue = fillExpr.getLong(); + final boolean fillExists = fillExpr.exists(); + baseExpr.streamLongs(value -> { + if (value == compValue) { + if (fillExists) { + cons.accept(fillValue); + } + } else { + cons.accept(value); } - } else { - cons.accept(value); - } - }); - } - else { - baseExpr.streamLongs(cons); - } - } - - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class LongReplaceFunction extends AbstractLongValue { - private final LongValue baseExpr; - private final LongValue compExpr; - private final LongValue fillExpr; - public static final String name = ReplaceFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public LongReplaceFunction(LongValue baseExpr, LongValue compExpr, LongValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.compExpr = compExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); - } - - boolean exists = false; - - @Override - public long getLong() { - long value = baseExpr.getLong(); - exists = baseExpr.exists(); - long comp = compExpr.getLong(); - if (exists && compExpr.exists() && value == comp) { - value = fillExpr.getLong(); - exists = fillExpr.exists(); - } - return value; - } - @Override - public boolean exists() { - return exists; - } - - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class FloatStreamReplaceFunction extends AbstractFloatValueStream { - private final FloatValueStream 
baseExpr; - private final FloatValue compExpr; - private final FloatValue fillExpr; - public static final String name = ReplaceFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public FloatStreamReplaceFunction(FloatValueStream baseExpr, FloatValue compExpr, FloatValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.compExpr = compExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); - } - - @Override - public void streamFloats(FloatConsumer cons) { - float compValue = compExpr.getFloat(); - if (compExpr.exists()) { - final float fillValue = fillExpr.getFloat(); - final boolean fillExists = fillExpr.exists(); - baseExpr.streamFloats(value -> { - if (value == compValue) { - if (fillExists) { - cons.accept(fillValue); + }); + } + else { + baseExpr.streamLongs(cons); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } + } + + static class LongReplaceFunction extends AbstractLongValue { + private final LongValue baseExpr; + private final LongValue compExpr; + private final LongValue fillExpr; + public static final String name = ReplaceFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public LongReplaceFunction(LongValue baseExpr, LongValue compExpr, LongValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.compExpr = compExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); + } + + boolean exists = false; + + @Override + public long getLong() { + long value = baseExpr.getLong(); + exists = baseExpr.exists(); + long comp = compExpr.getLong(); + if (exists && compExpr.exists() && value == comp) { + value = fillExpr.getLong(); + exists = fillExpr.exists(); + } + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } + } + + static class FloatStreamReplaceFunction extends AbstractFloatValueStream { + private final FloatValueStream baseExpr; + private final FloatValue compExpr; + private final FloatValue fillExpr; + public static final String name = ReplaceFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public FloatStreamReplaceFunction(FloatValueStream baseExpr, FloatValue compExpr, FloatValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.compExpr = compExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); + } + + @Override + public void streamFloats(FloatConsumer cons) { + float compValue = compExpr.getFloat(); + if (compExpr.exists()) { + final float fillValue = fillExpr.getFloat(); + final boolean fillExists = fillExpr.exists(); + baseExpr.streamFloats(value -> { + if (value == compValue) { + if 
(fillExists) { + cons.accept(fillValue); + } + } else { + cons.accept(value); } - } else { - cons.accept(value); - } - }); - } - else { - baseExpr.streamFloats(cons); + }); + } + else { + baseExpr.streamFloats(cons); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } + } + + static class FloatReplaceFunction extends AbstractFloatValue { + private final FloatValue baseExpr; + private final FloatValue compExpr; + private final FloatValue fillExpr; + public static final String name = ReplaceFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public FloatReplaceFunction(FloatValue baseExpr, FloatValue compExpr, FloatValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.compExpr = compExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); + } + + boolean exists = false; + + @Override + public float getFloat() { + float value = baseExpr.getFloat(); + exists = baseExpr.exists(); + float comp = compExpr.getFloat(); + if (exists && compExpr.exists() && value == comp) { + value = fillExpr.getFloat(); + exists = fillExpr.exists(); + } + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } + } + + static class DoubleStreamReplaceFunction extends AbstractDoubleValueStream { + private final DoubleValueStream baseExpr; + private final DoubleValue compExpr; + private final DoubleValue fillExpr; + public static final String name = ReplaceFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public DoubleStreamReplaceFunction(DoubleValueStream baseExpr, DoubleValue compExpr, DoubleValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.compExpr = compExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); + } + + @Override + public void streamDoubles(DoubleConsumer cons) { + double compValue = compExpr.getDouble(); + if (compExpr.exists()) { + final double fillValue = fillExpr.getDouble(); + final boolean fillExists = fillExpr.exists(); + baseExpr.streamDoubles(value -> { + if (value == compValue) { + if (fillExists) { + cons.accept(fillValue); + } + } else { + cons.accept(value); + } + }); + } + else { + baseExpr.streamDoubles(cons); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } + } + + static class DoubleReplaceFunction extends AbstractDoubleValue { + private final DoubleValue baseExpr; + private final DoubleValue compExpr; + private final DoubleValue fillExpr; + public static final String name = ReplaceFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public DoubleReplaceFunction(DoubleValue baseExpr, DoubleValue compExpr, DoubleValue 
fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.compExpr = compExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); + } + + boolean exists = false; + + @Override + public double getDouble() { + double value = baseExpr.getDouble(); + exists = baseExpr.exists(); + double comp = compExpr.getDouble(); + if (exists && compExpr.exists() && value == comp) { + value = fillExpr.getDouble(); + exists = fillExpr.exists(); + } + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } + } + + static class DateStreamReplaceFunction extends AbstractDateValueStream { + private final DateValueStream baseExpr; + private final DateValue compExpr; + private final DateValue fillExpr; + public static final String name = ReplaceFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public DateStreamReplaceFunction(DateValueStream baseExpr, DateValue compExpr, DateValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.compExpr = compExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); + } + + @Override + public void streamLongs(LongConsumer cons) { + long compValue = compExpr.getLong(); + if (compExpr.exists()) { + final long fillValue = fillExpr.getLong(); + final boolean fillExists = fillExpr.exists(); + baseExpr.streamLongs(value -> { + if (value == compValue) { + if (fillExists) { + cons.accept(fillValue); + } + } else { + cons.accept(value); + } + }); + } + else { + baseExpr.streamLongs(cons); + } + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return funcType; + } + } + + static class DateReplaceFunction extends AbstractDateValue { + private final DateValue baseExpr; + private final DateValue compExpr; + private final DateValue fillExpr; + public static final String name = ReplaceFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public DateReplaceFunction(DateValue baseExpr, DateValue compExpr, DateValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.compExpr = compExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); + } + + boolean exists = false; + + @Override + public long getLong() { + long value = baseExpr.getLong(); + exists = baseExpr.exists(); + long comp = compExpr.getLong(); + if (exists && compExpr.exists() && value == comp) { + value = fillExpr.getLong(); + exists = fillExpr.exists(); + } + return value; + } + @Override + public boolean exists() { + return exists; + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return 
funcType; + } + } + + static class StringStreamReplaceFunction extends AbstractStringValueStream { + private final StringValueStream baseExpr; + private final StringValue compExpr; + private final StringValue fillExpr; + public static final String name = ReplaceFunction.name; + private final String exprStr; + private final ExpressionType funcType; + + public StringStreamReplaceFunction(StringValueStream baseExpr, StringValue compExpr, StringValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.compExpr = compExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); + } + + @Override + public void streamStrings(Consumer cons) { + String compValue = compExpr.getString(); + if (compExpr.exists()) { + final String fillValue = fillExpr.getString(); + final boolean fillExists = fillExpr.exists(); + baseExpr.streamStrings(value -> { + if (value.equals(compValue)) { + if (fillExists) { + cons.accept(fillValue); + } + } else { + cons.accept(value); + } + }); + } + else { + baseExpr.streamStrings(cons); + } } - } - - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class FloatReplaceFunction extends AbstractFloatValue { - private final FloatValue baseExpr; - private final FloatValue compExpr; - private final FloatValue fillExpr; - public static final String name = ReplaceFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public FloatReplaceFunction(FloatValue baseExpr, FloatValue compExpr, FloatValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.compExpr = compExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); - } - - boolean exists = false; - @Override - public float getFloat() { - float value = baseExpr.getFloat(); - exists = baseExpr.exists(); - float comp = compExpr.getFloat(); - if (exists && compExpr.exists() && value == comp) { - value = fillExpr.getFloat(); - exists = fillExpr.exists(); + @Override + public String getName() { + return name; } - return value; - } - @Override - public boolean exists() { - return exists; - } - - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DoubleStreamReplaceFunction extends AbstractDoubleValueStream { - private final DoubleValueStream baseExpr; - private final DoubleValue compExpr; - private final DoubleValue fillExpr; - public static final String name = ReplaceFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public DoubleStreamReplaceFunction(DoubleValueStream baseExpr, DoubleValue compExpr, DoubleValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.compExpr = compExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); - } - - @Override - public void 
streamDoubles(DoubleConsumer cons) { - double compValue = compExpr.getDouble(); - if (compExpr.exists()) { - final double fillValue = fillExpr.getDouble(); - final boolean fillExists = fillExpr.exists(); - baseExpr.streamDoubles(value -> { - if (value == compValue) { - if (fillExists) { - cons.accept(fillValue); - } - } else { - cons.accept(value); - } - }); + @Override + public String getExpressionStr() { + return exprStr; } - else { - baseExpr.streamDoubles(cons); + @Override + public ExpressionType getExpressionType() { + return funcType; } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DoubleReplaceFunction extends AbstractDoubleValue { - private final DoubleValue baseExpr; - private final DoubleValue compExpr; - private final DoubleValue fillExpr; - public static final String name = ReplaceFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public DoubleReplaceFunction(DoubleValue baseExpr, DoubleValue compExpr, DoubleValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.compExpr = compExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); - } + static class StringReplaceFunction extends AbstractStringValue { + private final StringValue baseExpr; + private final StringValue compExpr; + private final StringValue fillExpr; + public static final String name = ReplaceFunction.name; + private final String exprStr; + private final ExpressionType funcType; - boolean exists = false; - - @Override - public double getDouble() { - double value = baseExpr.getDouble(); - exists = baseExpr.exists(); - double comp = compExpr.getDouble(); - if (exists && compExpr.exists() && value == comp) { - value = fillExpr.getDouble(); - exists = fillExpr.exists(); + public StringReplaceFunction(StringValue baseExpr, StringValue compExpr, StringValue fillExpr) throws SolrException { + this.baseExpr = baseExpr; + this.compExpr = compExpr; + this.fillExpr = fillExpr; + this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); + this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); } - return value; - } - @Override - public boolean exists() { - return exists; - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DateStreamReplaceFunction extends AbstractDateValueStream { - private final DateValueStream baseExpr; - private final DateValue compExpr; - private final DateValue fillExpr; - public static final String name = ReplaceFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public DateStreamReplaceFunction(DateValueStream baseExpr, DateValue compExpr, DateValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.compExpr = compExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); - } + boolean exists = false; - @Override - public void 
streamLongs(LongConsumer cons) { - long compValue = compExpr.getLong(); - if (compExpr.exists()) { - final long fillValue = fillExpr.getLong(); - final boolean fillExists = fillExpr.exists(); - baseExpr.streamLongs(value -> { - if (value == compValue) { - if (fillExists) { - cons.accept(fillValue); - } - } else { - cons.accept(value); - } - }); + @Override + public String getString() { + String value = baseExpr.getString(); + exists = baseExpr.exists(); + String comp = compExpr.getString(); + if (exists && compExpr.exists() && value.equals(comp)) { + value = fillExpr.getString(); + exists = fillExpr.exists(); + } + return value; } - else { - baseExpr.streamLongs(cons); + @Override + public boolean exists() { + return exists; } - } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class DateReplaceFunction extends AbstractDateValue { - private final DateValue baseExpr; - private final DateValue compExpr; - private final DateValue fillExpr; - public static final String name = ReplaceFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public DateReplaceFunction(DateValue baseExpr, DateValue compExpr, DateValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.compExpr = compExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); - } - - boolean exists = false; - - @Override - public long getLong() { - long value = baseExpr.getLong(); - exists = baseExpr.exists(); - long comp = compExpr.getLong(); - if (exists && compExpr.exists() && value == comp) { - value = fillExpr.getLong(); - exists = fillExpr.exists(); + @Override + public String getName() { + return name; } - return value; - } - @Override - public boolean exists() { - return exists; - } - - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} -class StringStreamReplaceFunction extends AbstractStringValueStream { - private final StringValueStream baseExpr; - private final StringValue compExpr; - private final StringValue fillExpr; - public static final String name = ReplaceFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public StringStreamReplaceFunction(StringValueStream baseExpr, StringValue compExpr, StringValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.compExpr = compExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); - } - - @Override - public void streamStrings(Consumer cons) { - String compValue = compExpr.getString(); - if (compExpr.exists()) { - final String fillValue = fillExpr.getString(); - final boolean fillExists = fillExpr.exists(); - baseExpr.streamStrings(value -> { - if (value.equals(compValue)) { - if (fillExists) { - cons.accept(fillValue); - } - } else { - cons.accept(value); - } - }); + @Override + public String getExpressionStr() { + return exprStr; } - else { - baseExpr.streamStrings(cons); + @Override + public ExpressionType 
getExpressionType() { + return funcType; } } - - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } } -class StringReplaceFunction extends AbstractStringValue { - private final StringValue baseExpr; - private final StringValue compExpr; - private final StringValue fillExpr; - public static final String name = ReplaceFunction.name; - private final String exprStr; - private final ExpressionType funcType; - - public StringReplaceFunction(StringValue baseExpr, StringValue compExpr, StringValue fillExpr) throws SolrException { - this.baseExpr = baseExpr; - this.compExpr = compExpr; - this.fillExpr = fillExpr; - this.exprStr = AnalyticsValueStream.createExpressionString(name,baseExpr,compExpr,fillExpr); - this.funcType = AnalyticsValueStream.determineMappingPhase(exprStr,baseExpr,compExpr,fillExpr); - } - - boolean exists = false; - @Override - public String getString() { - String value = baseExpr.getString(); - exists = baseExpr.exists(); - String comp = compExpr.getString(); - if (exists && compExpr.exists() && value.equals(comp)) { - value = fillExpr.getString(); - exists = fillExpr.exists(); - } - return value; - } - @Override - public boolean exists() { - return exists; - } - - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return funcType; - } -} \ No newline at end of file diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/MaxFunction.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/MaxFunction.java index 8d142ea3fea5..af7be67e9d20 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/MaxFunction.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/MaxFunction.java @@ -73,226 +73,233 @@ public class MaxFunction { throw new SolrException(ErrorCode.BAD_REQUEST,"The "+name+" function requires a comparable parameter. " + "Incorrect parameter: "+params[0].getExpressionStr()); }); -} -class IntMaxFunction extends AbstractIntValue implements ReductionFunction { - private IntMaxCollector collector; - public static final String name = MaxFunction.name; - private final String exprStr; - - public IntMaxFunction(IntValueStream param) { - this.collector = new IntMaxCollector(param); - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - } - @Override - public int getInt() { - return collector.exists() ? 
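The ReplaceFunction hunk above moves each typed implementation into a static nested class without changing behavior: the stream variants replace values equal to the comparison expression with the fill value, and drop matching values entirely when the fill value does not exist. A minimal standalone sketch of that semantics (plain Java, not Solr's API; names are illustrative):

```java
import java.util.function.DoubleConsumer;

// Illustrative sketch of the *StreamReplaceFunction logic above; not part of Solr.
public class ReplaceSketch {
  // Mirrors DoubleStreamReplaceFunction#streamDoubles: values equal to comp are
  // replaced by fill; when the fill value is missing, matching values are dropped.
  static void streamReplaced(double[] values, double comp, Double fill, DoubleConsumer cons) {
    for (double value : values) {
      if (value == comp) {
        if (fill != null) {      // stands in for fillExpr.exists()
          cons.accept(fill);
        }
      } else {
        cons.accept(value);
      }
    }
  }

  public static void main(String[] args) {
    // replace(-1 -> 0): prints 3.0, 0.0, 7.5
    streamReplaced(new double[] {3.0, -1.0, 7.5}, -1.0, 0.0, System.out::println);
  }
}
```

The single-valued variants differ only in that they also carry an exists flag, taken from the fill expression whenever a replacement occurs.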
collector.max() : 0; - } - @Override - public boolean exists() { - return collector.exists(); - } + static class IntMaxFunction extends AbstractIntValue implements ReductionFunction { + private IntMaxCollector collector; + public static final String name = MaxFunction.name; + private final String exprStr; - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (IntMaxCollector)sync.apply(collector); - } + public IntMaxFunction(IntValueStream param) { + this.collector = new IntMaxCollector(param); + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class LongMaxFunction extends AbstractLongValue implements ReductionFunction { - private LongMaxCollector collector; - public static final String name = MaxFunction.name; - private final String exprStr; - - public LongMaxFunction(LongValueStream param) { - this.collector = new LongMaxCollector(param); - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - } + @Override + public int getInt() { + return collector.exists() ? collector.max() : 0; + } + @Override + public boolean exists() { + return collector.exists(); + } - @Override - public long getLong() { - return collector.exists() ? collector.max() : 0; - } - @Override - public boolean exists() { - return collector.exists(); - } + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (IntMaxCollector)sync.apply(collector); + } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (LongMaxCollector)sync.apply(collector); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class FloatMaxFunction extends AbstractFloatValue implements ReductionFunction { - private FloatMaxCollector collector; - public static final String name = MaxFunction.name; - private final String exprStr; - - public FloatMaxFunction(FloatValueStream param) { - this.collector = new FloatMaxCollector(param); - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - } + static class LongMaxFunction extends AbstractLongValue implements ReductionFunction { + private LongMaxCollector collector; + public static final String name = MaxFunction.name; + private final String exprStr; - @Override - public float getFloat() { - return collector.exists() ? collector.max() : 0; - } - @Override - public boolean exists() { - return collector.exists(); - } + public LongMaxFunction(LongValueStream param) { + this.collector = new LongMaxCollector(param); + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (FloatMaxCollector)sync.apply(collector); - } + @Override + public long getLong() { + return collector.exists() ? 
collector.max() : 0; + } + @Override + public boolean exists() { + return collector.exists(); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class DoubleMaxFunction extends AbstractDoubleValue implements ReductionFunction { - private DoubleMaxCollector collector; - public static final String name = MaxFunction.name; - private final String exprStr; - - public DoubleMaxFunction(DoubleValueStream param) { - this.collector = new DoubleMaxCollector(param); - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - } + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (LongMaxCollector)sync.apply(collector); + } - @Override - public double getDouble() { - return collector.exists() ? collector.max() : 0; - } - @Override - public boolean exists() { - return collector.exists(); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (DoubleMaxCollector)sync.apply(collector); - } + static class FloatMaxFunction extends AbstractFloatValue implements ReductionFunction { + private FloatMaxCollector collector; + public static final String name = MaxFunction.name; + private final String exprStr; - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class DateMaxFunction extends AbstractDateValue implements ReductionFunction { - private LongMaxCollector collector; - public static final String name = MaxFunction.name; - private final String exprStr; - - public DateMaxFunction(LongValueStream param) { - this.collector = new LongMaxCollector(param); - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - } + public FloatMaxFunction(FloatValueStream param) { + this.collector = new FloatMaxCollector(param); + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + } - @Override - public long getLong() { - return collector.exists() ? collector.max() : 0; - } - @Override - public boolean exists() { - return collector.exists(); - } + @Override + public float getFloat() { + return collector.exists() ? 
collector.max() : 0; + } + @Override + public boolean exists() { + return collector.exists(); + } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (LongMaxCollector)sync.apply(collector); - } + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (FloatMaxCollector)sync.apply(collector); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class StringMaxFunction extends AbstractStringValue implements ReductionFunction { - private StringMaxCollector collector; - public static final String name = MaxFunction.name; - private final String exprStr; - - public StringMaxFunction(StringValueStream param) { - this.collector = new StringMaxCollector(param); - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - public String getString() { - return collector.exists() ? collector.max() : null; - } - @Override - public boolean exists() { - return collector.exists(); - } + static class DoubleMaxFunction extends AbstractDoubleValue implements ReductionFunction { + private DoubleMaxCollector collector; + public static final String name = MaxFunction.name; + private final String exprStr; - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (StringMaxCollector)sync.apply(collector); - } + public DoubleMaxFunction(DoubleValueStream param) { + this.collector = new DoubleMaxCollector(param); + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + } + + @Override + public double getDouble() { + return collector.exists() ? collector.max() : 0; + } + @Override + public boolean exists() { + return collector.exists(); + } + + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (DoubleMaxCollector)sync.apply(collector); + } - @Override - public String getName() { - return name; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - public String getExpressionStr() { - return exprStr; + + static class DateMaxFunction extends AbstractDateValue implements ReductionFunction { + private LongMaxCollector collector; + public static final String name = MaxFunction.name; + private final String exprStr; + + public DateMaxFunction(LongValueStream param) { + this.collector = new LongMaxCollector(param); + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + } + + @Override + public long getLong() { + return collector.exists() ? 
collector.max() : 0; + } + @Override + public boolean exists() { + return collector.exists(); + } + + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (LongMaxCollector)sync.apply(collector); + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; + + static class StringMaxFunction extends AbstractStringValue implements ReductionFunction { + private StringMaxCollector collector; + public static final String name = MaxFunction.name; + private final String exprStr; + + public StringMaxFunction(StringValueStream param) { + this.collector = new StringMaxCollector(param); + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + } + + @Override + public String getString() { + return collector.exists() ? collector.max() : null; + } + @Override + public boolean exists() { + return collector.exists(); + } + + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (StringMaxCollector)sync.apply(collector); + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } -} \ No newline at end of file +} + diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/MedianFunction.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/MedianFunction.java index 90d0ea562767..13c262b34e98 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/MedianFunction.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/MedianFunction.java @@ -60,141 +60,148 @@ public class MedianFunction { } throw new SolrException(ErrorCode.BAD_REQUEST,"The "+name+" function requires a date or numeric parameter."); }); -} -abstract class NumericMedianFunction> extends AbstractDoubleValue implements ReductionFunction { - protected SortedListCollector collector; - public static final String name = MedianFunction.name; - private final String exprStr; - - public NumericMedianFunction(DoubleValueStream param, SortedListCollector collector) { - this.collector = collector; - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - } - protected abstract double collectOrd(int ord); + abstract static class NumericMedianFunction> extends AbstractDoubleValue implements ReductionFunction { + protected SortedListCollector collector; + public static final String name = MedianFunction.name; + private final String exprStr; - @Override - public double getDouble() { - int size = collector.size(); - if (size == 0) { - return 0; + public NumericMedianFunction(DoubleValueStream param, SortedListCollector collector) { + this.collector = collector; + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); } - if (size % 2 == 0) { - return (collectOrd(size/2) + collectOrd(size/2 - 1))/2; - } else { - return collectOrd(size/2); + + protected abstract double collectOrd(int ord); + + @Override + public double getDouble() { + int size = collector.size(); + if (size == 0) { + return 0; + } + if (size % 2 == 0) { + return (collectOrd(size/2) + collectOrd(size/2 - 1))/2; + } else 
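Each Max variant above pairs a typed collector with a synchronizeDataCollectors hook that lets the reduction framework substitute a shared collector instance. A rough sketch of that pattern, under simplified and hypothetical types (Solr's real ReductionFunction and ReductionDataCollector interfaces carry more machinery):

```java
import java.util.function.UnaryOperator;

// Hypothetical, simplified sketch of the collector-swap pattern; not Solr's API.
class MaxSketch {
  static class IntMaxCollector {
    private int max;
    private boolean exists;
    void collect(int v) { if (!exists || v > max) { max = v; exists = true; } }
    boolean exists() { return exists; }
    int max() { return max; }
  }

  private IntMaxCollector collector = new IntMaxCollector();

  // The framework may substitute an equivalent shared collector so that
  // duplicate expressions are collected only once per document.
  void synchronizeDataCollectors(UnaryOperator<IntMaxCollector> sync) {
    collector = sync.apply(collector);
  }

  int getInt() { return collector.exists() ? collector.max() : 0; } // 0 when nothing collected
}
```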
{ + return collectOrd(size/2); + } + } + @Override + public boolean exists() { + return collector.size() > 0; } - } - @Override - public boolean exists() { - return collector.size() > 0; - } - @SuppressWarnings("unchecked") - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (SortedListCollector)sync.apply(collector); - collector.calcMedian(); - } + @SuppressWarnings("unchecked") + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (SortedListCollector)sync.apply(collector); + collector.calcMedian(); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class IntMedianFunction extends NumericMedianFunction { - public IntMedianFunction(IntValueStream param) { - super((DoubleValueStream) param, new SortedIntListCollector(param)); + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - protected double collectOrd(int ord) { - return collector.get(ord); - } -} -class LongMedianFunction extends NumericMedianFunction { - public LongMedianFunction(LongValueStream param) { - super((DoubleValueStream) param, new SortedLongListCollector(param)); - } + static class IntMedianFunction extends NumericMedianFunction { + public IntMedianFunction(IntValueStream param) { + super((DoubleValueStream) param, new SortedIntListCollector(param)); + } - @Override - protected double collectOrd(int ord) { - return collector.get(ord); - } -} -class FloatMedianFunction extends NumericMedianFunction { - public FloatMedianFunction(FloatValueStream param) { - super((DoubleValueStream) param, new SortedFloatListCollector(param)); + @Override + protected double collectOrd(int ord) { + return collector.get(ord); + } } - @Override - protected double collectOrd(int ord) { - return collector.get(ord); - } -} -class DoubleMedianFunction extends NumericMedianFunction { - public DoubleMedianFunction(DoubleValueStream param) { - super(param, new SortedDoubleListCollector(param)); - } + static class LongMedianFunction extends NumericMedianFunction { + public LongMedianFunction(LongValueStream param) { + super((DoubleValueStream) param, new SortedLongListCollector(param)); + } - @Override - protected double collectOrd(int ord) { - return collector.get(ord); - } -} -class DateMedianFunction extends AbstractDateValue implements ReductionFunction { - private SortedLongListCollector collector; - public static final String name = MedianFunction.name; - private final String exprStr; - - public DateMedianFunction(DateValueStream param) { - this.collector = new SortedLongListCollector(param); - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + @Override + protected double collectOrd(int ord) { + return collector.get(ord); + } } - @Override - public long getLong() { - int size = collector.size(); - if (size == 0) { - return 0; + static class FloatMedianFunction extends NumericMedianFunction { + public FloatMedianFunction(FloatValueStream param) { + super((DoubleValueStream) param, new SortedFloatListCollector(param)); } - if (size % 2 == 0) { - return (collector.get(size/2) + collector.get(size/2 - 1))/2; - } else { - return collector.get(size/2); + + @Override + protected double 
collectOrd(int ord) { + return collector.get(ord); } } - @Override - public boolean exists() { - return collector.size() > 0; - } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (SortedLongListCollector)sync.apply(collector); - collector.calcMedian(); - } + static class DoubleMedianFunction extends NumericMedianFunction { + public DoubleMedianFunction(DoubleValueStream param) { + super(param, new SortedDoubleListCollector(param)); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; + @Override + protected double collectOrd(int ord) { + return collector.get(ord); + } } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; + static class DateMedianFunction extends AbstractDateValue implements ReductionFunction { + private SortedLongListCollector collector; + public static final String name = MedianFunction.name; + private final String exprStr; + + public DateMedianFunction(DateValueStream param) { + this.collector = new SortedLongListCollector(param); + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + } + + @Override + public long getLong() { + int size = collector.size(); + if (size == 0) { + return 0; + } + if (size % 2 == 0) { + return (collector.get(size/2) + collector.get(size/2 - 1))/2; + } else { + return collector.get(size/2); + } + } + @Override + public boolean exists() { + return collector.size() > 0; + } + + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (SortedLongListCollector)sync.apply(collector); + collector.calcMedian(); + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } -} \ No newline at end of file +} + diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/MinFunction.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/MinFunction.java index a7f654efaa6c..6298eab0098d 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/MinFunction.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/MinFunction.java @@ -73,226 +73,233 @@ public class MinFunction { throw new SolrException(ErrorCode.BAD_REQUEST,"The "+name+" function requires a comparable parameter. " + "Incorrect parameter: "+params[0].getExpressionStr()); }); -} -class IntMinFunction extends AbstractIntValue implements ReductionFunction { - private IntMinCollector collector; - public static final String name = MinFunction.name; - private final String exprStr; - - public IntMinFunction(IntValueStream param) { - this.collector = new IntMinCollector(param); - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - } - @Override - public int getInt() { - return collector.exists() ? 
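The NumericMedianFunction.getDouble logic above selects the middle ordinal of the sorted collected values, averaging the two middle values when the size is even. As a standalone sketch over a plain sorted array:

```java
// Standalone sketch of the median arithmetic in NumericMedianFunction#getDouble.
public class MedianSketch {
  static double median(double[] sorted) {
    int size = sorted.length;
    if (size == 0) {
      return 0;                // the Solr class additionally reports exists() == false here
    }
    if (size % 2 == 0) {
      return (sorted[size / 2] + sorted[size / 2 - 1]) / 2;  // mean of the two middle values
    }
    return sorted[size / 2];   // single middle value
  }

  public static void main(String[] args) {
    System.out.println(median(new double[] {1, 3, 5, 9}));  // 4.0
    System.out.println(median(new double[] {1, 3, 9}));     // 3.0
  }
}
```

DateMedianFunction performs the same computation on longs (epoch milliseconds), where the even-size average uses integer division and so truncates.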
collector.min() : 0; - } - @Override - public boolean exists() { - return collector.exists(); - } + static class IntMinFunction extends AbstractIntValue implements ReductionFunction { + private IntMinCollector collector; + public static final String name = MinFunction.name; + private final String exprStr; - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (IntMinCollector)sync.apply(collector); - } + public IntMinFunction(IntValueStream param) { + this.collector = new IntMinCollector(param); + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class LongMinFunction extends AbstractLongValue implements ReductionFunction { - private LongMinCollector collector; - public static final String name = MinFunction.name; - private final String exprStr; - - public LongMinFunction(LongValueStream param) { - this.collector = new LongMinCollector(param); - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - } + @Override + public int getInt() { + return collector.exists() ? collector.min() : 0; + } + @Override + public boolean exists() { + return collector.exists(); + } - @Override - public long getLong() { - return collector.exists() ? collector.min() : 0; - } - @Override - public boolean exists() { - return collector.exists(); - } + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (IntMinCollector)sync.apply(collector); + } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (LongMinCollector)sync.apply(collector); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class FloatMinFunction extends AbstractFloatValue implements ReductionFunction { - private FloatMinCollector collector; - public static final String name = MinFunction.name; - private final String exprStr; - - public FloatMinFunction(FloatValueStream param) { - this.collector = new FloatMinCollector(param); - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - } + static class LongMinFunction extends AbstractLongValue implements ReductionFunction { + private LongMinCollector collector; + public static final String name = MinFunction.name; + private final String exprStr; - @Override - public float getFloat() { - return collector.exists() ? collector.min() : 0; - } - @Override - public boolean exists() { - return collector.exists(); - } + public LongMinFunction(LongValueStream param) { + this.collector = new LongMinCollector(param); + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (FloatMinCollector)sync.apply(collector); - } + @Override + public long getLong() { + return collector.exists() ? 
collector.min() : 0; + } + @Override + public boolean exists() { + return collector.exists(); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class DoubleMinFunction extends AbstractDoubleValue implements ReductionFunction { - private DoubleMinCollector collector; - public static final String name = MinFunction.name; - private final String exprStr; - - public DoubleMinFunction(DoubleValueStream param) { - this.collector = new DoubleMinCollector(param); - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - } + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (LongMinCollector)sync.apply(collector); + } - @Override - public double getDouble() { - return collector.exists() ? collector.min() : 0; - } - @Override - public boolean exists() { - return collector.exists(); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (DoubleMinCollector)sync.apply(collector); - } + static class FloatMinFunction extends AbstractFloatValue implements ReductionFunction { + private FloatMinCollector collector; + public static final String name = MinFunction.name; + private final String exprStr; - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class DateMinFunction extends AbstractDateValue implements ReductionFunction { - private LongMinCollector collector; - public static final String name = MinFunction.name; - private final String exprStr; - - public DateMinFunction(LongValueStream param) { - this.collector = new LongMinCollector(param); - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); - } + public FloatMinFunction(FloatValueStream param) { + this.collector = new FloatMinCollector(param); + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + } - @Override - public long getLong() { - return collector.exists() ? collector.min() : 0; - } - @Override - public boolean exists() { - return collector.exists(); - } + @Override + public float getFloat() { + return collector.exists() ? 
collector.min() : 0; + } + @Override + public boolean exists() { + return collector.exists(); + } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (LongMinCollector)sync.apply(collector); - } + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (FloatMinCollector)sync.apply(collector); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class StringMinFunction extends AbstractStringValue implements ReductionFunction { - private StringMinCollector collector; - public static final String name = MinFunction.name; - private final String exprStr; - - public StringMinFunction(StringValueStream param) { - this.collector = new StringMinCollector(param); - this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - public String getString() { - return collector.exists() ? collector.min() : null; - } - @Override - public boolean exists() { - return collector.exists(); - } + static class DoubleMinFunction extends AbstractDoubleValue implements ReductionFunction { + private DoubleMinCollector collector; + public static final String name = MinFunction.name; + private final String exprStr; - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (StringMinCollector)sync.apply(collector); - } + public DoubleMinFunction(DoubleValueStream param) { + this.collector = new DoubleMinCollector(param); + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + } + + @Override + public double getDouble() { + return collector.exists() ? collector.min() : 0; + } + @Override + public boolean exists() { + return collector.exists(); + } + + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (DoubleMinCollector)sync.apply(collector); + } - @Override - public String getName() { - return name; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - public String getExpressionStr() { - return exprStr; + + static class DateMinFunction extends AbstractDateValue implements ReductionFunction { + private LongMinCollector collector; + public static final String name = MinFunction.name; + private final String exprStr; + + public DateMinFunction(LongValueStream param) { + this.collector = new LongMinCollector(param); + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + } + + @Override + public long getLong() { + return collector.exists() ? 
collector.min() : 0; + } + @Override + public boolean exists() { + return collector.exists(); + } + + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (LongMinCollector)sync.apply(collector); + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; + + static class StringMinFunction extends AbstractStringValue implements ReductionFunction { + private StringMinCollector collector; + public static final String name = MinFunction.name; + private final String exprStr; + + public StringMinFunction(StringValueStream param) { + this.collector = new StringMinCollector(param); + this.exprStr = AnalyticsValueStream.createExpressionString(name,param); + } + + @Override + public String getString() { + return collector.exists() ? collector.min() : null; + } + @Override + public boolean exists() { + return collector.exists(); + } + + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (StringMinCollector)sync.apply(collector); + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } -} \ No newline at end of file +} + diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/OrdinalFunction.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/OrdinalFunction.java index e6ed5723ac63..a5aeed6bd2d6 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/OrdinalFunction.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/OrdinalFunction.java @@ -87,274 +87,281 @@ protected static String createOrdinalExpressionString(AnalyticsValueStream param ord, param.getExpressionStr()); } -} -class IntOrdinalFunction extends AbstractIntValue implements ReductionFunction { - private SortedIntListCollector collector; - private int ordinal; - public static final String name = OrdinalFunction.name; - private final String exprStr; - - public IntOrdinalFunction(IntValueStream param, int ordinal) { - this.collector = new SortedIntListCollector(param); - this.ordinal = ordinal; - this.exprStr = OrdinalFunction.createOrdinalExpressionString(param, ordinal); - } - @Override - public int getInt() { - int size = collector.size(); - if (ordinal > 0) { - return ordinal <= size ? collector.get(ordinal - 1) : 0; - } else { - return (ordinal * -1) <= size ? collector.get(size + ordinal) : 0; + static class IntOrdinalFunction extends AbstractIntValue implements ReductionFunction { + private SortedIntListCollector collector; + private int ordinal; + public static final String name = OrdinalFunction.name; + private final String exprStr; + + public IntOrdinalFunction(IntValueStream param, int ordinal) { + this.collector = new SortedIntListCollector(param); + this.ordinal = ordinal; + this.exprStr = createOrdinalExpressionString(param, ordinal); } - } - @Override - public boolean exists() { - return (ordinal > 0 ? 
ordinal : (ordinal * -1)) <= collector.size(); - } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (SortedIntListCollector)sync.apply(collector); - collector.calcOrdinal(ordinal); - } + @Override + public int getInt() { + int size = collector.size(); + if (ordinal > 0) { + return ordinal <= size ? collector.get(ordinal - 1) : 0; + } else { + return (ordinal * -1) <= size ? collector.get(size + ordinal) : 0; + } + } + @Override + public boolean exists() { + return (ordinal > 0 ? ordinal : (ordinal * -1)) <= collector.size(); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class LongOrdinalFunction extends AbstractLongValue implements ReductionFunction { - private SortedLongListCollector collector; - private int ordinal; - public static final String name = OrdinalFunction.name; - private final String exprStr; - - public LongOrdinalFunction(LongValueStream param, int ordinal) { - this.collector = new SortedLongListCollector(param); - this.ordinal = ordinal; - this.exprStr = OrdinalFunction.createOrdinalExpressionString(param, ordinal); - } + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (SortedIntListCollector)sync.apply(collector); + collector.calcOrdinal(ordinal); + } - @Override - public long getLong() { - int size = collector.size(); - if (ordinal > 0) { - return ordinal <= size ? collector.get(ordinal - 1) : 0; - } else { - return (ordinal * -1) <= size ? collector.get(size + ordinal) : 0; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; } - } - @Override - public boolean exists() { - return (ordinal > 0 ? ordinal : (ordinal * -1)) <= collector.size(); } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (SortedLongListCollector)sync.apply(collector); - collector.calcOrdinal(ordinal); - } + static class LongOrdinalFunction extends AbstractLongValue implements ReductionFunction { + private SortedLongListCollector collector; + private int ordinal; + public static final String name = OrdinalFunction.name; + private final String exprStr; - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class FloatOrdinalFunction extends AbstractFloatValue implements ReductionFunction { - private SortedFloatListCollector collector; - private int ordinal; - public static final String name = OrdinalFunction.name; - private final String exprStr; - - public FloatOrdinalFunction(FloatValueStream param, int ordinal) { - this.collector = new SortedFloatListCollector(param); - this.ordinal = ordinal; - this.exprStr = OrdinalFunction.createOrdinalExpressionString(param, ordinal); - } + public LongOrdinalFunction(LongValueStream param, int ordinal) { + this.collector = new SortedLongListCollector(param); + this.ordinal = ordinal; + this.exprStr = createOrdinalExpressionString(param, ordinal); + } - @Override - public float getFloat() { - int size = collector.size(); - if (ordinal > 0) { - return ordinal <= size ? 
collector.get(ordinal - 1) : 0; - } else { - return (ordinal * -1) <= size ? collector.get(size + ordinal) : 0; + @Override + public long getLong() { + int size = collector.size(); + if (ordinal > 0) { + return ordinal <= size ? collector.get(ordinal - 1) : 0; + } else { + return (ordinal * -1) <= size ? collector.get(size + ordinal) : 0; + } + } + @Override + public boolean exists() { + return (ordinal > 0 ? ordinal : (ordinal * -1)) <= collector.size(); } - } - @Override - public boolean exists() { - return (ordinal > 0 ? ordinal : (ordinal * -1)) <= collector.size(); - } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (SortedFloatListCollector)sync.apply(collector); - collector.calcOrdinal(ordinal); - } + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (SortedLongListCollector)sync.apply(collector); + collector.calcOrdinal(ordinal); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class DoubleOrdinalFunction extends AbstractDoubleValue implements ReductionFunction { - private SortedDoubleListCollector collector; - private int ordinal; - public static final String name = OrdinalFunction.name; - private final String exprStr; - - public DoubleOrdinalFunction(DoubleValueStream param, int ordinal) { - this.collector = new SortedDoubleListCollector(param); - this.ordinal = ordinal; - this.exprStr = OrdinalFunction.createOrdinalExpressionString(param, ordinal); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - public double getDouble() { - int size = collector.size(); - if (ordinal > 0) { - return ordinal <= size ? collector.get(ordinal - 1) : 0; - } else { - return (ordinal * -1) <= size ? collector.get(size + ordinal) : 0; + static class FloatOrdinalFunction extends AbstractFloatValue implements ReductionFunction { + private SortedFloatListCollector collector; + private int ordinal; + public static final String name = OrdinalFunction.name; + private final String exprStr; + + public FloatOrdinalFunction(FloatValueStream param, int ordinal) { + this.collector = new SortedFloatListCollector(param); + this.ordinal = ordinal; + this.exprStr = createOrdinalExpressionString(param, ordinal); } - } - @Override - public boolean exists() { - return (ordinal > 0 ? ordinal : (ordinal * -1)) <= collector.size(); - } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (SortedDoubleListCollector)sync.apply(collector); - collector.calcOrdinal(ordinal); - } + @Override + public float getFloat() { + int size = collector.size(); + if (ordinal > 0) { + return ordinal <= size ? collector.get(ordinal - 1) : 0; + } else { + return (ordinal * -1) <= size ? collector.get(size + ordinal) : 0; + } + } + @Override + public boolean exists() { + return (ordinal > 0 ? 
ordinal : (ordinal * -1)) <= collector.size(); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class DateOrdinalFunction extends AbstractDateValue implements ReductionFunction { - private SortedLongListCollector collector; - private int ordinal; - public static final String name = OrdinalFunction.name; - private final String exprStr; - - public DateOrdinalFunction(LongValueStream param, int ordinal) { - this.collector = new SortedLongListCollector(param); - this.ordinal = ordinal; - this.exprStr = OrdinalFunction.createOrdinalExpressionString(param, ordinal); - } + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (SortedFloatListCollector)sync.apply(collector); + collector.calcOrdinal(ordinal); + } - @Override - public long getLong() { - int size = collector.size(); - if (ordinal > 0) { - return ordinal <= size ? collector.get(ordinal - 1) : 0; - } else { - return (ordinal * -1) <= size ? collector.get(size + ordinal) : 0; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; } - } - @Override - public boolean exists() { - return (ordinal > 0 ? ordinal : (ordinal * -1)) <= collector.size(); } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (SortedLongListCollector)sync.apply(collector); - collector.calcOrdinal(ordinal); - } + static class DoubleOrdinalFunction extends AbstractDoubleValue implements ReductionFunction { + private SortedDoubleListCollector collector; + private int ordinal; + public static final String name = OrdinalFunction.name; + private final String exprStr; - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class StringOrdinalFunction extends AbstractStringValue implements ReductionFunction { - private SortedStringListCollector collector; - private int ordinal; - public static final String name = OrdinalFunction.name; - private final String exprStr; - - public StringOrdinalFunction(StringValueStream param, int ordinal) { - this.collector = new SortedStringListCollector(param); - this.ordinal = ordinal; - this.exprStr = OrdinalFunction.createOrdinalExpressionString(param, ordinal); - } + public DoubleOrdinalFunction(DoubleValueStream param, int ordinal) { + this.collector = new SortedDoubleListCollector(param); + this.ordinal = ordinal; + this.exprStr = createOrdinalExpressionString(param, ordinal); + } - @Override - public String getString() { - int size = collector.size(); - if (ordinal > 0) { - return ordinal <= size ? collector.get(ordinal - 1) : null; - } else { - return (ordinal * -1) <= size ? collector.get(size + ordinal) : null; + @Override + public double getDouble() { + int size = collector.size(); + if (ordinal > 0) { + return ordinal <= size ? collector.get(ordinal - 1) : 0; + } else { + return (ordinal * -1) <= size ? collector.get(size + ordinal) : 0; + } + } + @Override + public boolean exists() { + return (ordinal > 0 ? ordinal : (ordinal * -1)) <= collector.size(); } - } - @Override - public boolean exists() { - return (ordinal > 0 ? 
ordinal : (ordinal * -1)) <= collector.size(); - } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (SortedStringListCollector)sync.apply(collector); - collector.calcOrdinal(ordinal); - } + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (SortedDoubleListCollector)sync.apply(collector); + collector.calcOrdinal(ordinal); + } - @Override - public String getName() { - return name; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - public String getExpressionStr() { - return exprStr; + + static class DateOrdinalFunction extends AbstractDateValue implements ReductionFunction { + private SortedLongListCollector collector; + private int ordinal; + public static final String name = OrdinalFunction.name; + private final String exprStr; + + public DateOrdinalFunction(LongValueStream param, int ordinal) { + this.collector = new SortedLongListCollector(param); + this.ordinal = ordinal; + this.exprStr = createOrdinalExpressionString(param, ordinal); + } + + @Override + public long getLong() { + int size = collector.size(); + if (ordinal > 0) { + return ordinal <= size ? collector.get(ordinal - 1) : 0; + } else { + return (ordinal * -1) <= size ? collector.get(size + ordinal) : 0; + } + } + @Override + public boolean exists() { + return (ordinal > 0 ? ordinal : (ordinal * -1)) <= collector.size(); + } + + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (SortedLongListCollector)sync.apply(collector); + collector.calcOrdinal(ordinal); + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; + + static class StringOrdinalFunction extends AbstractStringValue implements ReductionFunction { + private SortedStringListCollector collector; + private int ordinal; + public static final String name = OrdinalFunction.name; + private final String exprStr; + + public StringOrdinalFunction(StringValueStream param, int ordinal) { + this.collector = new SortedStringListCollector(param); + this.ordinal = ordinal; + this.exprStr = createOrdinalExpressionString(param, ordinal); + } + + @Override + public String getString() { + int size = collector.size(); + if (ordinal > 0) { + return ordinal <= size ? collector.get(ordinal - 1) : null; + } else { + return (ordinal * -1) <= size ? collector.get(size + ordinal) : null; + } + } + @Override + public boolean exists() { + return (ordinal > 0 ? 
ordinal : (ordinal * -1)) <= collector.size(); + } + + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (SortedStringListCollector)sync.apply(collector); + collector.calcOrdinal(ordinal); + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } -} \ No newline at end of file +} + diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/PercentileFunction.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/PercentileFunction.java index a5ff6be8a64c..981ee42ff8c2 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/PercentileFunction.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/function/reduction/PercentileFunction.java @@ -88,250 +88,257 @@ protected static String createPercentileExpressionString(AnalyticsValueStream pa perc, param.getExpressionStr()); } -} -class IntPercentileFunction extends AbstractIntValue implements ReductionFunction { - private SortedIntListCollector collector; - private double percentile; - public static final String name = PercentileFunction.name; - private final String exprStr; - - public IntPercentileFunction(IntValueStream param, double percentile) { - this.collector = new SortedIntListCollector(param); - this.percentile = percentile; - this.exprStr = PercentileFunction.createPercentileExpressionString(param, percentile); - } - @Override - public int getInt() { - int size = collector.size(); - return size > 0 ? collector.get((int) Math.round(percentile * size - .5)) : 0; - } - @Override - public boolean exists() { - return collector.size() > 0; - } + static class IntPercentileFunction extends AbstractIntValue implements ReductionFunction { + private SortedIntListCollector collector; + private double percentile; + public static final String name = PercentileFunction.name; + private final String exprStr; - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (SortedIntListCollector)sync.apply(collector); - collector.calcPercentile(percentile); - } + public IntPercentileFunction(IntValueStream param, double percentile) { + this.collector = new SortedIntListCollector(param); + this.percentile = percentile; + this.exprStr = createPercentileExpressionString(param, percentile); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class LongPercentileFunction extends AbstractLongValue implements ReductionFunction { - private SortedLongListCollector collector; - private double percentile; - public static final String name = PercentileFunction.name; - private final String exprStr; - - public LongPercentileFunction(LongValueStream param, double percentile) { - this.collector = new SortedLongListCollector(param); - this.percentile = percentile; - this.exprStr = PercentileFunction.createPercentileExpressionString(param, percentile); - } + @Override + public int getInt() { + int size = collector.size(); + return size > 0 ? 
collector.get((int) Math.round(percentile * size - .5)) : 0; + } + @Override + public boolean exists() { + return collector.size() > 0; + } - @Override - public long getLong() { - int size = collector.size(); - return size > 0 ? collector.get((int) Math.round(percentile * size - .5)) : 0; - } - @Override - public boolean exists() { - return collector.size() > 0; - } + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (SortedIntListCollector)sync.apply(collector); + collector.calcPercentile(percentile); + } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (SortedLongListCollector)sync.apply(collector); - collector.calcPercentile(percentile); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class FloatPercentileFunction extends AbstractFloatValue implements ReductionFunction { - private SortedFloatListCollector collector; - private double percentile; - public static final String name = PercentileFunction.name; - private final String exprStr; - - public FloatPercentileFunction(FloatValueStream param, double percentile) { - this.collector = new SortedFloatListCollector(param); - this.percentile = percentile; - this.exprStr = PercentileFunction.createPercentileExpressionString(param, percentile); - } + static class LongPercentileFunction extends AbstractLongValue implements ReductionFunction { + private SortedLongListCollector collector; + private double percentile; + public static final String name = PercentileFunction.name; + private final String exprStr; - @Override - public float getFloat() { - int size = collector.size(); - return size > 0 ? collector.get((int) Math.round(percentile * size - .5)) : 0; - } - @Override - public boolean exists() { - return collector.size() > 0; - } + public LongPercentileFunction(LongValueStream param, double percentile) { + this.collector = new SortedLongListCollector(param); + this.percentile = percentile; + this.exprStr = createPercentileExpressionString(param, percentile); + } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (SortedFloatListCollector)sync.apply(collector); - collector.calcPercentile(percentile); - } + @Override + public long getLong() { + int size = collector.size(); + return size > 0 ? 
collector.get((int) Math.round(percentile * size - .5)) : 0; + } + @Override + public boolean exists() { + return collector.size() > 0; + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class DoublePercentileFunction extends AbstractDoubleValue implements ReductionFunction { - private SortedDoubleListCollector collector; - private double percentile; - public static final String name = PercentileFunction.name; - private final String exprStr; - - public DoublePercentileFunction(DoubleValueStream param, double percentile) { - this.collector = new SortedDoubleListCollector(param); - this.percentile = percentile; - this.exprStr = PercentileFunction.createPercentileExpressionString(param, percentile); - } + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (SortedLongListCollector)sync.apply(collector); + collector.calcPercentile(percentile); + } - @Override - public double getDouble() { - int size = collector.size(); - return size > 0 ? collector.get((int) Math.round(percentile * size - .5)) : 0; - } - @Override - public boolean exists() { - return collector.size() > 0; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (SortedDoubleListCollector)sync.apply(collector); - collector.calcPercentile(percentile); - } + static class FloatPercentileFunction extends AbstractFloatValue implements ReductionFunction { + private SortedFloatListCollector collector; + private double percentile; + public static final String name = PercentileFunction.name; + private final String exprStr; - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class DatePercentileFunction extends AbstractDateValue implements ReductionFunction { - private SortedLongListCollector collector; - private double percentile; - public static final String name = PercentileFunction.name; - private final String exprStr; - - public DatePercentileFunction(LongValueStream param, double percentile) { - this.collector = new SortedLongListCollector(param); - this.percentile = percentile; - this.exprStr = PercentileFunction.createPercentileExpressionString(param, percentile); - } + public FloatPercentileFunction(FloatValueStream param, double percentile) { + this.collector = new SortedFloatListCollector(param); + this.percentile = percentile; + this.exprStr = createPercentileExpressionString(param, percentile); + } - @Override - public long getLong() { - int size = collector.size(); - return size > 0 ? collector.get((int) Math.round(percentile * size - .5)) : 0; - } - @Override - public boolean exists() { - return collector.size() > 0; - } + @Override + public float getFloat() { + int size = collector.size(); + return size > 0 ? 
collector.get((int) Math.round(percentile * size - .5)) : 0; + } + @Override + public boolean exists() { + return collector.size() > 0; + } - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (SortedLongListCollector)sync.apply(collector); - collector.calcPercentile(percentile); - } + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (SortedFloatListCollector)sync.apply(collector); + collector.calcPercentile(percentile); + } - @Override - public String getName() { - return name; - } - @Override - public String getExpressionStr() { - return exprStr; - } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; - } -} -class StringPercentileFunction extends AbstractStringValue implements ReductionFunction { - private SortedStringListCollector collector; - private double percentile; - public static final String name = PercentileFunction.name; - private final String exprStr; - - public StringPercentileFunction(StringValueStream param, double percentile) { - this.collector = new SortedStringListCollector(param); - this.percentile = percentile; - this.exprStr = PercentileFunction.createPercentileExpressionString(param, percentile); + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - public String getString() { - int size = collector.size(); - return size > 0 ? collector.get((int) Math.round(percentile * size - .5)) : null; - } - @Override - public boolean exists() { - return collector.size() > 0; - } + static class DoublePercentileFunction extends AbstractDoubleValue implements ReductionFunction { + private SortedDoubleListCollector collector; + private double percentile; + public static final String name = PercentileFunction.name; + private final String exprStr; - @Override - public void synchronizeDataCollectors(UnaryOperator> sync) { - collector = (SortedStringListCollector)sync.apply(collector); - collector.calcPercentile(percentile); - } + public DoublePercentileFunction(DoubleValueStream param, double percentile) { + this.collector = new SortedDoubleListCollector(param); + this.percentile = percentile; + this.exprStr = createPercentileExpressionString(param, percentile); + } + + @Override + public double getDouble() { + int size = collector.size(); + return size > 0 ? 
collector.get((int) Math.round(percentile * size - .5)) : 0; + } + @Override + public boolean exists() { + return collector.size() > 0; + } + + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (SortedDoubleListCollector)sync.apply(collector); + collector.calcPercentile(percentile); + } - @Override - public String getName() { - return name; + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - public String getExpressionStr() { - return exprStr; + + static class DatePercentileFunction extends AbstractDateValue implements ReductionFunction { + private SortedLongListCollector collector; + private double percentile; + public static final String name = PercentileFunction.name; + private final String exprStr; + + public DatePercentileFunction(LongValueStream param, double percentile) { + this.collector = new SortedLongListCollector(param); + this.percentile = percentile; + this.exprStr = createPercentileExpressionString(param, percentile); + } + + @Override + public long getLong() { + int size = collector.size(); + return size > 0 ? collector.get((int) Math.round(percentile * size - .5)) : 0; + } + @Override + public boolean exists() { + return collector.size() > 0; + } + + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (SortedLongListCollector)sync.apply(collector); + collector.calcPercentile(percentile); + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } - @Override - public ExpressionType getExpressionType() { - return ExpressionType.REDUCTION; + + static class StringPercentileFunction extends AbstractStringValue implements ReductionFunction { + private SortedStringListCollector collector; + private double percentile; + public static final String name = PercentileFunction.name; + private final String exprStr; + + public StringPercentileFunction(StringValueStream param, double percentile) { + this.collector = new SortedStringListCollector(param); + this.percentile = percentile; + this.exprStr = createPercentileExpressionString(param, percentile); + } + + @Override + public String getString() { + int size = collector.size(); + return size > 0 ? 
collector.get((int) Math.round(percentile * size - .5)) : null; + } + @Override + public boolean exists() { + return collector.size() > 0; + } + + @Override + public void synchronizeDataCollectors(UnaryOperator> sync) { + collector = (SortedStringListCollector)sync.apply(collector); + collector.calcPercentile(percentile); + } + + @Override + public String getName() { + return name; + } + @Override + public String getExpressionStr() { + return exprStr; + } + @Override + public ExpressionType getExpressionType() { + return ExpressionType.REDUCTION; + } } -} \ No newline at end of file +} + diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/FacetRangeGenerator.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/FacetRangeGenerator.java index c0ec919ca662..ed842083d158 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/FacetRangeGenerator.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/FacetRangeGenerator.java @@ -273,80 +273,84 @@ public static FacetRangeGenerator> create(RangeFacet ran } return calc; } -} -class IntegerFacetRangeGenerator extends FacetRangeGenerator { - public IntegerFacetRangeGenerator(final RangeFacet rangeFacet) { super(rangeFacet); } - @Override - protected Integer parseVal(String rawval) { - return Integer.valueOf(rawval); - } - @Override - public Integer parseAndAddGap(Integer value, String gap) { - return value.intValue() + Integer.valueOf(gap).intValue(); - } -} -class LongFacetRangeGenerator extends FacetRangeGenerator { - public LongFacetRangeGenerator(final RangeFacet rangeFacet) { super(rangeFacet); } + static class IntegerFacetRangeGenerator extends FacetRangeGenerator { + public IntegerFacetRangeGenerator(final RangeFacet rangeFacet) { super(rangeFacet); } - @Override - protected Long parseVal(String rawval) { - return Long.valueOf(rawval); - } - @Override - public Long parseAndAddGap(Long value, String gap) { - return value.longValue() + Long.valueOf(gap).longValue(); + @Override + protected Integer parseVal(String rawval) { + return Integer.valueOf(rawval); + } + @Override + public Integer parseAndAddGap(Integer value, String gap) { + return value.intValue() + Integer.valueOf(gap).intValue(); + } } -} -class FloatFacetRangeGenerator extends FacetRangeGenerator { - public FloatFacetRangeGenerator(final RangeFacet rangeFacet) { super(rangeFacet); } + static class LongFacetRangeGenerator extends FacetRangeGenerator { + public LongFacetRangeGenerator(final RangeFacet rangeFacet) { super(rangeFacet); } - @Override - protected Float parseVal(String rawval) { - return Float.valueOf(rawval); - } - @Override - public Float parseAndAddGap(Float value, String gap) { - return value.floatValue() + Float.valueOf(gap).floatValue(); + @Override + protected Long parseVal(String rawval) { + return Long.valueOf(rawval); + } + @Override + public Long parseAndAddGap(Long value, String gap) { + return value.longValue() + Long.valueOf(gap).longValue(); + } } -} -class DoubleFacetRangeGenerator extends FacetRangeGenerator { - public DoubleFacetRangeGenerator(final RangeFacet rangeFacet) { super(rangeFacet); } + static class FloatFacetRangeGenerator extends FacetRangeGenerator { + public FloatFacetRangeGenerator(final RangeFacet rangeFacet) { super(rangeFacet); } - @Override - protected Double parseVal(String rawval) { - return Double.valueOf(rawval); - } - @Override - public Double parseAndAddGap(Double value, String gap) { - return value.doubleValue() + Double.valueOf(gap).doubleValue(); - 
} -} -class DateFacetRangeGenerator extends FacetRangeGenerator { - private final Date now; - public DateFacetRangeGenerator(final RangeFacet rangeFacet, final Date now) { - super(rangeFacet); - this.now = now; + @Override + protected Float parseVal(String rawval) { + return Float.valueOf(rawval); + } + @Override + public Float parseAndAddGap(Float value, String gap) { + return value.floatValue() + Float.valueOf(gap).floatValue(); + } } - @Override - public String formatValue(Date val) { - return val.toInstant().toString(); - } - @Override - protected Date parseVal(String rawval) { - return DateMathParser.parseMath(now, rawval); - } - @Override - protected Object parseGap(final String rawval) { - return rawval; + static class DoubleFacetRangeGenerator extends FacetRangeGenerator { + public DoubleFacetRangeGenerator(final RangeFacet rangeFacet) { super(rangeFacet); } + + @Override + protected Double parseVal(String rawval) { + return Double.valueOf(rawval); + } + @Override + public Double parseAndAddGap(Double value, String gap) { + return value.doubleValue() + Double.valueOf(gap).doubleValue(); + } } - @Override - public Date parseAndAddGap(Date value, String gap) throws java.text.ParseException { - final DateMathParser dmp = new DateMathParser(); - dmp.setNow(value); - return dmp.parseMath(gap); + + static class DateFacetRangeGenerator extends FacetRangeGenerator { + private final Date now; + public DateFacetRangeGenerator(final RangeFacet rangeFacet, final Date now) { + super(rangeFacet); + this.now = now; + } + + @Override + public String formatValue(Date val) { + return val.toInstant().toString(); + } + @Override + protected Date parseVal(String rawval) { + return DateMathParser.parseMath(now, rawval); + } + @Override + protected Object parseGap(final String rawval) { + return rawval; + } + @Override + public Date parseAndAddGap(Date value, String gap) throws java.text.ParseException { + final DateMathParser dmp = new DateMathParser(); + dmp.setNow(value); + return dmp.parseMath(gap); + } } } + diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/MedianCalculator.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/MedianCalculator.java index 541cff07ca7b..1cc7a36091cb 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/MedianCalculator.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/MedianCalculator.java @@ -53,7 +53,7 @@ private static > void select(List list, double place, split = split(list, begin, end); } - Point result = partition(list, begin, end, split); + OrdinalCalculator.Point result = partition(list, begin, end, split); if (place < result.low) { select(list, place, begin, result.low); @@ -86,7 +86,7 @@ private static > T split(List list, int begin, int en return list.get(recursiveSize / 2 + begin); } - private static > Point partition(List list, int begin, int end, T indexElement) { + private static > OrdinalCalculator.Point partition(List list, int begin, int end, T indexElement) { T temp; int left, right; for (left = begin, right = end; left < right; left++, right--) { @@ -126,6 +126,6 @@ private static > Point partition(List list, int begin } rightMove++; } - return new Point(left, right); + return new OrdinalCalculator.Point(left, right); } } diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/OrdinalCalculator.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/OrdinalCalculator.java index 10975f6e0a56..5a28f4ae0e94 100644 --- 
a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/OrdinalCalculator.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/OrdinalCalculator.java @@ -157,14 +157,15 @@ private static > Point partition(List list, int begin } return new Point(left, right); } -} -class Point { - public int low; - public int high; + static class Point { + public int low; + public int high; - public Point(int low, int high) { - this.low = low; - this.high = high; + public Point(int low, int high) { + this.low = low; + this.high = high; + } } } + diff --git a/solr/contrib/analytics/src/java/org/apache/solr/handler/AnalyticsHandler.java b/solr/contrib/analytics/src/java/org/apache/solr/handler/AnalyticsHandler.java index b289d1c9638e..9ffbaf483e06 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/handler/AnalyticsHandler.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/handler/AnalyticsHandler.java @@ -28,8 +28,6 @@ import org.apache.solr.analytics.ExpressionFactory; import org.apache.solr.analytics.TimeExceededStubException; import org.apache.solr.analytics.stream.AnalyticsShardResponseParser; -import org.apache.solr.client.solrj.io.ModelCache; -import org.apache.solr.client.solrj.io.SolrClientCache; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.SolrParams; @@ -61,9 +59,6 @@ public class AnalyticsHandler extends RequestHandlerBase implements SolrCoreAwar public static final String NAME = "/analytics"; private IndexSchema indexSchema; - static SolrClientCache clientCache = new SolrClientCache(); - static ModelCache modelCache = null; - @Override public PermissionNameProvider.Name getPermissionName(AuthorizationContext request) { return PermissionNameProvider.Name.READ_PERM; @@ -72,7 +67,6 @@ public PermissionNameProvider.Name getPermissionName(AuthorizationContext reques @Override public void inform(SolrCore core) { core.registerResponseWriter(AnalyticsShardResponseWriter.NAME, new AnalyticsShardResponseWriter()); - indexSchema = core.getLatestSchema(); AnalyticsRequestParser.init(); } diff --git a/solr/contrib/analytics/src/java/org/apache/solr/handler/component/AnalyticsComponent.java b/solr/contrib/analytics/src/java/org/apache/solr/handler/component/AnalyticsComponent.java index 5e6760424959..5248181b76bd 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/handler/component/AnalyticsComponent.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/handler/component/AnalyticsComponent.java @@ -36,7 +36,7 @@ public class AnalyticsComponent extends SearchComponent { public static final String COMPONENT_NAME = "analytics"; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { AnalyticsRequestParser.init(); } diff --git a/solr/contrib/analytics/src/java/org/apache/solr/response/AnalyticsShardResponseWriter.java b/solr/contrib/analytics/src/java/org/apache/solr/response/AnalyticsShardResponseWriter.java index 67a71d01463e..ac796a68974d 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/response/AnalyticsShardResponseWriter.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/response/AnalyticsShardResponseWriter.java @@ -55,7 +55,7 @@ public String getContentType(SolrQueryRequest request, SolrQueryResponse respons } @Override - public void init(NamedList args) {} + public void init(@SuppressWarnings({"rawtypes"})NamedList args) {} /** * Manages the streaming of analytics reduction data if 
no exception occurred. diff --git a/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/LegacyNoFacetCloudTest.java b/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/LegacyNoFacetCloudTest.java index dbc9522aacee..76ff31b8e96d 100644 --- a/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/LegacyNoFacetCloudTest.java +++ b/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/LegacyNoFacetCloudTest.java @@ -131,22 +131,22 @@ public void sumTest() throws Exception { //Int Double intResult = getValue(response, "sr", "int_id"); - Double intTest = (Double)calculateNumberStat(intTestStart, "sum"); + Double intTest = calculateNumberStat(intTestStart, "sum"); assertEquals(responseStr, intResult,intTest); //Long Double longResult = getValue(response, "sr", "long_ld"); - Double longTest = (Double)calculateNumberStat(longTestStart, "sum"); + Double longTest = calculateNumberStat(longTestStart, "sum"); assertEquals(responseStr, longResult,longTest); //Float Double floatResult = getValue(response, "sr", "float_fd"); - Double floatTest = (Double)calculateNumberStat(floatTestStart, "sum"); + Double floatTest = calculateNumberStat(floatTestStart, "sum"); assertEquals(responseStr, floatResult,floatTest); //Double Double doubleResult = getValue(response, "sr", "double_dd"); - Double doubleTest = (Double) calculateNumberStat(doubleTestStart, "sum"); + Double doubleTest = calculateNumberStat(doubleTestStart, "sum"); assertEquals(responseStr, doubleResult,doubleTest); } @@ -163,22 +163,22 @@ public void meanTest() throws Exception { //Int Double intResult = getValue(response, "mr", "int_id"); - Double intTest = (Double)calculateNumberStat(intTestStart, "mean"); + Double intTest = calculateNumberStat(intTestStart, "mean"); assertEquals(responseStr, intResult,intTest); //Long Double longResult = getValue(response, "mr", "long_ld"); - Double longTest = (Double)calculateNumberStat(longTestStart, "mean"); + Double longTest = calculateNumberStat(longTestStart, "mean"); assertEquals(responseStr, longResult,longTest); //Float Double floatResult = getValue(response, "mr", "float_fd"); - Double floatTest = (Double)calculateNumberStat(floatTestStart, "mean"); + Double floatTest = calculateNumberStat(floatTestStart, "mean"); assertEquals(responseStr, floatResult,floatTest); //Double Double doubleResult = getValue(response, "mr", "double_dd"); - Double doubleTest = (Double)calculateNumberStat(doubleTestStart, "mean"); + Double doubleTest = calculateNumberStat(doubleTestStart, "mean"); assertEquals(responseStr, doubleResult,doubleTest); } @@ -195,23 +195,23 @@ public void stddevTest() throws Exception { //Int Double intResult = getValue(response, "str", "int_id"); - Double intTest = (Double)calculateNumberStat(intTestStart, "stddev"); + Double intTest = calculateNumberStat(intTestStart, "stddev"); assertEquals(responseStr, intResult, intTest, 0.00000000001); //Long Double longResult = getValue(response, "str", "long_ld"); - Double longTest = (Double)calculateNumberStat(longTestStart, "stddev"); + Double longTest = calculateNumberStat(longTestStart, "stddev"); assertEquals(responseStr, longResult, longTest, 0.00000000001); //Float Double floatResult = getValue(response, "str", "float_fd"); - Double floatTest = (Double)calculateNumberStat(floatTestStart, "stddev"); + Double floatTest = calculateNumberStat(floatTestStart, "stddev"); assertEquals(responseStr, floatResult, floatTest, 0.00000000001); //Double Double doubleResult = getValue(response, "str", "double_dd"); - 
Double doubleTest = (Double)calculateNumberStat(doubleTestStart, "stddev"); + Double doubleTest = calculateNumberStat(doubleTestStart, "stddev"); assertEquals(responseStr, doubleResult, doubleTest, 0.00000000001); } @@ -229,22 +229,22 @@ public void medianTest() throws Exception { //Int Double intResult = getValue(response, "medr", "int_id"); - Double intTest = (Double)calculateNumberStat(intTestStart, "median"); + Double intTest = calculateNumberStat(intTestStart, "median"); assertEquals(responseStr, intResult,intTest); //Long Double longResult = getValue(response, "medr", "long_ld"); - Double longTest = (Double)calculateNumberStat(longTestStart, "median"); + Double longTest = calculateNumberStat(longTestStart, "median"); assertEquals(responseStr, longResult,longTest); //Float Double floatResult = getValue(response, "medr", "float_fd"); - Double floatTest = (Double)calculateNumberStat(floatTestStart, "median"); + Double floatTest = calculateNumberStat(floatTestStart, "median"); assertEquals(responseStr, floatResult,floatTest); //Double Double doubleResult = getValue(response, "medr", "double_dd"); - Double doubleTest = (Double)calculateNumberStat(doubleTestStart, "median"); + Double doubleTest = calculateNumberStat(doubleTestStart, "median"); assertEquals(responseStr, doubleResult,doubleTest); // TODO: Add test for date median diff --git a/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/LegacyNoFacetTest.java b/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/LegacyNoFacetTest.java index da737c8853bc..310af596c3bd 100644 --- a/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/LegacyNoFacetTest.java +++ b/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/LegacyNoFacetTest.java @@ -139,22 +139,22 @@ public static void beforeClass() throws Exception { public void sumTest() throws Exception { //Int Double intResult = (Double)getStatResult("sr", "int_id", VAL_TYPE.DOUBLE); - Double intTest = (Double)calculateNumberStat(intTestStart, "sum"); + Double intTest = calculateNumberStat(intTestStart, "sum"); assertEquals(getRawResponse(), intResult,intTest); //Long Double longResult = (Double)getStatResult("sr", "long_ld", VAL_TYPE.DOUBLE); - Double longTest = (Double)calculateNumberStat(longTestStart, "sum"); + Double longTest = calculateNumberStat(longTestStart, "sum"); assertEquals(getRawResponse(), longResult,longTest); //Float Double floatResult = (Double)getStatResult("sr", "float_fd", VAL_TYPE.DOUBLE); - Double floatTest = (Double)calculateNumberStat(floatTestStart, "sum"); + Double floatTest = calculateNumberStat(floatTestStart, "sum"); assertEquals(getRawResponse(), floatResult,floatTest); //Double Double doubleResult = (Double)getStatResult("sr", "double_dd", VAL_TYPE.DOUBLE); - Double doubleTest = (Double) calculateNumberStat(doubleTestStart, "sum"); + Double doubleTest = calculateNumberStat(doubleTestStart, "sum"); assertEquals(getRawResponse(), doubleResult,doubleTest); } @@ -162,22 +162,22 @@ public void sumTest() throws Exception { public void meanTest() throws Exception { //Int Double intResult = (Double)getStatResult("mr", "int_id", VAL_TYPE.DOUBLE); - Double intTest = (Double)calculateNumberStat(intTestStart, "mean"); + Double intTest = calculateNumberStat(intTestStart, "mean"); assertEquals(getRawResponse(), intResult,intTest); //Long Double longResult = (Double)getStatResult("mr", "long_ld", VAL_TYPE.DOUBLE); - Double longTest = (Double)calculateNumberStat(longTestStart, "mean"); + Double longTest = 
calculateNumberStat(longTestStart, "mean"); assertEquals(getRawResponse(), longResult,longTest); //Float Double floatResult = (Double)getStatResult("mr", "float_fd", VAL_TYPE.DOUBLE); - Double floatTest = (Double)calculateNumberStat(floatTestStart, "mean"); + Double floatTest = calculateNumberStat(floatTestStart, "mean"); assertEquals(getRawResponse(), floatResult,floatTest); //Double Double doubleResult = (Double)getStatResult("mr", "double_dd", VAL_TYPE.DOUBLE); - Double doubleTest = (Double)calculateNumberStat(doubleTestStart, "mean"); + Double doubleTest = calculateNumberStat(doubleTestStart, "mean"); assertEquals(getRawResponse(), doubleResult,doubleTest); } @@ -185,23 +185,23 @@ public void meanTest() throws Exception { public void stddevTest() throws Exception { //Int Double intResult = (Double)getStatResult("str", "int_id", VAL_TYPE.DOUBLE); - Double intTest = (Double)calculateNumberStat(intTestStart, "stddev"); + Double intTest = calculateNumberStat(intTestStart, "stddev"); assertEquals(getRawResponse(), intResult, intTest, 0.00000000001); //Long Double longResult = (Double)getStatResult("str", "long_ld", VAL_TYPE.DOUBLE); - Double longTest = (Double)calculateNumberStat(longTestStart, "stddev"); + Double longTest = calculateNumberStat(longTestStart, "stddev"); assertEquals(getRawResponse(), longResult, longTest, 0.00000000001); //Float Double floatResult = (Double)getStatResult("str", "float_fd", VAL_TYPE.DOUBLE); - Double floatTest = (Double)calculateNumberStat(floatTestStart, "stddev"); + Double floatTest = calculateNumberStat(floatTestStart, "stddev"); assertEquals(getRawResponse(), floatResult, floatTest, 0.00000000001); //Double Double doubleResult = (Double)getStatResult("str", "double_dd", VAL_TYPE.DOUBLE); - Double doubleTest = (Double)calculateNumberStat(doubleTestStart, "stddev"); + Double doubleTest = calculateNumberStat(doubleTestStart, "stddev"); assertEquals(getRawResponse(), doubleResult, doubleTest, 0.00000000001); } @@ -209,22 +209,22 @@ public void stddevTest() throws Exception { public void medianTest() throws Exception { //Int Double intResult = (Double)getStatResult("medr", "int_id", VAL_TYPE.DOUBLE); - Double intTest = (Double)calculateNumberStat(intTestStart, "median"); + Double intTest = calculateNumberStat(intTestStart, "median"); assertEquals(getRawResponse(), intResult,intTest); //Long Double longResult = (Double)getStatResult("medr", "long_ld", VAL_TYPE.DOUBLE); - Double longTest = (Double)calculateNumberStat(longTestStart, "median"); + Double longTest = calculateNumberStat(longTestStart, "median"); assertEquals(getRawResponse(), longResult,longTest); //Float Double floatResult = (Double)getStatResult("medr", "float_fd", VAL_TYPE.DOUBLE); - Double floatTest = (Double)calculateNumberStat(floatTestStart, "median"); + Double floatTest = calculateNumberStat(floatTestStart, "median"); assertEquals(getRawResponse(), floatResult,floatTest); //Double Double doubleResult = (Double)getStatResult("medr", "double_dd", VAL_TYPE.DOUBLE); - Double doubleTest = (Double)calculateNumberStat(doubleTestStart, "median"); + Double doubleTest = calculateNumberStat(doubleTestStart, "median"); assertEquals(getRawResponse(), doubleResult,doubleTest); } diff --git a/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/facet/LegacyAbstractAnalyticsFacetCloudTest.java b/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/facet/LegacyAbstractAnalyticsFacetCloudTest.java index 02bb7efd9407..521b6df220be 100644 --- 
a/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/facet/LegacyAbstractAnalyticsFacetCloudTest.java +++ b/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/facet/LegacyAbstractAnalyticsFacetCloudTest.java @@ -171,7 +171,7 @@ public > ArrayList calculateFacetedStat(ArrayList(); for (List list : lists) { if( list.size() == 0) continue; - Collections.sort((List)list); + Collections.sort(list); result.add(list.get(0)); } } else { diff --git a/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/facet/LegacyAbstractAnalyticsFacetTest.java b/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/facet/LegacyAbstractAnalyticsFacetTest.java index d406b677ca29..94f679d1ba16 100644 --- a/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/facet/LegacyAbstractAnalyticsFacetTest.java +++ b/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/facet/LegacyAbstractAnalyticsFacetTest.java @@ -267,9 +267,9 @@ public > ArrayList calculateStat(ArrayList> } } else if (stat.equals("min")) { result = new ArrayList(); - for (List list : lists) { + for (List list : lists) { if( list.size() == 0) continue; - Collections.sort((List)list); + Collections.sort(list); result.add(list.get(0)); } } else { diff --git a/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/facet/LegacyFieldFacetTest.java b/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/facet/LegacyFieldFacetTest.java index 843c605d70aa..596b0bae3844 100644 --- a/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/facet/LegacyFieldFacetTest.java +++ b/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/facet/LegacyFieldFacetTest.java @@ -1074,6 +1074,7 @@ private void checkStddevs(ArrayList list1, ArrayList list2) { } } + @SuppressWarnings({"unchecked", "rawtypes"}) public static void assertEquals(String mes, Object actual, Object expected) { Collections.sort((List) actual); Collections.sort((List) expected); diff --git a/solr/contrib/clustering/build.gradle b/solr/contrib/clustering/build.gradle index e939a297d0fb..d308d2dc80a7 100644 --- a/solr/contrib/clustering/build.gradle +++ b/solr/contrib/clustering/build.gradle @@ -18,6 +18,8 @@ apply plugin: 'java-library' +description = 'Clustering Integraton' + dependencies { implementation project(':solr:core') implementation project(':lucene:analysis:common') diff --git a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/ClusteringComponent.java b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/ClusteringComponent.java index 5ff6a6dc62d2..1c2b471a281f 100644 --- a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/ClusteringComponent.java +++ b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/ClusteringComponent.java @@ -179,7 +179,7 @@ public void inform(SolrCore core) { if (!engine.isAvailable()) { if (optional) { - log.info("Optional clustering engine not available: " + name); + log.info("Optional clustering engine not available: {}", name); } else { throw new SolrException(ErrorCode.SERVER_ERROR, "A required clustering engine failed to initialize, check the logs: " + name); @@ -192,11 +192,11 @@ public void inform(SolrCore core) { } else if (engine instanceof DocumentClusteringEngine) { previousEntry = documentClusteringEngines.put(name, (DocumentClusteringEngine) engine); } else { - log.warn("Unknown type of a clustering engine for class: " + engineClassName); + log.warn("Unknown type of a clustering 
engine for class: {}", engineClassName); continue; } if (previousEntry != null) { - log.warn("Duplicate clustering engine component named '" + name + "'."); + log.warn("Duplicate clustering engine component named '{}'.", name); } } } @@ -237,7 +237,7 @@ public void process(ResponseBuilder rb) throws IOException { Object clusters = engine.cluster(rb.getQuery(), solrDocList, docIds, rb.req); rb.rsp.add("clusters", clusters); } else { - log.warn("No engine named: " + name); + log.warn("No engine named: {}", name); } } @@ -257,7 +257,7 @@ public void process(ResponseBuilder rb) throws IOException { } rb.rsp.add("clusters", nl); } else { - log.warn("No engine named: " + name); + log.warn("No engine named: {}", name); } } } @@ -312,7 +312,7 @@ public void modifyRequest(ResponseBuilder rb, SearchComponent who, ShardRequest sreq.params.set(CommonParams.FL, fl + sb.toString()); } } else { - log.warn("No engine named: " + name); + log.warn("No engine named: {}", name); } } } @@ -342,7 +342,7 @@ public void finishStage(ResponseBuilder rb) { Object clusters = engine.cluster(rb.getQuery(), solrDocList, docIds, rb.req); rb.rsp.add("clusters", clusters); } else { - log.warn("No engine named: " + name); + log.warn("No engine named: {}", name); } } } @@ -383,9 +383,11 @@ private static void setupDefaultEngine(String type, } if (defaultEngine != null) { - log.info("Default engine for " + type + ": " + engineName + " [" + defaultEngine.getClass().getSimpleName() + "]"); + if (log.isInfoEnabled()) { + log.info("Default engine for {}: {} [{}]", type, engineName, defaultEngine.getClass().getSimpleName()); + } } else { - log.warn("No default engine for " + type + "."); + log.warn("No default engine for {}.", type); } } } diff --git a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngine.java b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngine.java index b75b8c6deed3..8e1f62564b5e 100644 --- a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngine.java +++ b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngine.java @@ -33,6 +33,7 @@ import org.apache.commons.lang3.StringUtils; import org.apache.lucene.search.Query; +import org.apache.lucene.search.TotalHits; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrException; @@ -153,15 +154,16 @@ public String init(NamedList config, final SolrCore core) { // Load Carrot2-Workbench exported attribute XMLs based on the 'name' attribute // of this component. This by-name convention lookup is used to simplify configuring algorithms. 
String componentName = initParams.get(ClusteringEngine.ENGINE_NAME); - log.info("Initializing Clustering Engine '" + - MoreObjects.firstNonNull(componentName, "") + "'"); + if (log.isInfoEnabled()) { + log.info("Initializing Clustering Engine '{}'", MoreObjects.firstNonNull(componentName, "")); + } if (!Strings.isNullOrEmpty(componentName)) { IResource[] attributeXmls = resourceLookup.getAll(componentName + "-attributes.xml"); if (attributeXmls.length > 0) { if (attributeXmls.length > 1) { - log.warn("More than one attribute file found, first one will be used: " - + Arrays.toString(attributeXmls)); + log.warn("More than one attribute file found, first one will be used: {}" + , Arrays.toString(attributeXmls)); // logOk } withContextClassLoader(core.getResourceLoader().getClassLoader(), () -> { @@ -308,8 +310,8 @@ private List getDocuments(SolrDocumentList solrDocList, Map highlights = highlighter.doHighlighting(docAsList, theQuery, req, snippetFieldAry); if (highlights != null && highlights.size() == 1) { // should only be one value given our setup @@ -457,8 +459,8 @@ private Map getCustomFieldsMap(SolrParams solrParams) { if (split.length == 2 && StringUtils.isNotBlank(split[0]) && StringUtils.isNotBlank(split[1])) { customFields.put(split[0], split[1]); } else { - log.warn("Unsupported format for " + CarrotParams.CUSTOM_FIELD_NAME - + ": '" + customFieldSpec + "'. Skipping this field definition."); + log.warn("Unsupported format for {}: '{}'. Skipping this field definition." + , CarrotParams.CUSTOM_FIELD_NAME, customFieldSpec); } } } diff --git a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/LuceneCarrot2StemmerFactory.java b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/LuceneCarrot2StemmerFactory.java index 584757c7cbc5..ae03c3d99601 100644 --- a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/LuceneCarrot2StemmerFactory.java +++ b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/LuceneCarrot2StemmerFactory.java @@ -137,17 +137,17 @@ public static IStemmer createStemmer(LanguageCode language) { .get(language); if (stemmerClazz == null) { - log.warn("No Snowball stemmer class for: " + language.name() - + ". Quality of clustering may be degraded."); + log.warn("No Snowball stemmer class for: {}. " + + "Quality of clustering may be degraded.", language.name()); return IdentityStemmer.INSTANCE; } try { return new SnowballStemmerAdapter(stemmerClazz.getConstructor().newInstance()); } catch (Exception e) { - log.warn("Could not instantiate snowball stemmer" - + " for language: " + language.name() - + ". Quality of clustering may be degraded.", e); + log.warn("Could not instantiate snowball stemmer for language: {}" + + ". Quality of clustering may be degraded." + , language.name(), e); return IdentityStemmer.INSTANCE; } diff --git a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/SolrResourceLocator.java b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/SolrResourceLocator.java index 2cc67993a2ec..4c0979929e96 100644 --- a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/SolrResourceLocator.java +++ b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/SolrResourceLocator.java @@ -60,7 +60,7 @@ public static T firstNonNull(T... 
args) { @Override public IResource[] getAll(final String resource) { final String resourceName = carrot2ResourcesDir + "/" + resource; - log.debug("Looking for Solr resource: " + resourceName); + log.debug("Looking for Solr resource: {}", resourceName); InputStream resourceStream = null; final byte [] asBytes; @@ -68,8 +68,8 @@ public IResource[] getAll(final String resource) { resourceStream = resourceLoader.openResource(resourceName); asBytes = IOUtils.toByteArray(resourceStream); } catch (IOException e) { - log.debug("Resource not found in Solr's config: " + resourceName - + ". Using the default " + resource + " from Carrot JAR."); + log.debug("Resource not found in Solr's config: {}. Using the default {} from Carrot JAR." + , resourceName, resource); return new IResource[] {}; } finally { if (resourceStream != null) { @@ -81,7 +81,7 @@ public IResource[] getAll(final String resource) { } } - log.info("Loaded Solr resource: " + resourceName); + log.info("Loaded Solr resource: {}", resourceName); final IResource foundResource = new IResource() { @Override diff --git a/solr/contrib/dataimporthandler-extras/build.gradle b/solr/contrib/dataimporthandler-extras/build.gradle index 2207224e1901..fde00c3cec87 100644 --- a/solr/contrib/dataimporthandler-extras/build.gradle +++ b/solr/contrib/dataimporthandler-extras/build.gradle @@ -17,6 +17,8 @@ apply plugin: 'java-library' +description = 'Data Import Handler Extras' + dependencies { implementation project(':solr:core') diff --git a/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java b/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java index f19a4ca9ffd4..6e5ad92c12c2 100644 --- a/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java +++ b/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java @@ -112,7 +112,7 @@ public void init(Context context) { String varName = ConfigNameConstants.IMPORTER_NS_SHORT + "." + cname + "." + DocBuilder.LAST_INDEX_TIME; Object varValue = context.getVariableResolver().resolve(varName); - log.info(varName+"="+varValue); + log.info("{}={}", varName, varValue); if (varValue != null && !"".equals(varValue) && !"".equals(getStringFromContext("fetchMailsSince", ""))) { @@ -123,21 +123,21 @@ public void init(Context context) { try { tmp = sinceDateParser.parse((String)varValue); if (tmp.getTime() == 0) { - log.info("Ignoring initial value "+varValue+" for "+varName+ - " in favor of fetchMailsSince config parameter"); + log.info("Ignoring initial value {} for {} in favor of fetchMailsSince config parameter" + , varValue, varName); tmp = null; // don't use this value } } catch (ParseException e) { // probably ok to ignore this since we have other options below // as we're just trying to figure out if the date is 0 - log.warn("Failed to parse "+varValue+" from "+varName+" due to: "+e); + log.warn("Failed to parse {} from {} due to", varValue, varName, e); } if (tmp == null) { // favor fetchMailsSince in this case because the value from // dataimport.properties is the default/init value varValue = getStringFromContext("fetchMailsSince", ""); - log.info("fetchMailsSince="+varValue); + log.info("fetchMailsSince={}", varValue); } } @@ -145,7 +145,7 @@ public void init(Context context) { varName = ConfigNameConstants.IMPORTER_NS_SHORT + "." 
+ DocBuilder.LAST_INDEX_TIME; varValue = context.getVariableResolver().resolve(varName); - log.info(varName+"="+varValue); + log.info("{}={}", varName, varValue); } if (varValue != null && varValue instanceof String) { @@ -157,13 +157,13 @@ public void init(Context context) { if (lastIndexTime == null) lastIndexTime = getStringFromContext("fetchMailsSince", ""); - log.info("Using lastIndexTime "+lastIndexTime+" for mail import"); + log.info("Using lastIndexTime {} for mail import", lastIndexTime); this.fetchMailsSince = null; if (lastIndexTime != null && lastIndexTime.length() > 0) { try { fetchMailsSince = sinceDateParser.parse(lastIndexTime); - log.info("Parsed fetchMailsSince=" + lastIndexTime); + log.info("Parsed fetchMailsSince={}", lastIndexTime); } catch (ParseException e) { throw new DataImportHandlerException(DataImportHandlerException.SEVERE, "Invalid value for fetchMailSince: " + lastIndexTime, e); @@ -247,12 +247,13 @@ private Map getDocumentFromMail(Message mail) { addPartToDocument(mail, row, true); return row; } catch (Exception e) { - log.error("Failed to convert message [" + mail.toString() - + "] to document due to: " + e, e); + log.error("Failed to convert message [{}] to document due to: {}" + , mail, e, e); return null; } } + @SuppressWarnings({"unchecked"}) public void addPartToDocument(Part part, Map row, boolean outerMost) throws Exception { if (part instanceof Message) { addEnvelopeToDocument(part, row); @@ -269,9 +270,9 @@ public void addPartToDocument(Part part, Map row, boolean outerMo for (int i = 0; i < count; i++) addPartToDocument(mp.getBodyPart(i), row, false); } else { - log.warn("Multipart content is a not an instance of Multipart! Content is: " - + (content != null ? content.getClass().getName() : "null") - + ". Typically, this is due to the Java Activation JAR being loaded by the wrong classloader."); + log.warn("Multipart content is a not an instance of Multipart! Content is: {}" + + ". Typically, this is due to the Java Activation JAR being loaded by the wrong classloader." + , (content != null ? 
content.getClass().getName() : "null")); } } else if (part.isMimeType("message/rfc822")) { addPartToDocument((Part) part.getContent(), row, false); @@ -374,8 +375,8 @@ private boolean connectToMailBox() { if (("imap".equals(protocol) || "imaps".equals(protocol)) && "imap.gmail.com".equals(host)) { - log.info("Consider using 'gimaps' protocol instead of '" + protocol - + "' for enabling GMail specific extensions for " + host); + log.info("Consider using 'gimaps' protocol instead of '{}' for enabling GMail specific extensions for {}" + , protocol, host); } props.setProperty("mail.store.protocol", protocol); @@ -399,7 +400,7 @@ private boolean connectToMailBox() { } else { mailbox.connect(host, user, password); } - log.info("Connected to " + user + "'s mailbox on " + host); + log.info("Connected to {}'s mailbox on {}", user, host); return true; } catch (MessagingException e) { @@ -474,7 +475,7 @@ private void logConfig() { .append(lineSep); config.append("includeSharedFolders : ").append(includeSharedFolders) .append(lineSep); - log.info(config.toString()); + log.info("{}", config); } class FolderIterator implements Iterator { @@ -515,14 +516,16 @@ public Folder next() { hasMessages = (next.getType() & Folder.HOLDS_MESSAGES) != 0; next.open(Folder.READ_ONLY); lastFolder = next; - log.info("Opened folder : " + fullName); + log.info("Opened folder : {}", fullName); } if (recurse && ((next.getType() & Folder.HOLDS_FOLDERS) != 0)) { Folder[] children = next.list(); log.info("Added its children to list : "); for (int i = children.length - 1; i >= 0; i--) { folders.add(0, children[i]); - log.info("child name : " + children[i].getFullName()); + if (log.isInfoEnabled()) { + log.info("child name : {}", children[i].getFullName()); + } } if (children.length == 0) log.info("NO children : "); } @@ -530,7 +533,7 @@ public Folder next() { } while (!hasMessages); return next; } catch (Exception e) { - log.warn("Failed to read folders due to: "+e); + log.warn("Failed to read folders due to: {}", e); // throw new // DataImportHandlerException(DataImportHandlerException.SEVERE, // "Folder open failed", e); @@ -568,13 +571,13 @@ private void getOtherUserFolders() { try { Folder[] ufldrs = mailbox.getUserNamespaces(null); if (ufldrs != null) { - log.info("Found " + ufldrs.length + " user namespace folders"); + log.info("Found {} user namespace folders", ufldrs.length); for (Folder ufldr : ufldrs) folders.add(ufldr); } } catch (MessagingException me) { - log.warn("Messaging exception retrieving user namespaces: " - + me.getMessage()); + log.warn("Messaging exception retrieving user namespaces: {}" + , me.getMessage()); } } @@ -582,13 +585,13 @@ private void getSharedFolders() { try { Folder[] sfldrs = mailbox.getSharedNamespaces(); if (sfldrs != null) { - log.info("Found " + sfldrs.length + " shared namespace folders"); + log.info("Found {} shared namespace folders", sfldrs.length); for (Folder sfldr : sfldrs) folders.add(sfldr); } } catch (MessagingException me) { - log.warn("Messaging exception retrieving shared namespaces: " - + me.getMessage()); + log.warn("Messaging exception retrieving shared namespaces: {}" + , me.getMessage()); } } @@ -620,14 +623,16 @@ public MessageIterator(Folder folder, int batchSize) { this.batchSize = batchSize; SearchTerm st = getSearchTerm(); - log.info("SearchTerm=" + st); + log.info("SearchTerm={}", st); if (st != null || folder instanceof GmailFolder) { doBatching = false; // Searching can still take a while even though we're only pulling // envelopes; unless you're using gmail 
server-side filter, which is // fast - log.info("Searching folder " + folder.getName() + " for messages"); + if (log.isInfoEnabled()) { + log.info("Searching folder {} for messages", folder.getName()); + } final RTimer searchTimer = new RTimer(); // If using GMail, speed up the envelope processing by doing a @@ -642,12 +647,14 @@ public MessageIterator(Folder folder, int batchSize) { if (folder instanceof GmailFolder && fetchMailsSince != null) { String afterCrit = "after:" + afterFmt.format(fetchMailsSince); - log.info("Added server-side gmail filter: " + afterCrit); + log.info("Added server-side gmail filter: {}", afterCrit); Message[] afterMessages = folder.search(new GmailRawSearchTerm( afterCrit)); - - log.info("GMail server-side filter found " + afterMessages.length - + " messages received " + afterCrit + " in folder " + folder.getName()); + + if (log.isInfoEnabled()) { + log.info("GMail server-side filter found {} messages received {} in folder {}" + , afterMessages.length, afterCrit, folder.getName()); + } // now pass in the server-side filtered messages to the local filter messagesInCurBatch = folder.search((st != null ? st : this), afterMessages); @@ -657,11 +664,13 @@ public MessageIterator(Folder folder, int batchSize) { totalInFolder = messagesInCurBatch.length; folder.fetch(messagesInCurBatch, fp); current = 0; - log.info("Total messages : " + totalInFolder); - log.info("Search criteria applied. Batching disabled. Took {} (ms)", searchTimer.getTime()); + if (log.isInfoEnabled()) { + log.info("Total messages : {}", totalInFolder); + log.info("Search criteria applied. Batching disabled. Took {} (ms)", searchTimer.getTime()); // logOk + } } else { totalInFolder = folder.getMessageCount(); - log.info("Total messages : " + totalInFolder); + log.info("Total messages : {}", totalInFolder); getNextBatch(batchSize, folder); } } catch (MessagingException e) { @@ -685,8 +694,8 @@ private void getNextBatch(int batchSize, Folder folder) folder.fetch(messagesInCurBatch, fp); current = 0; currentBatch++; - log.info("Current Batch : " + currentBatch); - log.info("Messages in this batch : " + messagesInCurBatch.length); + log.info("Current Batch : {}", currentBatch); + log.info("Messages in this batch : {}", messagesInCurBatch.length); } public boolean hasNext() { @@ -741,8 +750,10 @@ public MailsSinceLastCheckFilter(Date date) { @SuppressWarnings("serial") public SearchTerm getCustomSearch(final Folder folder) { - log.info("Building mail filter for messages in " + folder.getName() - + " that occur after " + sinceDateParser.format(since)); + if (log.isInfoEnabled()) { + log.info("Building mail filter for messages in {} that occur after {}" + , folder.getName(), sinceDateParser.format(since)); + } return new DateTerm(ComparisonTerm.GE, since) { private int matched = 0; private int seen = 0; @@ -761,16 +772,20 @@ public boolean match(Message msg) { } else { String msgDateStr = (msgDate != null) ? sinceDateParser.format(msgDate) : "null"; String sinceDateStr = (since != null) ? 
sinceDateParser.format(since) : "null"; - log.debug("Message " + msg.getSubject() + " was received at [" + msgDateStr - + "], since filter is [" + sinceDateStr + "]"); + if (log.isDebugEnabled()) { + log.debug("Message {} was received at [{}], since filter is [{}]" + , msg.getSubject(), msgDateStr, sinceDateStr); + } } } catch (MessagingException e) { - log.warn("Failed to process message due to: "+e, e); + log.warn("Failed to process message due to: {}", e, e); } if (seen % 100 == 0) { - log.info("Matched " + matched + " of " + seen + " messages since: " - + sinceDateParser.format(since)); + if (log.isInfoEnabled()) { + log.info("Matched {} of {} messages since: {}" + , matched, seen, sinceDateParser.format(since)); + } } return isMatch; diff --git a/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/TikaEntityProcessor.java b/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/TikaEntityProcessor.java index c789fc0c6314..78a53fac2583 100644 --- a/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/TikaEntityProcessor.java +++ b/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/TikaEntityProcessor.java @@ -132,6 +132,7 @@ protected void firstInit(Context context) { public Map nextRow() { if(done) return null; Map row = new HashMap<>(); + @SuppressWarnings({"unchecked"}) DataSource dataSource = context.getDataSource(); InputStream is = dataSource.getData(context.getResolvedEntityAttribute(URL)); ContentHandler contentHandler = null; diff --git a/solr/contrib/dataimporthandler-extras/src/test/org/apache/solr/handler/dataimport/TestMailEntityProcessor.java b/solr/contrib/dataimporthandler-extras/src/test/org/apache/solr/handler/dataimport/TestMailEntityProcessor.java index 0b00be605575..027a8d73279a 100644 --- a/solr/contrib/dataimporthandler-extras/src/test/org/apache/solr/handler/dataimport/TestMailEntityProcessor.java +++ b/solr/contrib/dataimporthandler-extras/src/test/org/apache/solr/handler/dataimport/TestMailEntityProcessor.java @@ -17,7 +17,6 @@ package org.apache.solr.handler.dataimport; import org.apache.solr.common.SolrInputDocument; -import org.apache.solr.handler.dataimport.config.Entity; import org.junit.Ignore; import org.junit.Test; @@ -65,7 +64,7 @@ public void testConnection() { paramMap.put("processAttachement", "false"); DataImporter di = new DataImporter(); di.loadAndInit(getConfigFromMap(paramMap)); - Entity ent = di.getConfig().getEntities().get(0); + @SuppressWarnings({"unchecked"}) RequestInfo rp = new RequestInfo(null, createMap("command", "full-import"), null); SolrWriterImpl swi = new SolrWriterImpl(); di.runCmd(rp, swi); @@ -80,7 +79,7 @@ public void testRecursion() { paramMap.put("processAttachement", "false"); DataImporter di = new DataImporter(); di.loadAndInit(getConfigFromMap(paramMap)); - Entity ent = di.getConfig().getEntities().get(0); + @SuppressWarnings({"unchecked"}) RequestInfo rp = new RequestInfo(null, createMap("command", "full-import"), null); SolrWriterImpl swi = new SolrWriterImpl(); di.runCmd(rp, swi); @@ -96,7 +95,7 @@ public void testExclude() { paramMap.put("exclude", ".*grandchild.*"); DataImporter di = new DataImporter(); di.loadAndInit(getConfigFromMap(paramMap)); - Entity ent = di.getConfig().getEntities().get(0); + @SuppressWarnings({"unchecked"}) RequestInfo rp = new RequestInfo(null, createMap("command", "full-import"), null); SolrWriterImpl swi = new SolrWriterImpl(); di.runCmd(rp, swi); @@ -112,7 +111,7 @@ 
public void testInclude() { paramMap.put("include", ".*grandchild.*"); DataImporter di = new DataImporter(); di.loadAndInit(getConfigFromMap(paramMap)); - Entity ent = di.getConfig().getEntities().get(0); + @SuppressWarnings({"unchecked"}) RequestInfo rp = new RequestInfo(null, createMap("command", "full-import"), null); SolrWriterImpl swi = new SolrWriterImpl(); di.runCmd(rp, swi); @@ -129,7 +128,7 @@ public void testIncludeAndExclude() { paramMap.put("include", ".*grandchild.*"); DataImporter di = new DataImporter(); di.loadAndInit(getConfigFromMap(paramMap)); - Entity ent = di.getConfig().getEntities().get(0); + @SuppressWarnings({"unchecked"}) RequestInfo rp = new RequestInfo(null, createMap("command", "full-import"), null); SolrWriterImpl swi = new SolrWriterImpl(); di.runCmd(rp, swi); @@ -138,6 +137,7 @@ public void testIncludeAndExclude() { @Test @Ignore("Needs a Mock Mail Server to work") + @SuppressWarnings({"unchecked"}) public void testFetchTimeSince() throws ParseException { paramMap.put("folders", "top1/child11"); paramMap.put("recurse", "true"); @@ -145,7 +145,6 @@ public void testFetchTimeSince() throws ParseException { paramMap.put("fetchMailsSince", "2008-12-26 00:00:00"); DataImporter di = new DataImporter(); di.loadAndInit(getConfigFromMap(paramMap)); - Entity ent = di.getConfig().getEntities().get(0); RequestInfo rp = new RequestInfo(null, createMap("command", "full-import"), null); SolrWriterImpl swi = new SolrWriterImpl(); di.runCmd(rp, swi); diff --git a/solr/contrib/dataimporthandler/build.gradle b/solr/contrib/dataimporthandler/build.gradle index 403a11d07fc5..9286d4317b60 100644 --- a/solr/contrib/dataimporthandler/build.gradle +++ b/solr/contrib/dataimporthandler/build.gradle @@ -18,6 +18,8 @@ apply plugin: 'java-library' +description = 'Data Import Handler' + dependencies { implementation project(':solr:core') diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/BinURLDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/BinURLDataSource.java index c1b4808eee79..03a30ab07a91 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/BinURLDataSource.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/BinURLDataSource.java @@ -61,14 +61,14 @@ public void init(Context context, Properties initProps) { try { connectionTimeout = Integer.parseInt(cTimeout); } catch (NumberFormatException e) { - log.warn("Invalid connection timeout: " + cTimeout); + log.warn("Invalid connection timeout: {}", cTimeout); } } if (rTimeout != null) { try { readTimeout = Integer.parseInt(rTimeout); } catch (NumberFormatException e) { - log.warn("Invalid read timeout: " + rTimeout); + log.warn("Invalid read timeout: {}", rTimeout); } } } @@ -79,7 +79,7 @@ public InputStream getData(String query) { try { if (URIMETHOD.matcher(query).find()) url = new URL(query); else url = new URL(baseUrl + query); - log.debug("Accessing URL: " + url.toString()); + log.debug("Accessing URL: {}", url); URLConnection conn = url.openConnection(); conn.setConnectTimeout(connectionTimeout); conn.setReadTimeout(readTimeout); diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ClobTransformer.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ClobTransformer.java index 4bbe0467349b..2e9d93a0c1a3 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ClobTransformer.java +++ 
b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ClobTransformer.java @@ -46,6 +46,7 @@ public Object transformRow(Map aRow, Context context) { srcCol = column; Object o = aRow.get(srcCol); if (o instanceof List) { + @SuppressWarnings({"unchecked"}) List inputs = (List) o; List results = new ArrayList<>(); for (Object input : inputs) { diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/Context.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/Context.java index 4d0cb3a683cf..70dbbcb6ec61 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/Context.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/Context.java @@ -102,6 +102,7 @@ public abstract class Context { * @see org.apache.solr.handler.dataimport.DataSource * @see #getDataSource(String) */ + @SuppressWarnings({"rawtypes"}) public abstract DataSource getDataSource(); /** @@ -112,6 +113,7 @@ public abstract class Context { * @return a new DataSource instance * @see org.apache.solr.handler.dataimport.DataSource */ + @SuppressWarnings({"rawtypes"}) public abstract DataSource getDataSource(String name); /** diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ContextImpl.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ContextImpl.java index 20a2aad456d6..a47434e638b6 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ContextImpl.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ContextImpl.java @@ -38,6 +38,7 @@ public class ContextImpl extends Context { private VariableResolver resolver; + @SuppressWarnings({"rawtypes"}) private DataSource ds; private String currProcess; @@ -57,7 +58,7 @@ public class ContextImpl extends Context { public ContextImpl(EntityProcessorWrapper epw, VariableResolver resolver, - DataSource ds, String currProcess, + @SuppressWarnings({"rawtypes"})DataSource ds, String currProcess, Map global, ContextImpl parentContext, DocBuilder docBuilder) { this.epw = epw; this.docBuilder = docBuilder; @@ -83,6 +84,7 @@ public String getResolvedEntityAttribute(String name) { } @Override + @SuppressWarnings({"unchecked"}) public List> getAllEntityFields() { return epw==null || epw.getEntity() == null ? Collections.EMPTY_LIST : epw.getEntity().getAllFieldsList(); } @@ -93,6 +95,7 @@ public VariableResolver getVariableResolver() { } @Override + @SuppressWarnings({"rawtypes"}) public DataSource getDataSource() { if (ds != null) return ds; if(epw==null) { return null; } @@ -108,6 +111,7 @@ public DataSource getDataSource() { } @Override + @SuppressWarnings({"rawtypes"}) public DataSource getDataSource(String name) { return dataImporter.getDataSourceInstance(epw==null ? 
null : epw.getEntity(), name, this); } diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java index c48c332616f7..296fabed453d 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java @@ -89,8 +89,8 @@ public DataImporter getImporter() { } @Override - @SuppressWarnings("unchecked") - public void init(NamedList args) { + + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init(args); Map macro = new HashMap<>(); macro.put("expandMacros", "false"); @@ -131,6 +131,7 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) } } SolrParams params = req.getParams(); + @SuppressWarnings({"rawtypes"}) NamedList defaultParams = (NamedList) initArgs.get("defaults"); RequestInfo requestParams = new RequestInfo(req, getParamsMap(params), contentStream); String command = requestParams.getCommand(); @@ -242,7 +243,7 @@ private DIHWriter getSolrWriter(final UpdateRequestProcessor processor, SolrParams reqParams = req.getParams(); String writerClassStr = null; if (reqParams != null && reqParams.get(PARAM_WRITER_IMPL) != null) { - writerClassStr = (String) reqParams.get(PARAM_WRITER_IMPL); + writerClassStr = reqParams.get(PARAM_WRITER_IMPL); } DIHWriter writer; if (writerClassStr != null @@ -252,6 +253,7 @@ private DIHWriter getSolrWriter(final UpdateRequestProcessor processor, try { @SuppressWarnings("unchecked") Class writerClass = DocBuilder.loadClass(writerClassStr, req.getCore()); + @SuppressWarnings({"rawtypes"}) Constructor cnstr = writerClass.getConstructor(new Class[] { UpdateRequestProcessor.class, SolrQueryRequest.class}); return cnstr.newInstance((Object) processor, (Object) req); @@ -266,7 +268,7 @@ public boolean upload(SolrInputDocument document) { try { return super.upload(document); } catch (RuntimeException e) { - log.error("Exception while adding: " + document, e); + log.error("Exception while adding: {}", document, e); return false; } } diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java index 932849bbd9b7..c5b2f70bf226 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java @@ -125,7 +125,7 @@ boolean maybeReloadConfiguration(RequestInfo params, } else if(dataconfigFile!=null) { is = new InputSource(core.getResourceLoader().openResource(dataconfigFile)); is.setSystemId(SystemIdResolver.createSystemIdFromResourceName(dataconfigFile)); - log.info("Loading DIH Configuration: " + dataconfigFile); + log.info("Loading DIH Configuration: {}", dataconfigFile); } if(is!=null) { config = loadDataConfig(is); @@ -142,13 +142,14 @@ boolean maybeReloadConfiguration(RequestInfo params, String name = defaultParams.getName(position); if (name.equals("datasource")) { success = true; + @SuppressWarnings({"rawtypes"}) NamedList dsConfig = (NamedList) defaultParams.getVal(position); log.info("Getting configuration for Global Datasource..."); Map props = new HashMap<>(); for (int i = 0; i < dsConfig.size(); i++) { props.put(dsConfig.getName(i), 
dsConfig.getVal(i).toString()); } - log.info("Adding properties to datasource: " + props); + log.info("Adding properties to datasource: {}", props); dsProps.put((String) dsConfig.get("name"), props); } position++; @@ -361,6 +362,7 @@ Object retrieve(Object key) { return store.get(key); } + @SuppressWarnings({"unchecked", "rawtypes"}) public DataSource getDataSourceInstance(Entity key, String name, Context ctx) { Map p = requestLevelDataSourceProps.get(name); if (p == null) @@ -373,6 +375,7 @@ public DataSource getDataSourceInstance(Entity key, String name, Context ctx) { throw new DataImportHandlerException(SEVERE, "No dataSource :" + name + " available for entity :" + key.getName()); String type = p.get(TYPE); + @SuppressWarnings({"rawtypes"}) DataSource dataSrc = null; if (type == null) { dataSrc = new JdbcDataSource(); @@ -493,11 +496,13 @@ void runCmd(RequestInfo reqParams, DIHWriter sw) { Map getStatusMessages() { //this map object is a Collections.synchronizedMap(new LinkedHashMap()). if we // synchronize on the object it must be safe to iterate through the map + @SuppressWarnings({"rawtypes"}) Map statusMessages = (Map) retrieve(STATUS_MSGS); Map result = new LinkedHashMap<>(); if (statusMessages != null) { synchronized (statusMessages) { for (Object o : statusMessages.entrySet()) { + @SuppressWarnings({"rawtypes"}) Map.Entry e = (Map.Entry) o; //the toString is taken because some of the Objects create the data lazily when toString() is called result.put((String) e.getKey(), e.getValue().toString()); @@ -524,6 +529,7 @@ Map getEvaluators() { /** * used by tests. */ + @SuppressWarnings({"unchecked"}) Map getEvaluators(List> fn) { Map evaluators = new HashMap<>(); evaluators.put(Evaluator.DATE_FORMAT_EVALUATOR, new DateFormatEvaluator()); diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DateFormatTransformer.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DateFormatTransformer.java index 6da9cc1b08b0..61edbe61117c 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DateFormatTransformer.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DateFormatTransformer.java @@ -68,6 +68,7 @@ public Object transformRow(Map aRow, Context context) { try { Object o = aRow.get(srcCol); if (o instanceof List) { + @SuppressWarnings({"rawtypes"}) List inputs = (List) o; List results = new ArrayList<>(); for (Object input : inputs) { diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DebugLogger.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DebugLogger.java index 2fd93031073f..9de42fc6f3ad 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DebugLogger.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DebugLogger.java @@ -44,6 +44,7 @@ class DebugLogger { private Stack debugStack; + @SuppressWarnings({"rawtypes"}) NamedList output; // private final SolrWriter writer1; @@ -54,6 +55,7 @@ class DebugLogger { boolean enabled = true; + @SuppressWarnings({"rawtypes"}) public DebugLogger() { // writer = solrWriter; output = new NamedList(); @@ -75,6 +77,7 @@ private DebugInfo peekStack() { return debugStack.isEmpty() ? 
null : debugStack.peek(); } + @SuppressWarnings({"unchecked"}) public void log(DIHLogLevels event, String name, Object row) { if (event == DIHLogLevels.DISABLE_LOGGING) { enabled = false; @@ -150,9 +153,12 @@ private void popAllTransformers() { } } - private void addToNamedList(NamedList nl, Object row) { + @SuppressWarnings({"unchecked"}) + private void addToNamedList(@SuppressWarnings({"rawtypes"})NamedList nl, Object row) { if (row instanceof List) { + @SuppressWarnings({"rawtypes"}) List list = (List) row; + @SuppressWarnings({"rawtypes"}) NamedList l = new NamedList(); nl.add(null, l); for (Object o : list) { @@ -167,6 +173,7 @@ private void addToNamedList(NamedList nl, Object row) { } } + @SuppressWarnings({"rawtypes"}) DataSource wrapDs(final DataSource ds) { return new DataSource() { @Override @@ -236,6 +243,7 @@ public static String getStacktraceString(Exception e) { } static String getTransformerName(Transformer t) { + @SuppressWarnings({"rawtypes"}) Class transClass = t.getClass(); if (t instanceof EntityProcessorWrapper.ReflectionTransformer) { return ((EntityProcessorWrapper.ReflectionTransformer) t).trans; @@ -256,12 +264,14 @@ private static class DebugInfo { int tCount, rowCount; + @SuppressWarnings({"rawtypes"}) NamedList lst; DIHLogLevels type; DebugInfo parent; + @SuppressWarnings({"unchecked", "rawtypes"}) public DebugInfo(String name, DIHLogLevels type, DebugInfo parent) { this.name = name; this.type = type; diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java index 43e3af70795c..0f8dd6ed323f 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java @@ -66,7 +66,7 @@ public class DocBuilder { private EntityProcessorWrapper currentEntityProcessorWrapper; - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) private Map statusMessages = Collections.synchronizedMap(new LinkedHashMap()); public Statistics importStatistics = new Statistics(); @@ -157,6 +157,7 @@ private void invokeEventListener(String className) { private void invokeEventListener(String className, Exception lastException) { try { + @SuppressWarnings({"unchecked"}) EventListener listener = (EventListener) loadClass(className, dataImporter.getCore()).getConstructor().newInstance(); notifyListener(listener, lastException); } catch (Exception e) { @@ -265,7 +266,9 @@ public String toString() { statusMessages.put(DataImporter.MSG.TOTAL_FAILED_DOCS, ""+ importStatistics.failedDocCount.get()); statusMessages.put("Time taken", getTimeElapsedSince(startTime.get())); - log.info("Time taken = " + getTimeElapsedSince(startTime.get())); + if (log.isInfoEnabled()) { + log.info("Time taken = {}", getTimeElapsedSince(startTime.get())); + } } catch(Exception e) { throw new RuntimeException(e); @@ -313,6 +316,7 @@ private void finish(Map lastIndexTimeProps) { } } + @SuppressWarnings({"unchecked"}) void handleError(String message, Exception e) { if (!dataImporter.getCore().getCoreContainer().isZooKeeperAware()) { writer.rollback(); @@ -385,7 +389,7 @@ private void deleteAll(Set> deletedKeys) { key = map.get(keyName); } if(key == null) { - log.warn("no key was available for deleted pk query. keyName = " + keyName); + log.warn("no key was available for deleted pk query. 
keyName = {}", keyName); continue; } writer.deleteDoc(key); @@ -483,7 +487,7 @@ private void buildDocument(VariableResolver vr, DocWrapper doc, if (seenDocCount <= reqParams.getStart()) continue; if (seenDocCount > reqParams.getStart() + reqParams.getRows()) { - log.info("Indexing stopped at docCount = " + importStatistics.docCount); + log.info("Indexing stopped at docCount = {}", importStatistics.docCount); break; } } @@ -593,6 +597,7 @@ private void handleSpecialCommands(Map arow, DocWrapper doc) { Object value = arow.get(DELETE_DOC_BY_ID); if (value != null) { if (value instanceof Collection) { + @SuppressWarnings({"rawtypes"}) Collection collection = (Collection) value; for (Object o : collection) { writer.deleteDoc(o.toString()); @@ -606,6 +611,7 @@ private void handleSpecialCommands(Map arow, DocWrapper doc) { value = arow.get(DELETE_DOC_BY_QUERY); if (value != null) { if (value instanceof Collection) { + @SuppressWarnings({"rawtypes"}) Collection collection = (Collection) value; for (Object o : collection) { writer.deleteByQuery(o.toString()); @@ -689,6 +695,7 @@ private void addFields(Entity entity, DocWrapper doc, private void addFieldToDoc(Object value, String name, boolean multiValued, DocWrapper doc) { if (value instanceof Collection) { + @SuppressWarnings({"rawtypes"}) Collection collection = (Collection) value; if (multiValued) { for (Object o : collection) { @@ -714,6 +721,7 @@ private void addFieldToDoc(Object value, String name, boolean multiValued, DocWr } } + @SuppressWarnings({"unchecked"}) public EntityProcessorWrapper getEntityProcessorWrapper(Entity entity) { EntityProcessor entityProcessor = null; if (entity.getProcessorName() == null) { @@ -759,9 +767,11 @@ private String findMatchingPkColumn(String pk, Map row) { "deltaQuery has no column to resolve to declared primary key pk='%s'", pk)); } - log.info(String.format(Locale.ROOT, - "Resolving deltaQuery column '%s' to match entity's declared pk '%s'", - resolvedPk, pk)); + if (log.isInfoEnabled()) { + log.info(String.format(Locale.ROOT, + "Resolving deltaQuery column '%s' to match entity's declared pk '%s'", + resolvedPk, pk)); + } return resolvedPk; } @@ -772,7 +782,7 @@ private String findMatchingPkColumn(String pk, Map row) { * * @return an iterator to the list of keys for which Solr documents should be updated. 
*/ - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) public Set> collectDelta(EntityProcessorWrapper epw, VariableResolver resolver, Set> deletedRows) { //someone called abort @@ -796,7 +806,9 @@ public Set> collectDelta(EntityProcessorWrapper epw, Variabl // identifying the modified rows for this entity Map> deltaSet = new HashMap<>(); - log.info("Running ModifiedRowKey() for Entity: " + epw.getEntity().getName()); + if (log.isInfoEnabled()) { + log.info("Running ModifiedRowKey() for Entity: {}", epw.getEntity().getName()); + } //get the modified rows in this entity String pk = epw.getEntity().getPk(); while (true) { @@ -844,8 +856,10 @@ public Set> collectDelta(EntityProcessorWrapper epw, Variabl return new HashSet(); } - log.info("Completed ModifiedRowKey for Entity: " + epw.getEntity().getName() + " rows obtained : " + deltaSet.size()); - log.info("Completed DeletedRowKey for Entity: " + epw.getEntity().getName() + " rows obtained : " + deletedSet.size()); + if (log.isInfoEnabled()) { + log.info("Completed ModifiedRowKey for Entity: {} rows obtained: {}", epw.getEntity().getName(), deltaSet.size()); + log.info("Completed DeletedRowKey for Entity: {} rows obtained : {}", epw.getEntity().getName(), deletedSet.size()); // logOk + } myModifiedPks.addAll(deltaSet.values()); Set> parentKeyList = new HashSet<>(); @@ -870,7 +884,9 @@ public Set> collectDelta(EntityProcessorWrapper epw, Variabl return new HashSet(); } } - log.info("Completed parentDeltaQuery for Entity: " + epw.getEntity().getName()); + if (log.isInfoEnabled()) { + log.info("Completed parentDeltaQuery for Entity: {}", epw.getEntity().getName()); + } if (epw.getEntity().isDocRoot()) deletedRows.addAll(deletedSet); @@ -919,7 +935,7 @@ public RequestInfo getReqParams() { return reqParams; } - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) static Class loadClass(String name, SolrCore core) throws ClassNotFoundException { try { return core != null ? 
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EntityProcessorWrapper.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EntityProcessorWrapper.java index 984f0303e337..d2a01b27b8b1 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EntityProcessorWrapper.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EntityProcessorWrapper.java @@ -45,6 +45,7 @@ public class EntityProcessorWrapper extends EntityProcessor { private EntityProcessor delegate; private Entity entity; + @SuppressWarnings({"rawtypes"}) private DataSource datasource; private List children = new ArrayList<>(); private DocBuilder docBuilder; @@ -68,7 +69,7 @@ public EntityProcessorWrapper(EntityProcessor delegate, Entity entity, DocBuilde public void init(Context context) { rowcache = null; this.context = context; - resolver = (VariableResolver) context.getVariableResolver(); + resolver = context.getVariableResolver(); if (entityName == null) { onError = resolver.replaceTokens(context.getEntityAttribute(ON_ERROR)); if (onError == null) onError = ABORT; @@ -110,6 +111,7 @@ public boolean add(Transformer transformer) { continue; } try { + @SuppressWarnings({"rawtypes"}) Class clazz = DocBuilder.loadClass(trans, context.getSolrCore()); if (Transformer.class.isAssignableFrom(clazz)) { transformers.add((Transformer) clazz.getConstructor().newInstance()); @@ -124,7 +126,7 @@ public boolean add(Transformer transformer) { log.error(msg); wrapAndThrow(SEVERE, nsme,msg); } catch (Exception e) { - log.error("Unable to load Transformer: " + aTransArr, e); + log.error("Unable to load Transformer: {}", aTransArr, e); wrapAndThrow(SEVERE, e,"Unable to load Transformer: " + trans); } } @@ -153,13 +155,14 @@ private void checkIfTrusted(String trans) { static class ReflectionTransformer extends Transformer { final Method meth; + @SuppressWarnings({"rawtypes"}) final Class clazz; final String trans; final Object o; - public ReflectionTransformer(Method meth, Class clazz, String trans) + public ReflectionTransformer(Method meth, @SuppressWarnings({"rawtypes"})Class clazz, String trans) throws Exception { this.meth = meth; this.clazz = clazz; @@ -172,7 +175,7 @@ public Object transformRow(Map aRow, Context context) { try { return meth.invoke(o, aRow); } catch (Exception e) { - log.warn("method invocation failed on transformer : " + trans, e); + log.warn("method invocation failed on transformer : {}", trans, e); throw new DataImportHandlerException(WARN, e); } } @@ -195,7 +198,7 @@ protected Map applyTransformer(Map row) { Map transformedRow = row; List> rows = null; boolean stopTransform = checkStopTransform(row); - VariableResolver resolver = (VariableResolver) context.getVariableResolver(); + VariableResolver resolver = context.getVariableResolver(); for (Transformer t : transformers) { if (stopTransform) break; try { @@ -207,6 +210,7 @@ protected Map applyTransformer(Map row) { if (o == null) continue; if (o instanceof Map) { + @SuppressWarnings({"rawtypes"}) Map oMap = (Map) o; stopTransform = checkStopTransform(oMap); tmpRows.add((Map) o); @@ -223,6 +227,7 @@ protected Map applyTransformer(Map row) { if (o == null) return null; if (o instanceof Map) { + @SuppressWarnings({"rawtypes"}) Map oMap = (Map) o; stopTransform = checkStopTransform(oMap); transformedRow = (Map) o; @@ -251,7 +256,7 @@ protected Map applyTransformer(Map row) { } - private boolean checkStopTransform(Map oMap) { + private boolean 
checkStopTransform(@SuppressWarnings({"rawtypes"})Map oMap) { return oMap.get("$stopTransform") != null && Boolean.parseBoolean(oMap.get("$stopTransform").toString()); } @@ -313,7 +318,7 @@ public void destroy() { } public VariableResolver getVariableResolver() { - return (VariableResolver) context.getVariableResolver(); + return context.getVariableResolver(); } public Context getContext() { @@ -333,11 +338,12 @@ public List getChildren() { return children; } + @SuppressWarnings({"rawtypes"}) public DataSource getDatasource() { return datasource; } - public void setDatasource(DataSource datasource) { + public void setDatasource(@SuppressWarnings({"rawtypes"})DataSource datasource) { this.datasource = datasource; } diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FileDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FileDataSource.java index 920472e2bfe2..34df122687cd 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FileDataSource.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FileDataSource.java @@ -102,14 +102,14 @@ static File getFile(String basePath, String query) { File basePathFile; if (basePath == null) { basePathFile = new File(".").getAbsoluteFile(); - log.warn("FileDataSource.basePath is empty. " + - "Resolving to: " + basePathFile.getAbsolutePath()); + log.warn("FileDataSource.basePath is empty. Resolving to: {}" + , basePathFile.getAbsolutePath()); } else { basePathFile = new File(basePath); if (!basePathFile.isAbsolute()) { basePathFile = basePathFile.getAbsoluteFile(); - log.warn("FileDataSource.basePath is not absolute. Resolving to: " - + basePathFile.getAbsolutePath()); + log.warn("FileDataSource.basePath is not absolute. 
Resolving to: {}" + , basePathFile.getAbsolutePath()); } } @@ -117,7 +117,9 @@ static File getFile(String basePath, String query) { } if (file.isFile() && file.canRead()) { - log.debug("Accessing File: " + file.getAbsolutePath()); + if (log.isDebugEnabled()) { + log.debug("Accessing File: {}", file.getAbsolutePath()); + } return file; } else { throw new FileNotFoundException("Could not find file: " + query + diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/HTMLStripTransformer.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/HTMLStripTransformer.java index e62c329283ef..7ef4d9362a9c 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/HTMLStripTransformer.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/HTMLStripTransformer.java @@ -49,6 +49,7 @@ public Object transformRow(Map row, Context context) { if (tmpVal instanceof List) { List inputs = (List) tmpVal; + @SuppressWarnings({"rawtypes"}) List results = new ArrayList(); for (String input : inputs) { if (input == null) diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java index 5eb351724665..87f38f49f685 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java @@ -87,7 +87,7 @@ public void init(Context context, Properties initProps) { if (batchSize == -1) batchSize = Integer.MIN_VALUE; } catch (NumberFormatException e) { - log.warn("Invalid batch size: " + bsz); + log.warn("Invalid batch size: {}", bsz); } } @@ -172,9 +172,10 @@ protected Callable createConnectionFactory(final Context context, return factory = new Callable() { @Override public Connection call() throws Exception { - log.info("Creating a connection for entity " - + context.getEntityAttribute(DataImporter.NAME) + " with URL: " - + url); + if (log.isInfoEnabled()) { + log.info("Creating a connection for entity {} with URL: {}" + , context.getEntityAttribute(DataImporter.NAME), url); + } long start = System.nanoTime(); Connection c = null; @@ -188,6 +189,7 @@ public Connection call() throws Exception { // the class loader of the class which is trying to make the connection. // This is a workaround for cases where the user puts the driver jar in the // solr.home/lib or solr.home/core/lib directories. 
+ @SuppressWarnings({"unchecked"}) Driver d = (Driver) DocBuilder.loadClass(driver, context.getSolrCore()).getConstructor().newInstance(); c = d.connect(url, initProps); } @@ -205,8 +207,8 @@ public Connection call() throws Exception { throw new DataImportHandlerException(SEVERE, "Exception initializing SQL connection", e); } } - log.info("Time taken for getConnection(): " - + TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS)); + log.info("Time taken for getConnection(): {}" + , TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS)); return c; } @@ -316,11 +318,11 @@ public ResultSetIterator(String query) { try { Connection c = getConnection(); stmt = createStatement(c, batchSize, maxRows); - log.debug("Executing SQL: " + query); + log.debug("Executing SQL: {}", query); long start = System.nanoTime(); resultSet = executeStatement(stmt, query); - log.trace("Time taken for sql :" - + TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS)); + log.trace("Time taken for sql : {}" + , TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS)); setColNames(resultSet); } catch (Exception e) { close(); diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/NumberFormatTransformer.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/NumberFormatTransformer.java index 349b14eee30e..f693aecce4a0 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/NumberFormatTransformer.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/NumberFormatTransformer.java @@ -69,6 +69,7 @@ public Object transformRow(Map row, Context context) { if (val instanceof List) { List inputs = (List) val; + @SuppressWarnings({"rawtypes"}) List results = new ArrayList(); for (String input : inputs) { try { diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/PlainTextEntityProcessor.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/PlainTextEntityProcessor.java index c75608c95a9b..4b8771af26ed 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/PlainTextEntityProcessor.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/PlainTextEntityProcessor.java @@ -45,6 +45,7 @@ public void init(Context context) { @Override public Map nextRow() { if (ended) return null; + @SuppressWarnings({"unchecked"}) DataSource ds = context.getDataSource(); String url = context.replaceTokens(context.getEntityAttribute(URL)); Reader r = null; diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java index 719decae906e..f5934163b009 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java @@ -43,7 +43,7 @@ public class RegexTransformer extends Transformer { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @Override - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) public Map transformRow(Map row, Context ctx) { List> fields = ctx.getAllEntityFields(); @@ -135,7 +135,7 @@ private List readBySplit(String splitBy, String 
value) { return l; } - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) private Object readfromRegExp(String reStr, String value, String columnName, String gNames) { String[] groupNames = null; if(gNames != null && gNames.trim().length() >0){ @@ -165,7 +165,7 @@ private Object readfromRegExp(String reStr, String value, String columnName, Str } } } catch (Exception e) { - log.warn("Parsing failed for field : " + columnName, e); + log.warn("Parsing failed for field : {}", columnName, e); } } return l == null ? map: l; } diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SimplePropertiesWriter.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SimplePropertiesWriter.java index 2d5b078b87a1..0b77c6ec6d3f 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SimplePropertiesWriter.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SimplePropertiesWriter.java @@ -214,7 +214,7 @@ public void persist(Map propObjs) { existingProps.putAll(newProps); propOutput = new OutputStreamWriter(new FileOutputStream(getPersistFile()), StandardCharsets.UTF_8); existingProps.store(propOutput, null); - log.info("Wrote last indexed time to " + filename); + log.info("Wrote last indexed time to {}", filename); } catch (Exception e) { throw new DataImportHandlerException(DataImportHandlerException.SEVERE, "Unable to persist Index Start Time", e); @@ -235,9 +235,9 @@ public Map readIndexerProperties() { filePath += filename; propInput = new FileInputStream(filePath); props.load(new InputStreamReader(propInput, StandardCharsets.UTF_8)); - log.info("Read " + filename); + log.info("Read {}", filename); } catch (Exception e) { - log.warn("Unable to read: " + filename); + log.warn("Unable to read: {}", filename); } finally { IOUtils.closeWhileHandlingException(propInput); } } diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrWriter.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrWriter.java index 3964f3f24638..8e7624bd8b04 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrWriter.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrWriter.java @@ -79,7 +79,7 @@ public boolean upload(SolrInputDocument d) { command.commitWithin = commitWithin; processor.processAdd(command); } catch (Exception e) { - log.warn("Error creating document : " + d, e); + log.warn("Error creating document : {}", d, e); return false; } @@ -89,24 +89,24 @@ public boolean upload(SolrInputDocument d) { @Override public void deleteDoc(Object id) { try { - log.info("Deleting document: " + id); + log.info("Deleting document: {}", id); DeleteUpdateCommand delCmd = new DeleteUpdateCommand(req); delCmd.setId(id.toString()); processor.processDelete(delCmd); } catch (IOException e) { - log.error("Exception while deleteing: " + id, e); + log.error("Exception while deleting: {}", id, e); } } @Override public void deleteByQuery(String query) { try { - log.info("Deleting documents from Solr with query: " + query); + log.info("Deleting documents from Solr with query: {}", query); DeleteUpdateCommand delCmd = new DeleteUpdateCommand(req); delCmd.query = query; processor.processDelete(delCmd); } catch (IOException e) { - log.error("Exception while deleting by query: " + query, e); + log.error("Exception while deleting by query: {}", query, e); } } diff
--git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java index 19c6d0f7476b..8e0522a465b5 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java @@ -61,7 +61,7 @@ protected void initQuery(String q) { } catch (DataImportHandlerException e) { throw e; } catch (Exception e) { - log.error( "The query failed '" + q + "'", e); + log.error( "The query failed '{}'", q, e); throw new DataImportHandlerException(DataImportHandlerException.SEVERE, e); } } @@ -103,8 +103,10 @@ public Map nextModifiedParentRowKey() { String parentDeltaQuery = context.getEntityAttribute(PARENT_DELTA_QUERY); if (parentDeltaQuery == null) return null; - log.info("Running parentDeltaQuery for Entity: " - + context.getEntityAttribute("name")); + if (log.isInfoEnabled()) { + log.info("Running parentDeltaQuery for Entity: {}" + , context.getEntityAttribute("name")); + } initQuery(context.replaceTokens(parentDeltaQuery)); } return getNext(); @@ -119,7 +121,7 @@ public String getQuery() { String deltaImportQuery = context.getEntityAttribute(DELTA_IMPORT_QUERY); if(deltaImportQuery != null) return deltaImportQuery; } - log.warn("'deltaImportQuery' attribute is not specified for entity : "+ entityName); + log.warn("'deltaImportQuery' attribute is not specified for entity : {}", entityName); return getDeltaImportQuery(queryString); } diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java index f655edd17d88..75a6ff29686d 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java @@ -76,8 +76,8 @@ public Object transformRow(Map row, Context context) { } for (String v : variables) { if (resolver.resolve(v) == null) { - log.warn("Unable to resolve variable: " + v - + " while parsing expression: " + expr); + log.warn("Unable to resolve variable: {} while parsing expression: {}" + ,v , expr); resolvable = false; } } @@ -94,6 +94,7 @@ public Object transformRow(Map row, Context context) { return row; } + @SuppressWarnings({"unchecked"}) private void addToRow(String key, Map row, Object value) { Object prevVal = row.get(key); if (prevVal != null) { diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/URLDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/URLDataSource.java index 145ffc47eb71..0beed255d165 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/URLDataSource.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/URLDataSource.java @@ -72,14 +72,14 @@ public void init(Context context, Properties initProps) { try { connectionTimeout = Integer.parseInt(cTimeout); } catch (NumberFormatException e) { - log.warn("Invalid connection timeout: " + cTimeout); + log.warn("Invalid connection timeout: {}", cTimeout); } } if (rTimeout != null) { try { readTimeout = Integer.parseInt(rTimeout); } catch (NumberFormatException e) { - log.warn("Invalid read timeout: " + rTimeout); + 
log.warn("Invalid read timeout: {}", rTimeout); } } } @@ -91,7 +91,7 @@ public Reader getData(String query) { if (URIMETHOD.matcher(query).find()) url = new URL(query); else url = new URL(baseUrl + query); - log.debug("Accessing URL: " + url.toString()); + log.debug("Accessing URL: {}", url); URLConnection conn = url.openConnection(); conn.setConnectTimeout(connectionTimeout); diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java index c93b581fdfc9..03737121791a 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java @@ -136,8 +136,9 @@ private void initXpathReader(VariableResolver resolver) { // some XML parsers are broken and don't close the byte stream (but they should according to spec) IOUtils.closeQuietly(xsltSource.getInputStream()); } - log.info("Using xslTransformer: " - + xslTransformer.getClass().getName()); + if (log.isInfoEnabled()) { + log.info("Using xslTransformer: {}", xslTransformer.getClass().getName()); + } } catch (Exception e) { throw new DataImportHandlerException(SEVERE, "Error initializing XSL ", e); @@ -183,6 +184,7 @@ private void initXpathReader(VariableResolver resolver) { } } String url = context.getEntityAttribute(URL); + @SuppressWarnings({"unchecked"}) List l = url == null ? Collections.EMPTY_LIST : resolver.getVariables(url); for (String s : l) { if (s.startsWith(entityName + ".")) { @@ -267,7 +269,7 @@ private void addNamespace() { Object val = context.getSessionAttribute(name, Context.SCOPE_ENTITY); if (val != null) namespace.put(name, val); } - ((VariableResolver)context.getVariableResolver()).addNamespace(entityName, namespace); + context.getVariableResolver().addNamespace(entityName, namespace); } private void addCommonFields(Map r) { @@ -283,6 +285,7 @@ private void addCommonFields(Map r) { } + @SuppressWarnings({"unchecked"}) private void initQuery(String s) { Reader data = null; try { @@ -293,10 +296,12 @@ private void initQuery(String s) { if (ABORT.equals(onError)) { wrapAndThrow(SEVERE, e); } else if (SKIP.equals(onError)) { - if (log.isDebugEnabled()) log.debug("Skipping url : " + s, e); + if (log.isDebugEnabled()) { + log.debug("Skipping url : {}", s, e); + } wrapAndThrow(DataImportHandlerException.SKIP, e); } else { - log.warn("Failed for url : " + s, e); + log.warn("Failed for url : {}", s, e); rowIterator = Collections.EMPTY_LIST.iterator(); return; } @@ -313,7 +318,7 @@ private void initQuery(String s) { } else if (SKIP.equals(onError)) { wrapAndThrow(DataImportHandlerException.SKIP, e); } else { - log.warn("Failed for url : " + s, e); + log.warn("Failed for url : {}", s, e); rowIterator = Collections.EMPTY_LIST.iterator(); return; } @@ -355,6 +360,7 @@ private void closeIt(Reader data) { } } + @SuppressWarnings({"unchecked"}) protected Map readRow(Map record, String xpath) { if (useSolrAddXml) { List names = (List) record.get("name"); @@ -364,9 +370,11 @@ protected Map readRow(Map record, String xpath) if (row.containsKey(names.get(i))) { Object existing = row.get(names.get(i)); if (existing instanceof List) { + @SuppressWarnings({"rawtypes"}) List list = (List) existing; list.add(values.get(i)); } else { + @SuppressWarnings({"rawtypes"}) List list = new ArrayList(); list.add(existing); list.add(values.get(i)); diff 
--git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java index 13daf49b0857..0a4638f8986f 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java @@ -585,6 +585,7 @@ private Node getOrAddNode(String xpathName, List searchList ) { * records values. If a fields value is a List then they have to be * deep-copied for thread safety */ + @SuppressWarnings({"unchecked", "rawtypes"}) private static Map getDeepCopy(Map values) { Map result = new HashMap<>(); for (Map.Entry entry : values.entrySet()) { @@ -645,7 +646,7 @@ private static List splitEscapeQuote(String str) { } catch (IllegalArgumentException ex) { // Other implementations will likely throw this exception since "reuse-instance" // isimplementation specific. - log.debug("Unable to set the 'reuse-instance' property for the input chain: " + factory); + log.debug("Unable to set the 'reuse-instance' property for the input chain: {}", factory); } } diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ZKPropertiesWriter.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ZKPropertiesWriter.java index 64a776c9ccc9..2d83202b3bb5 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ZKPropertiesWriter.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ZKPropertiesWriter.java @@ -74,7 +74,7 @@ public void persist(Map propObjs) { zkClient.setData(path, bytes, false); } catch (Exception e) { SolrZkClient.checkInterrupted(e); - log.warn("Could not persist properties to " + path + " :" + e.getClass(), e); + log.warn("Could not persist properties to {} : {}", path, e.getClass(), e); } } @@ -88,7 +88,7 @@ public Map readIndexerProperties() { } } catch (Exception e) { SolrZkClient.checkInterrupted(e); - log.warn("Could not read DIH properties from " + path + " :" + e.getClass(), e); + log.warn("Could not read DIH properties from {} : {}", path, e.getClass(), e); } return propertiesToMap(props); } diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java index 0ba13eac788c..3832355a48c4 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java @@ -106,7 +106,9 @@ private void verifyWithSchema(Map fields) { SchemaField sf = entry.getValue(); if (!fields.containsKey(sf.getName())) { if (sf.isRequired()) { - log.info(sf.getName() + " is a required field in SolrSchema . But not found in DataConfig"); + if (log.isInfoEnabled()) { + log.info("{} is a required field in SolrSchema . 
But not found in DataConfig", sf.getName()); + } } } } @@ -114,7 +116,9 @@ private void verifyWithSchema(Map fields) { EntityField fld = entry.getValue(); SchemaField field = getSchemaField(fld.getName()); if (field == null && !isSpecialCommand(fld.getName())) { - log.info("The field :" + fld.getName() + " present in DataConfig does not have a counterpart in Solr Schema"); + if (log.isInfoEnabled()) { + log.info("The field :{} present in DataConfig does not have a counterpart in Solr Schema", fld.getName()); + } } } } diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDIHCacheTestCase.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDIHCacheTestCase.java index 9a0f3a772e78..7a7b3ec88caa 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDIHCacheTestCase.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDIHCacheTestCase.java @@ -81,7 +81,7 @@ static class ControlData implements Comparable, Iterable { } @Override - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) public int compareTo(ControlData cd) { Comparable c1 = (Comparable) data[0]; Comparable c2 = (Comparable) cd.data[0]; diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java index 3674a3d58f10..7a31acfda3e0 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java @@ -115,6 +115,7 @@ protected File redirectTempProperties(DataImporter di) { * @param extraParams any extra request parameters needed to be passed to DataImportHandler * @throws Exception in case of any error */ + @SuppressWarnings({"unchecked"}) protected void runFullImport(String dataConfig, Map extraParams) throws Exception { HashMap params = new HashMap<>(); params.put("command", "full-import"); @@ -123,6 +124,7 @@ protected void runFullImport(String dataConfig, Map extraParams) params.put("clean", "true"); params.put("commit", "true"); params.putAll(extraParams); + @SuppressWarnings({"rawtypes"}) NamedList l = new NamedList(); for (Map.Entry e : params.entrySet()) { l.add(e.getKey(),e.getValue()); @@ -136,7 +138,7 @@ protected void runFullImport(String dataConfig, Map extraParams) */ @SuppressWarnings("unchecked") public static TestContext getContext(EntityProcessorWrapper parent, - VariableResolver resolver, DataSource parentDataSource, + VariableResolver resolver, @SuppressWarnings({"rawtypes"})DataSource parentDataSource, String currProcess, final List> entityFields, final Map entityAttrs) { if (resolver == null) resolver = new VariableResolver(); @@ -150,7 +152,7 @@ public static TestContext getContext(EntityProcessorWrapper parent, * Strings at even index are keys, odd-index strings are values in the * returned map */ - @SuppressWarnings("unchecked") + @SuppressWarnings({"rawtypes"}) public static Map createMap(Object... 
args) { return Utils.makeMap(args); } @@ -217,6 +219,7 @@ public VariableResolver getVariableResolver() { } @Override + @SuppressWarnings({"rawtypes"}) public DataSource getDataSource() { return delegate.getDataSource(); } @@ -257,7 +260,7 @@ public Context getParentContext() { } @Override - public DataSource getDataSource(String name) { + @SuppressWarnings({"rawtypes"})public DataSource getDataSource(String name) { return delegate.getDataSource(name); } diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractSqlEntityProcessorTestCase.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractSqlEntityProcessorTestCase.java index 277404451c1c..ee5ec82f9e50 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractSqlEntityProcessorTestCase.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractSqlEntityProcessorTestCase.java @@ -112,7 +112,7 @@ protected void logPropertiesFile() { for(Map.Entry entry : props.entrySet()) { sb.append(" > key=" + entry.getKey() + " / value=" + entry.getValue() + "\n"); } - log.debug(sb.toString()); + log.debug("{}", sb); } } @@ -465,9 +465,10 @@ public IntChanges modifySomePeople() throws Exception { // One second in the future ensures a change time after the last import (DIH // uses second precision only) Timestamp theTime = new Timestamp(System.currentTimeMillis() + 1000); - log.debug("PEOPLE UPDATE USING TIMESTAMP: " - + new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT) - .format(theTime)); + if (log.isDebugEnabled()) { + log.debug("PEOPLE UPDATE USING TIMESTAMP: {}" + , new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(theTime)); + } try { conn = newConnection(); change = conn @@ -537,9 +538,10 @@ public String[] modifySomeCountries() throws Exception { // One second in the future ensures a change time after the last import (DIH // uses second precision only) Timestamp theTime = new Timestamp(System.currentTimeMillis() + 1000); - log.debug("COUNTRY UPDATE USING TIMESTAMP: " - + new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT) - .format(theTime)); + if (log.isDebugEnabled()) { + log.debug("COUNTRY UPDATE USING TIMESTAMP: {}" + , new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(theTime)); + } try { conn = newConnection(); change = conn @@ -720,9 +722,10 @@ protected void populateData(Connection conn) throws Exception { s.executeUpdate("create table countries(code varchar(3) not null primary key, country_name varchar(50), deleted char(1) default 'N', last_modified timestamp not null)"); s.executeUpdate("create table people(id int not null primary key, name varchar(50), country_code char(2), deleted char(1) default 'N', last_modified timestamp not null)"); s.executeUpdate("create table people_sports(id int not null primary key, person_id int, sport_name varchar(50), deleted char(1) default 'N', last_modified timestamp not null)"); - log.debug("INSERTING DB DATA USING TIMESTAMP: " - + new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT) - .format(theTime)); + if (log.isDebugEnabled()) { + log.debug("INSERTING DB DATA USING TIMESTAMP: {}", + new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(theTime)); + } ps = conn .prepareStatement("insert into countries (code, country_name, last_modified) values (?,?,?)"); for (String[] country : countries) { diff --git 
a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java index 756821027edf..5a7ea84650ac 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java @@ -42,7 +42,7 @@ public MockInitialContextFactory() { @Override @SuppressWarnings("unchecked") - public javax.naming.Context getInitialContext(Hashtable env) { + public javax.naming.Context getInitialContext(@SuppressWarnings({"rawtypes"})Hashtable env) { return context; } diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestClobTransformer.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestClobTransformer.java index fe00d491d04f..26478dedba73 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestClobTransformer.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestClobTransformer.java @@ -32,6 +32,7 @@ * @see org.apache.solr.handler.dataimport.ClobTransformer * @since solr 1.4 */ +@SuppressWarnings({"unchecked"}) public class TestClobTransformer extends AbstractDataImportHandlerTestCase { @Test public void simple() throws Exception { @@ -45,6 +46,7 @@ public void simple() throws Exception { Context ctx = getContext(null, new VariableResolver(), null, Context.FULL_DUMP, flds, Collections.EMPTY_MAP); Transformer t = new ClobTransformer(); Map row = new HashMap<>(); + @SuppressWarnings({"rawtypes"}) Clob clob = (Clob) Proxy.newProxyInstance(this.getClass().getClassLoader(), new Class[]{Clob.class}, new InvocationHandler() { @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDataConfig.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDataConfig.java index 7a56fd2f36e7..c5028931c73f 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDataConfig.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDataConfig.java @@ -44,6 +44,7 @@ public static void beforeClass() throws Exception { @Test @SuppressWarnings("unchecked") public void testDataConfigWithDataSource() throws Exception { + @SuppressWarnings({"rawtypes"}) List rows = new ArrayList(); rows.add(createMap("id", "1", "desc", "one")); MockDataSource.setIterator("select * from x", rows.iterator()); diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder.java index 7f7278e7d0bd..6ee2432d085f 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder.java @@ -52,6 +52,7 @@ public void loadClass() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void singleEntityNoRows() { DataImporter di = new DataImporter(); di.loadAndInit(dc_singleEntity); @@ -71,6 +72,7 @@ public void singleEntityNoRows() { } @Test + @SuppressWarnings({"unchecked"}) public void 
testDeltaImportNoRows_MustNotCommit() { DataImporter di = new DataImporter(); di.loadAndInit(dc_deltaConfig); @@ -93,6 +95,7 @@ public void testDeltaImportNoRows_MustNotCommit() { } @Test + @SuppressWarnings({"unchecked"}) public void singleEntityOneRow() { DataImporter di = new DataImporter(); di.loadAndInit(dc_singleEntity); @@ -122,6 +125,7 @@ public void singleEntityOneRow() { } @Test + @SuppressWarnings({"unchecked"}) public void testImportCommand() { DataImporter di = new DataImporter(); di.loadAndInit(dc_singleEntity); @@ -142,7 +146,7 @@ public void testImportCommand() { assertEquals(1, di.getDocBuilder().importStatistics.rowsCount.get()); for (int i = 0; i < l.size(); i++) { - Map map = (Map) l.get(i); + Map map = l.get(i); SolrInputDocument doc = swi.docs.get(i); for (Map.Entry entry : map.entrySet()) { assertEquals(entry.getValue(), doc.getFieldValue(entry.getKey())); @@ -151,6 +155,7 @@ public void testImportCommand() { } @Test + @SuppressWarnings({"unchecked"}) public void singleEntityMultipleRows() { DataImporter di = new DataImporter(); di.loadAndInit(dc_singleEntity); @@ -170,7 +175,7 @@ public void singleEntityMultipleRows() { assertEquals(Boolean.TRUE, swi.finishCalled); assertEquals(3, swi.docs.size()); for (int i = 0; i < l.size(); i++) { - Map map = (Map) l.get(i); + Map map = l.get(i); SolrInputDocument doc = swi.docs.get(i); for (Map.Entry entry : map.entrySet()) { assertEquals(entry.getValue(), doc.getFieldValue(entry.getKey())); @@ -183,6 +188,7 @@ public void singleEntityMultipleRows() { } @Test + @SuppressWarnings({"unchecked"}) public void templateXPath() { DataImporter di = new DataImporter(); di.loadAndInit(dc_variableXpath); diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder2.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder2.java index 27865cd5e347..2941f589617f 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder2.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder2.java @@ -46,6 +46,7 @@ public static void beforeClass() throws Exception { @Test @SuppressWarnings("unchecked") public void testSingleEntity() throws Exception { + @SuppressWarnings({"rawtypes"}) List rows = new ArrayList(); rows.add(createMap("id", "1", "desc", "one")); MockDataSource.setIterator("select * from x", rows.iterator()); @@ -62,6 +63,7 @@ public void testSingleEntity() throws Exception { @Test @SuppressWarnings("unchecked") public void testSingleEntity_CaseInsensitive() throws Exception { + @SuppressWarnings({"rawtypes"}) List rows = new ArrayList(); rows.add(createMap("id", "1", "desC", "one")); MockDataSource.setIterator("select * from x", rows.iterator()); @@ -76,7 +78,9 @@ public void testSingleEntity_CaseInsensitive() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testErrorHandler() throws Exception { + @SuppressWarnings({"rawtypes"}) List rows = new ArrayList(); rows.add(createMap("id", "1", "FORCE_ERROR", "true")); MockDataSource.setIterator("select * from x", rows.iterator()); @@ -90,6 +94,7 @@ public void testErrorHandler() throws Exception { @Test @SuppressWarnings("unchecked") public void testDynamicFields() throws Exception { + @SuppressWarnings({"rawtypes"}) List rows = new ArrayList(); rows.add(createMap("id", "1", "desc", "one")); MockDataSource.setIterator("select * from x", rows.iterator()); @@ -103,6 +108,7 @@ public void testDynamicFields() 
throws Exception { @Test @SuppressWarnings("unchecked") public void testRequestParamsAsVariable() throws Exception { + @SuppressWarnings({"rawtypes"}) List rows = new ArrayList(); rows.add(createMap("id", "101", "desc", "ApacheSolr")); MockDataSource.setIterator("select * from books where category='search'", rows.iterator()); @@ -118,6 +124,7 @@ public void testRequestParamsAsVariable() throws Exception { @Test @SuppressWarnings("unchecked") public void testDynamicFieldNames() throws Exception { + @SuppressWarnings({"rawtypes"}) List rows = new ArrayList(); rows.add(createMap("mypk", "101", "text", "ApacheSolr")); MockDataSource.setIterator("select * from x", rows.iterator()); @@ -132,6 +139,7 @@ public void testDynamicFieldNames() throws Exception { @Test @SuppressWarnings("unchecked") public void testRequestParamsAsFieldName() throws Exception { + @SuppressWarnings({"rawtypes"}) List rows = new ArrayList(); rows.add(createMap("mypk", "101", "text", "ApacheSolr")); MockDataSource.setIterator("select * from x", rows.iterator()); @@ -147,6 +155,7 @@ public void testRequestParamsAsFieldName() throws Exception { @Test @SuppressWarnings("unchecked") public void testContext() throws Exception { + @SuppressWarnings({"rawtypes"}) List rows = new ArrayList(); rows.add(createMap("id", "1", "desc", "one")); MockDataSource.setIterator("select * from x", rows.iterator()); @@ -157,6 +166,7 @@ public void testContext() throws Exception { @Test @SuppressWarnings("unchecked") public void testSkipDoc() throws Exception { + @SuppressWarnings({"rawtypes"}) List rows = new ArrayList(); rows.add(createMap("id", "1", "desc", "one")); rows.add(createMap("id", "2", "desc", "two", DocBuilder.SKIP_DOC, "true")); @@ -169,7 +179,7 @@ public void testSkipDoc() throws Exception { } @Test - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) public void testSkipRow() throws Exception { List rows = new ArrayList(); rows.add(createMap("id", "1", "desc", "one")); @@ -206,6 +216,7 @@ public void testSkipRow() throws Exception { @Test @SuppressWarnings("unchecked") public void testStopTransform() throws Exception { + @SuppressWarnings({"rawtypes"}) List rows = new ArrayList(); rows.add(createMap("id", "1", "desc", "one")); rows.add(createMap("id", "2", "desc", "two", "$stopTransform", "true")); @@ -219,7 +230,7 @@ public void testStopTransform() throws Exception { } @Test - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) public void testDeleteDocs() throws Exception { List rows = new ArrayList(); rows.add(createMap("id", "1", "desc", "one")); @@ -269,6 +280,7 @@ public void testDeleteDocs() throws Exception { public void testFileListEntityProcessor_lastIndexTime() throws Exception { File tmpdir = createTempDir().toFile(); + @SuppressWarnings({"unchecked"}) Map params = createMap("baseDir", tmpdir.getAbsolutePath()); createFile(tmpdir, "a.xml", "a.xml".getBytes(StandardCharsets.UTF_8), true); diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestEphemeralCache.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestEphemeralCache.java index ddccafeaa3e4..b5b3c3374253 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestEphemeralCache.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestEphemeralCache.java @@ -46,6 +46,7 @@ public void test() throws Exception { @SuppressWarnings("unchecked") private void setupMockData() { + 
@SuppressWarnings({"rawtypes"}) List parentRows = new ArrayList(); parentRows.add(createMap("id", new BigDecimal("1"), "parent_s", "one")); parentRows.add(createMap("id", new BigDecimal("2"), "parent_s", "two")); @@ -53,6 +54,7 @@ private void setupMockData() { parentRows.add(createMap("id", new BigDecimal("4"), "parent_s", "four")); parentRows.add(createMap("id", new BigDecimal("5"), "parent_s", "five")); + @SuppressWarnings({"rawtypes"}) List child1Rows = new ArrayList(); child1Rows.add(createMap("id", new BigDecimal("6"), "child1a_mult_s", "this is the number six.")); child1Rows.add(createMap("id", new BigDecimal("5"), "child1a_mult_s", "this is the number five.")); @@ -65,6 +67,7 @@ private void setupMockData() { child1Rows.add(createMap("id", new BigDecimal("1"), "child1a_mult_s", "uno")); child1Rows.add(createMap("id", new BigDecimal("2"), "child1b_s", "CHILD1B", "child1a_mult_s", "this is the number two.")); + @SuppressWarnings({"rawtypes"}) List child2Rows = new ArrayList(); child2Rows.add(createMap("id", new BigDecimal("6"), "child2a_mult_s", "Child 2 says, 'this is the number six.'")); child2Rows.add(createMap("id", new BigDecimal("5"), "child2a_mult_s", "Child 2 says, 'this is the number five.'")); diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestErrorHandling.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestErrorHandling.java index 1ea1ad483a1d..2391ae8a3140 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestErrorHandling.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestErrorHandling.java @@ -80,6 +80,7 @@ public void testAbortOnError() throws Exception { assertQ(req("*:*"), "//*[@numFound='0']"); } + @SuppressWarnings({"unchecked"}) public void testTransformerErrorContinue() throws Exception { StringDataSource.xml = wellformedXml; List> rows = new ArrayList<>(); diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestFieldReader.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestFieldReader.java index 347a40bf1ab8..3203bdab119f 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestFieldReader.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestFieldReader.java @@ -32,6 +32,7 @@ public class TestFieldReader extends AbstractDataImportHandlerTestCase { @Test + @SuppressWarnings({"unchecked"}) public void simple() { DataImporter di = new DataImporter(); di.loadAndInit(config); diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestFileListEntityProcessor.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestFileListEntityProcessor.java index dd2cf72f6421..c4b4ab0003ca 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestFileListEntityProcessor.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestFileListEntityProcessor.java @@ -50,6 +50,7 @@ public void testSimple() throws IOException { createFile(tmpdir, "a.xml", "a.xml".getBytes(StandardCharsets.UTF_8), false); createFile(tmpdir, "b.xml", "b.xml".getBytes(StandardCharsets.UTF_8), false); createFile(tmpdir, "c.props", "c.props".getBytes(StandardCharsets.UTF_8), false); + @SuppressWarnings({"rawtypes"}) Map attrs = createMap( FileListEntityProcessor.FILE_NAME, "xml$", 
FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath()); @@ -68,6 +69,7 @@ public void testSimple() throws IOException { } @Test + @SuppressWarnings({"unchecked"}) public void testBiggerSmallerFiles() throws IOException { File tmpdir = createTempDir().toFile(); @@ -91,6 +93,7 @@ public void testBiggerSmallerFiles() throws IOException { minLength = content.length; smallestFile = "c.props"; } + @SuppressWarnings({"rawtypes"}) Map attrs = createMap( FileListEntityProcessor.FILE_NAME, ".*", FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath(), @@ -120,7 +123,7 @@ public void testBiggerSmallerFiles() throws IOException { } @SuppressWarnings("unchecked") - static List getFiles(VariableResolver resolver, Map attrs) { + static List getFiles(VariableResolver resolver, @SuppressWarnings({"rawtypes"})Map attrs) { Context c = getContext(null, resolver, null, Context.FULL_DUMP, Collections.EMPTY_LIST, attrs); FileListEntityProcessor fileListEntityProcessor = new FileListEntityProcessor(); @@ -137,12 +140,14 @@ static List getFiles(VariableResolver resolver, Map attrs) { @SuppressForbidden(reason = "Needs currentTimeMillis to set last modified time") @Test + @SuppressWarnings({"unchecked"}) public void testNTOT() throws IOException { File tmpdir = createTempDir().toFile(); createFile(tmpdir, "a.xml", "a.xml".getBytes(StandardCharsets.UTF_8), true); createFile(tmpdir, "b.xml", "b.xml".getBytes(StandardCharsets.UTF_8), true); createFile(tmpdir, "c.props", "c.props".getBytes(StandardCharsets.UTF_8), true); + @SuppressWarnings({"rawtypes"}) Map attrs = createMap( FileListEntityProcessor.FILE_NAME, "xml$", FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath(), @@ -178,6 +183,7 @@ public void testRECURSION() throws IOException { createFile(childdir, "a.xml", "a.xml".getBytes(StandardCharsets.UTF_8), true); createFile(childdir, "b.xml", "b.xml".getBytes(StandardCharsets.UTF_8), true); createFile(childdir, "c.props", "c.props".getBytes(StandardCharsets.UTF_8), true); + @SuppressWarnings({"rawtypes"}) Map attrs = createMap( FileListEntityProcessor.FILE_NAME, "^.*\\.xml$", FileListEntityProcessor.BASE_DIR, childdir.getAbsolutePath(), diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSourceConvertType.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSourceConvertType.java index 94f437c3709c..ef1cc7b5b2cc 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSourceConvertType.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSourceConvertType.java @@ -48,7 +48,7 @@ public void testConvertType() throws Throwable { convertTypeTest("true", Long.class); } - private void convertTypeTest(String convertType, Class resultClass) throws Throwable { + private void convertTypeTest(String convertType, @SuppressWarnings({"rawtypes"})Class resultClass) throws Throwable { JdbcDataSource dataSource = new JdbcDataSource(); Properties p = new Properties(); p.put("driver", "org.apache.derby.jdbc.EmbeddedDriver"); diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestLineEntityProcessor.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestLineEntityProcessor.java index eb9989c181a3..492e18a07f2c 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestLineEntityProcessor.java +++ 
b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestLineEntityProcessor.java @@ -42,12 +42,14 @@ public void testSimple() throws IOException { * /> */ + @SuppressWarnings({"rawtypes"}) Map attrs = createMap( LineEntityProcessor.URL, "dummy.lis", LineEntityProcessor.ACCEPT_LINE_REGEX, null, LineEntityProcessor.SKIP_LINE_REGEX, null ); + @SuppressWarnings({"unchecked"}) Context c = getContext( null, //parentEntity new VariableResolver(), //resolver @@ -82,12 +84,14 @@ public void testOnly_xml_files() throws IOException { * acceptLineRegex="xml" * /> */ + @SuppressWarnings({"rawtypes"}) Map attrs = createMap( LineEntityProcessor.URL, "dummy.lis", LineEntityProcessor.ACCEPT_LINE_REGEX, "xml", LineEntityProcessor.SKIP_LINE_REGEX, null ); + @SuppressWarnings({"unchecked"}) Context c = getContext( null, //parentEntity new VariableResolver(), //resolver @@ -120,12 +124,14 @@ public void testOnly_xml_files_no_xsd() throws IOException { * omitLineRegex="\\.xsd" * /> */ + @SuppressWarnings({"rawtypes"}) Map attrs = createMap( LineEntityProcessor.URL, "dummy.lis", LineEntityProcessor.ACCEPT_LINE_REGEX, "\\.xml", LineEntityProcessor.SKIP_LINE_REGEX, "\\.xsd" ); + @SuppressWarnings({"unchecked"}) Context c = getContext( null, //parentEntity new VariableResolver(), //resolver @@ -157,11 +163,13 @@ public void testNo_xsd_files() throws IOException { * omitLineRegex="\\.xsd" * /> */ + @SuppressWarnings({"rawtypes"}) Map attrs = createMap( LineEntityProcessor.URL, "dummy.lis", LineEntityProcessor.SKIP_LINE_REGEX, "\\.xsd" ); + @SuppressWarnings({"unchecked"}) Context c = getContext( null, //parentEntity new VariableResolver(), //resolver diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestPlainTextEntityProcessor.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestPlainTextEntityProcessor.java index 23854382dde3..007e63fa2481 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestPlainTextEntityProcessor.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestPlainTextEntityProcessor.java @@ -46,6 +46,7 @@ public void testSimple() throws IOException { redirectTempProperties(di); TestDocBuilder.SolrWriterImpl sw = new TestDocBuilder.SolrWriterImpl(); + @SuppressWarnings({"unchecked"}) RequestInfo rp = new RequestInfo(null, createMap("command", "full-import"), null); di.runCmd(rp, sw); assertEquals(DS.s, sw.docs.get(0).getFieldValue("x")); @@ -142,12 +143,14 @@ public void testSimple2() throws IOException { redirectTempProperties(di); TestDocBuilder.SolrWriterImpl sw = new TestDocBuilder.SolrWriterImpl(); + @SuppressWarnings({"unchecked"}) RequestInfo rp = new RequestInfo(null, createMap("command", "full-import"), null); di.runCmd(rp, sw); assertEquals(DS.s, sw.docs.get(0).getFieldValue("plainText")); } + @SuppressWarnings({"rawtypes"}) public static class DS extends DataSource { static String s = "hello world"; diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestRegexTransformer.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestRegexTransformer.java index e792804634fd..9af9b290bfa9 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestRegexTransformer.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestRegexTransformer.java @@ -69,12 +69,15 @@ public void testGroupNames() { 
assertEquals("Noble", result.get("firstName")); assertEquals("Paul", result.get("lastName")); src= new HashMap<>(); + @SuppressWarnings({"unchecked", "rawtypes"}) List l= new ArrayList(); l.add("Mr Noble Paul") ; l.add("Mr Shalin Mangar") ; src.put("fullName", l); result = new RegexTransformer().transformRow(src, context); + @SuppressWarnings({"rawtypes"}) List l1 = (List) result.get("firstName"); + @SuppressWarnings({"rawtypes"}) List l2 = (List) result.get("lastName"); assertEquals("Noble", l1.get(0)); assertEquals("Shalin", l1.get(1)); @@ -150,6 +153,7 @@ public void testMileage() { VariableResolver resolver = new VariableResolver(); resolver.addNamespace("e", row); + @SuppressWarnings({"unchecked"}) Map eAttrs = createMap("name", "e"); Context context = getContext(null, resolver, null, Context.FULL_DUMP, fields, eAttrs); @@ -175,6 +179,7 @@ public void testMultiValuedRegex(){ ArrayList strings = new ArrayList<>(); strings.add("hello"); strings.add("world"); + @SuppressWarnings({"unchecked"}) Map result = new RegexTransformer().transformRow(createMap("person", strings), context); assertEquals(strings,result.get("participant")); } diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestScriptTransformer.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestScriptTransformer.java index 9cd606dca0b6..2000231d26cc 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestScriptTransformer.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestScriptTransformer.java @@ -128,6 +128,7 @@ public void testReadScriptTag() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testCheckScript() throws Exception { try { DocumentBuilder builder = DocumentBuilderFactory.newInstance() @@ -137,13 +138,14 @@ public void testCheckScript() throws Exception { DIHConfiguration dc = di.readFromXml(document); Context c = getContext("checkNextToken", dc.getScript().getText()); + @SuppressWarnings({"rawtypes"}) Map map = new HashMap(); map.put("nextToken", "hello"); EntityProcessorWrapper sep = new EntityProcessorWrapper(new SqlEntityProcessor(), null, null); sep.init(c); sep.applyTransformer(map); assertEquals("true", map.get("$hasMore")); - map = new HashMap(); + map = new HashMap<>(); map.put("nextToken", ""); sep.applyTransformer(map); assertNull(map.get("$hasMore")); diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java index a2a9fffa9ab3..1753b81de04c 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java @@ -87,7 +87,7 @@ public void testMultiValuedFields() { Map next = processor.nextRow(); assertNotNull(next); - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) List multiField = (List) next.get("description"); assertEquals(testDoc.getValues("description").size(), multiField.size()); assertEquals(testDoc.getValues("description"), multiField); @@ -161,6 +161,7 @@ private List generateUniqueDocs(int numDocs) { types.add(new FldType(ID, ONE_ONE, new SVal('A', 'Z', 4, 40))); types.add(new FldType("description", new IRange(1, 3), new SVal('a', 'c', 1, 1))); + 
@SuppressWarnings({"rawtypes"}) Set previousIds = new HashSet<>(); List docs = new ArrayList<>(numDocs); for (int i = 0; i < numDocs; i++) { diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java index 7b0e613ea750..8dd1b552e560 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java @@ -44,7 +44,7 @@ public void testCacheWithKeyLookup() { List testData = extractDataByKeyLookup(cache, fieldNames); compareData(data, testData); } catch (Exception e) { - log.warn("Exception thrown: " + e.toString()); + log.warn("Exception thrown: {}", e); Assert.fail(); } finally { try { @@ -64,7 +64,7 @@ public void testCacheWithOrderedLookup() { List testData = extractDataInKeyOrder(cache, fieldNames); compareData(data, testData); } catch (Exception e) { - log.warn("Exception thrown: " + e.toString()); + log.warn("Exception thrown: {}", e); Assert.fail(); } finally { try { @@ -180,7 +180,7 @@ public void testCacheReopensWithUpdate() { compareData(newControlData, testData); } catch (Exception e) { - log.warn("Exception thrown: " + e.toString()); + log.warn("Exception thrown: {}", e); Assert.fail(); } finally { try { diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java index 1f22a8c25b5d..9708cdcff34c 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java @@ -49,7 +49,7 @@ public void testSingleEntity() throws Exception { logPropertiesFile(); changeStuff(); int c = calculateDatabaseCalls(); - log.debug("testSingleEntity delta-import (" + c + " database calls expected)..."); + log.debug("testSingleEntity delta-import ({} database calls expected)...", c); singleEntity(c); validateChanges(); } @@ -76,7 +76,7 @@ public void testWithSimpleTransformer() throws Exception { changeStuff(); int c = calculateDatabaseCalls(); simpleTransform(c); - log.debug("testWithSimpleTransformer delta-import (" + c + " database calls expected)..."); + log.debug("testWithSimpleTransformer delta-import ({} database calls expected)...", c); validateChanges(); } @Test @@ -86,7 +86,7 @@ public void testWithComplexTransformer() throws Exception { logPropertiesFile(); changeStuff(); int c = calculateDatabaseCalls(); - log.debug("testWithComplexTransformer delta-import (" + c + " database calls expected)..."); + log.debug("testWithComplexTransformer delta-import ({} database calls expected)...", c); complexTransform(c, personChanges.deletedKeys.length); validateChanges(); } @@ -94,7 +94,7 @@ public void testWithComplexTransformer() throws Exception { public void testChildEntities() throws Exception { log.debug("testChildEntities full-import..."); useParentDeltaQueryParam = random().nextBoolean(); - log.debug("using parent delta? " + useParentDeltaQueryParam); + log.debug("using parent delta? 
{}", useParentDeltaQueryParam); withChildEntities(false, true); logPropertiesFile(); changeStuff(); @@ -168,13 +168,13 @@ private void countryChangesLog() sb.append(s).append(" "); } sb.append(" }"); - log.debug(sb.toString()); + log.debug("{}", sb); } } private void personChangesLog() { if(personChanges!=null) { - log.debug("person changes { " + personChanges.toString() + " } "); + log.debug("person changes [ {} ] ", personChanges); } } @Override diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestTemplateTransformer.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestTemplateTransformer.java index b5c38117ac33..11ea30be4f38 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestTemplateTransformer.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestTemplateTransformer.java @@ -36,6 +36,7 @@ public class TestTemplateTransformer extends AbstractDataImportHandlerTestCase { @Test @SuppressWarnings("unchecked") public void testTransformRow() { + @SuppressWarnings({"rawtypes"}) List fields = new ArrayList(); fields.add(createMap("column", "firstName")); fields.add(createMap("column", "lastName")); @@ -52,6 +53,7 @@ public void testTransformRow() { TemplateTransformer.TEMPLATE,"Mr ${e.name}")); List mails = Arrays.asList("a@b.com", "c@d.com"); + @SuppressWarnings({"rawtypes"}) Map row = createMap( "firstName", "Shalin", "middleName", "Shekhar", @@ -73,6 +75,7 @@ public void testTransformRow() { @Test @SuppressWarnings("unchecked") public void testTransformRowMultiValue() { + @SuppressWarnings({"rawtypes"}) List fields = new ArrayList(); fields.add(createMap("column", "year")); fields.add(createMap("column", "month")); @@ -89,6 +92,7 @@ public void testTransformRowMultiValue() { TemplateTransformer.TEMPLATE, "${e.year}-${e.month}-${e.day}" )); + @SuppressWarnings({"rawtypes"}) Map row = createMap( "year", "2016", "month", "Apr", "day", "30" ); diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java index 00285649fe61..ef88fffb7d44 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java @@ -146,6 +146,7 @@ public void testFunctionNamespace1() throws Exception { m.put("class", E.class.getName()); l.add(m); resolver.setEvaluators(new DataImporter().getEvaluators(l)); + @SuppressWarnings({"unchecked"}) ContextImpl context = new ContextImpl(null, resolver, null, Context.FULL_DUMP, Collections.EMPTY_MAP, null, null); diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestWriterImpl.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestWriterImpl.java index e5c2a949f25c..24eb28bcd3ac 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestWriterImpl.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestWriterImpl.java @@ -43,6 +43,7 @@ public static void beforeClass() throws Exception { @Test @SuppressWarnings("unchecked") public void testDataConfigWithDataSource() throws Exception { + @SuppressWarnings({"rawtypes"}) List rows = new ArrayList(); rows.add(createMap("id", "1", "desc", "one")); 
rows.add(createMap("id", "2", "desc", "two")); @@ -51,6 +52,7 @@ public void testDataConfigWithDataSource() throws Exception { MockDataSource.setIterator("select * from x", rows.iterator()); + @SuppressWarnings({"rawtypes"}) Map extraParams = createMap("writerImpl", TestSolrWriter.class.getName(), "commit", "true"); runFullImport(loadDataConfig("data-config-with-datasource.xml"), diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java index 72da77a60ba5..e2200eab7830 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java @@ -42,12 +42,15 @@ public class TestXPathEntityProcessor extends AbstractDataImportHandlerTestCase int rowsToRead = -1; @Test + @SuppressWarnings({"unchecked"}) public void withFieldsAndXpath() throws Exception { File tmpdir = createTempDir().toFile(); createFile(tmpdir, "x.xsl", xsl.getBytes(StandardCharsets.UTF_8), false); + @SuppressWarnings({"rawtypes"}) Map entityAttrs = createMap("name", "e", "url", "cd.xml", XPathEntityProcessor.FOR_EACH, "/catalog/cd"); + @SuppressWarnings({"rawtypes"}) List fields = new ArrayList(); fields.add(createMap("column", "title", "xpath", "/catalog/cd/title")); fields.add(createMap("column", "artist", "xpath", "/catalog/cd/artist")); @@ -70,9 +73,12 @@ public void withFieldsAndXpath() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testMultiValued() throws Exception { + @SuppressWarnings({"rawtypes"}) Map entityAttrs = createMap("name", "e", "url", "testdata.xml", XPathEntityProcessor.FOR_EACH, "/root"); + @SuppressWarnings({"rawtypes"}) List fields = new ArrayList(); fields.add(createMap("column", "a", "xpath", "/root/a", DataImporter.MULTI_VALUED, "true")); Context c = getContext(null, @@ -86,6 +92,7 @@ public void testMultiValued() throws Exception { break; result.add(row); } + @SuppressWarnings({"rawtypes"}) List l = (List)result.get(0).get("a"); assertEquals(3, l.size()); assertEquals("1", l.get(0)); @@ -216,9 +223,12 @@ public void testMultiValuedWithMultipleDocuments() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testMultiValuedFlatten() throws Exception { + @SuppressWarnings({"rawtypes"}) Map entityAttrs = createMap("name", "e", "url", "testdata.xml", XPathEntityProcessor.FOR_EACH, "/root"); + @SuppressWarnings({"rawtypes"}) List fields = new ArrayList(); fields.add(createMap("column", "a", "xpath", "/root/a" ,"flatten","true")); Context c = getContext(null, @@ -236,12 +246,15 @@ public void testMultiValuedFlatten() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void withFieldsAndXpathStream() throws Exception { final Object monitor = new Object(); final boolean[] done = new boolean[1]; + @SuppressWarnings({"rawtypes"}) Map entityAttrs = createMap("name", "e", "url", "cd.xml", XPathEntityProcessor.FOR_EACH, "/catalog/cd", "stream", "true", "batchSize","1"); + @SuppressWarnings({"rawtypes"}) List fields = new ArrayList(); fields.add(createMap("column", "title", "xpath", "/catalog/cd/title")); fields.add(createMap("column", "artist", "xpath", "/catalog/cd/artist")); @@ -331,11 +344,13 @@ public void streamStopsAfterInterrupt() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void 
withDefaultSolrAndXsl() throws Exception { File tmpdir = createTempDir().toFile(); AbstractDataImportHandlerTestCase.createFile(tmpdir, "x.xsl", xsl.getBytes(StandardCharsets.UTF_8), false); + @SuppressWarnings({"rawtypes"}) Map entityAttrs = createMap("name", "e", XPathEntityProcessor.USE_SOLR_ADD_SCHEMA, "true", "xsl", "" + new File(tmpdir, "x.xsl").toURI(), "url", "cd.xml"); diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java index 2b0757a4ac98..fe8c65718301 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java @@ -510,6 +510,7 @@ public void testSameForEachAndXpath(){ } @Test + @SuppressWarnings({"unchecked"}) public void testPutNullTest(){ String xml = "\n" + " \n" + diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestZKPropertiesWriter.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestZKPropertiesWriter.java index 9279d282753d..48c0e67ed55e 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestZKPropertiesWriter.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestZKPropertiesWriter.java @@ -95,6 +95,8 @@ public static void dihZk_afterClass() throws Exception { @SuppressForbidden(reason = "Needs currentTimeMillis to construct date stamps") @Test + @SuppressWarnings({"unchecked"}) + @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-12823") public void testZKPropertiesWriter() throws Exception { // test using ZooKeeper assertTrue("Not using ZooKeeper", h.getCoreContainer().isZooKeeperAware()); @@ -121,6 +123,7 @@ public void testZKPropertiesWriter() throws Exception { props.put("last_index_time", oneSecondAgo); spw.persist(props); + @SuppressWarnings({"rawtypes"}) List rows = new ArrayList(); rows.add(createMap("id", "1", "year_s", "2013")); MockDataSource.setIterator("select " + df.format(oneSecondAgo) + " from dummy", rows.iterator()); diff --git a/solr/contrib/extraction/build.gradle b/solr/contrib/extraction/build.gradle index 6bb8684617f2..20f07bfbc35b 100644 --- a/solr/contrib/extraction/build.gradle +++ b/solr/contrib/extraction/build.gradle @@ -17,6 +17,8 @@ apply plugin: 'java-library' +description = 'Solr Integration with Tika for extracting content from binary file formats such as Microsoft Word and Adobe PDF' + dependencies { implementation project(':solr:core') diff --git a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java index cd5d4af576ff..1b33ec948d2c 100644 --- a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java +++ b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java @@ -132,6 +132,7 @@ void addDoc(SolrContentHandler handler) throws IOException { } @Override + @SuppressWarnings({"unchecked"}) public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stream, UpdateRequestProcessor processor) throws Exception { Parser parser = null; @@ -215,7 +216,7 @@ public void load(SolrQueryRequest req, SolrQueryResponse rsp, 
if(pwMapFile != null && pwMapFile.length() > 0) { InputStream is = req.getCore().getResourceLoader().openResource(pwMapFile); if(is != null) { - log.debug("Password file supplied: "+pwMapFile); + log.debug("Password file supplied: {}", pwMapFile); epp.parse(is); } } @@ -223,13 +224,13 @@ public void load(SolrQueryRequest req, SolrQueryResponse rsp, String resourcePassword = params.get(ExtractingParams.RESOURCE_PASSWORD); if(resourcePassword != null) { epp.setExplicitPassword(resourcePassword); - log.debug("Literal password supplied for file "+resourceName); + log.debug("Literal password supplied for file {}", resourceName); parser.parse(inputStream, parsingHandler, metadata, context); } catch (TikaException e) { if(ignoreTikaException) log.warn(new StringBuilder("skip extracting text due to ").append(e.getLocalizedMessage()) - .append(". metadata=").append(metadata.toString()).toString()); + .append(". metadata=").append(metadata.toString()).toString()); // logOk else throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); } @@ -243,6 +244,7 @@ public void load(SolrQueryRequest req, SolrQueryResponse rsp, rsp.add(stream.getName(), writer.toString()); writer.close(); String[] names = metadata.names(); + @SuppressWarnings({"rawtypes"}) NamedList metadataNL = new NamedList(); for (int i = 0; i < names.length; i++) { String[] vals = metadata.getValues(names[i]); diff --git a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java index 2605f9848dfa..ccbdb1a2de2e 100644 --- a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java +++ b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java @@ -52,7 +52,7 @@ public PermissionNameProvider.Name getPermissionName(AuthorizationContext reques } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init(args); } diff --git a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java index 41175a0987fe..5ef5d3f1f3a6 100644 --- a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java +++ b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java @@ -88,7 +88,7 @@ public static LinkedHashMap parseRulesFile(InputStream is) { continue; int sep = line.indexOf("="); if(sep <= 0) { - log.warn("Wrong format of password line "+linenum); + log.warn("Wrong format of password line {}", linenum); continue; } String pass = line.substring(sep+1).trim(); @@ -97,7 +97,7 @@ public static LinkedHashMap parseRulesFile(InputStream is) { Pattern pattern = Pattern.compile(regex); rules.put(pattern, pass); } catch(PatternSyntaxException pse) { - log.warn("Key of line "+linenum+" was not a valid regex pattern", pse); + log.warn("Key of line {} was not a valid regex pattern", linenum, pse); continue; } } diff --git a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/XLSXResponseWriter.java b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/XLSXResponseWriter.java index 9264a1d97e57..9665b25ffd81 100644 --- a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/XLSXResponseWriter.java +++ 
b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/XLSXResponseWriter.java @@ -231,8 +231,10 @@ public void close() throws IOException { } //NOTE: a document cannot currently contain another document + @SuppressWarnings({"rawtypes"}) List tmpList; @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void writeSolrDocument(String name, SolrDocument doc, ReturnFields returnFields, int idx ) throws IOException { if (tmpList == null) { tmpList = new ArrayList(1); @@ -277,7 +279,7 @@ public void writeStr(String name, String val, boolean needsEscaping) throws IOEx } @Override - public void writeArray(String name, Iterator val) throws IOException { + public void writeArray(String name, @SuppressWarnings({"rawtypes"})Iterator val) throws IOException { StringBuffer output = new StringBuffer(); while (val.hasNext()) { Object v = val.next(); diff --git a/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java b/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java index 14de8422ee16..a68870844cf2 100644 --- a/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java +++ b/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java @@ -467,12 +467,14 @@ public void testExtractOnly() throws Exception { assertTrue("handler is null and it shouldn't be", handler != null); SolrQueryResponse rsp = loadLocal("extraction/solr-word.pdf", ExtractingParams.EXTRACT_ONLY, "true"); assertTrue("rsp is null and it shouldn't be", rsp != null); + @SuppressWarnings({"rawtypes"}) NamedList list = rsp.getValues(); String extraction = (String) list.get("solr-word.pdf"); assertTrue("extraction is null and it shouldn't be", extraction != null); assertTrue(extraction + " does not contain " + "solr-word", extraction.indexOf("solr-word") != -1); + @SuppressWarnings({"rawtypes"}) NamedList nl = (NamedList) list.get("solr-word.pdf_metadata"); assertTrue("nl is null and it shouldn't be", nl != null); Object title = nl.get("title"); @@ -507,6 +509,7 @@ public void testXPath() throws Exception { ExtractingParams.EXTRACT_ONLY, "true" ); assertTrue("rsp is null and it shouldn't be", rsp != null); + @SuppressWarnings({"rawtypes"}) NamedList list = rsp.getValues(); String val = (String) list.get("example.html"); assertEquals("News", val.trim()); //there is only one matching tag diff --git a/solr/contrib/jaegertracer-configurator/build.gradle b/solr/contrib/jaegertracer-configurator/build.gradle index bcbb3f6c06bd..d2094b1bc34b 100644 --- a/solr/contrib/jaegertracer-configurator/build.gradle +++ b/solr/contrib/jaegertracer-configurator/build.gradle @@ -17,6 +17,8 @@ apply plugin: 'java-library' +description = 'Jaeger tracer configurator for tracing Solr using OpenTracing with Jaeger backend' + dependencies { implementation project(':solr:core') diff --git a/solr/contrib/jaegertracer-configurator/src/java/org/apache/solr/jaeger/JaegerTracerConfigurator.java b/solr/contrib/jaegertracer-configurator/src/java/org/apache/solr/jaeger/JaegerTracerConfigurator.java index bb144b10a890..fdbb8bbde9ae 100644 --- a/solr/contrib/jaegertracer-configurator/src/java/org/apache/solr/jaeger/JaegerTracerConfigurator.java +++ b/solr/contrib/jaegertracer-configurator/src/java/org/apache/solr/jaeger/JaegerTracerConfigurator.java @@ -38,7 +38,7 @@ public Tracer getTracer() { } @Override - public void init(NamedList args) { + public void 
init(@SuppressWarnings({"rawtypes"})NamedList args) { Object host = args.get(AGENT_HOST); if (!(host instanceof String)) { throw new IllegalArgumentException("Expected a required string for param '" + AGENT_HOST + "'"); diff --git a/solr/contrib/jaegertracer-configurator/src/test/org/apache/solr/jaeger/TestJaegerConfigurator.java b/solr/contrib/jaegertracer-configurator/src/test/org/apache/solr/jaeger/TestJaegerConfigurator.java index 55018eb5ae81..a0867f78dd6e 100644 --- a/solr/contrib/jaegertracer-configurator/src/test/org/apache/solr/jaeger/TestJaegerConfigurator.java +++ b/solr/contrib/jaegertracer-configurator/src/test/org/apache/solr/jaeger/TestJaegerConfigurator.java @@ -69,8 +69,10 @@ public void testInjected() throws Exception{ } @Test + @SuppressWarnings({"unchecked"}) public void testRequiredParameters() throws IOException { JaegerTracerConfigurator configurator = new JaegerTracerConfigurator(); + @SuppressWarnings({"rawtypes"}) NamedList initArgs = new NamedList(); IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> configurator.init(initArgs)); assertTrue(exc.getMessage().contains(AGENT_HOST) || exc.getMessage().contains(AGENT_PORT)); @@ -92,8 +94,10 @@ public void testRequiredParameters() throws IOException { } @Test + @SuppressWarnings({"unchecked"}) public void testIncorrectFormat() { JaegerTracerConfigurator configurator = new JaegerTracerConfigurator(); + @SuppressWarnings({"rawtypes"}) NamedList initArgs = new NamedList(); initArgs.add(AGENT_HOST, 100); initArgs.add(AGENT_PORT, 5775); diff --git a/solr/contrib/langid/build.gradle b/solr/contrib/langid/build.gradle index 8ca54bf187f1..f9cfdb73e3e9 100644 --- a/solr/contrib/langid/build.gradle +++ b/solr/contrib/langid/build.gradle @@ -18,6 +18,8 @@ apply plugin: 'java-library' +description = 'Language Identifier contrib for extracting language from a document being indexed' + dependencies { implementation project(':solr:core') diff --git a/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java b/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java index 487d37c948cb..d4fbe600b0ac 100644 --- a/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java +++ b/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java @@ -149,7 +149,7 @@ private void initParams(SolrParams params) { if(keyVal.length == 2) { lcMap.put(keyVal[0], keyVal[1]); } else { - log.error("Unsupported format for langid.lcmap: "+mapping+". Skipping this mapping."); + log.error("Unsupported format for langid.lcmap: {}. Skipping this mapping.", mapping); } } } @@ -162,7 +162,7 @@ private void initParams(SolrParams params) { if(keyVal.length == 2) { mapLcMap.put(keyVal[0], keyVal[1]); } else { - log.error("Unsupported format for langid.map.lcmap: "+mapping+". Skipping this mapping."); + log.error("Unsupported format for langid.map.lcmap: {}. Skipping this mapping.", mapping); } } } @@ -175,13 +175,15 @@ private void initParams(SolrParams params) { if (maxFieldValueChars > maxTotalChars) { if (maxTotalChars == MAX_TOTAL_CHARS_DEFAULT) { // If the user specified only maxFieldValueChars, make maxTotalChars the same as it - log.warn(MAX_FIELD_VALUE_CHARS + " (" + maxFieldValueChars + ") is less than " + MAX_TOTAL_CHARS + " (" - + maxTotalChars + "). Setting " + MAX_TOTAL_CHARS + " to " + maxFieldValueChars + "."); + log.warn("{} ({}) is less than {} ({}). 
Setting {} to {}." + , MAX_FIELD_VALUE_CHARS, maxFieldValueChars, MAX_TOTAL_CHARS + , maxTotalChars, MAX_TOTAL_CHARS, maxFieldValueChars); maxTotalChars = maxFieldValueChars; } else { // If the user specified maxTotalChars, make maxFieldValueChars the same as it - log.warn(MAX_FIELD_VALUE_CHARS + " (" + maxFieldValueChars + ") is less than " + MAX_TOTAL_CHARS + " (" - + maxTotalChars + "). Setting " + MAX_FIELD_VALUE_CHARS + " to " + maxTotalChars + "."); + log.warn("{} ({}) is less than {} ({}). Setting {} to {}." + , MAX_FIELD_VALUE_CHARS, maxFieldValueChars, MAX_TOTAL_CHARS + , maxTotalChars, MAX_FIELD_VALUE_CHARS, maxTotalChars ); maxFieldValueChars = maxTotalChars; } } @@ -219,10 +221,14 @@ protected void process(SolrInputDocument doc) { List languagelist = detectLanguage(doc); docLang = resolveLanguage(languagelist, fallbackLang); docLangs.add(docLang); - log.debug("Detected main document language from fields "+ Arrays.toString(inputFields) +": "+docLang); + if (log.isDebugEnabled()) { + log.debug("Detected main document language from fields {}: {}", Arrays.toString(inputFields), docLang); + } if(doc.containsKey(langField) && overwrite) { - log.debug("Overwritten old value "+doc.getFieldValue(langField)); + if (log.isDebugEnabled()) { + log.debug("Overwritten old value {}", doc.getFieldValue(langField)); + } } if(langField != null && langField.length() != 0) { doc.setField(langField, docLang); @@ -231,7 +237,7 @@ protected void process(SolrInputDocument doc) { // langField is set, we sanity check it against whitelist and fallback docLang = resolveLanguage(doc.getFieldValue(langField).toString(), fallbackLang); docLangs.add(docLang); - log.debug("Field "+langField+" already contained value "+docLang+", not overwriting."); + log.debug("Field {} already contained value {}, not overwriting.", langField, docLang); } if(enableMapping) { @@ -242,15 +248,17 @@ protected void process(SolrInputDocument doc) { List languagelist = detectLanguage(solrDocReader(doc, new String[]{fieldName})); fieldLang = resolveLanguage(languagelist, docLang); docLangs.add(fieldLang); - log.debug("Mapping field "+fieldName+" using individually detected language "+fieldLang); + log.debug("Mapping field {} using individually detected language {}", fieldName, fieldLang); } else { fieldLang = docLang; - log.debug("Mapping field "+fieldName+" using document global language "+fieldLang); + log.debug("Mapping field {} using document global language {}", fieldName, fieldLang); } String mappedOutputField = getMappedField(fieldName, fieldLang); if (mappedOutputField != null) { - log.debug("Mapping field {} to {}", doc.getFieldValue(docIdField), fieldLang); + if (log.isDebugEnabled()) { + log.debug("Mapping field {} to {}", doc.getFieldValue(docIdField), fieldLang); + } SolrInputField inField = doc.getField(fieldName); doc.setField(mappedOutputField, inField.getValue()); if(!mapKeepOrig) { @@ -282,12 +290,12 @@ private String getFallbackLang(SolrInputDocument doc, String[] fallbackFields, S for(String field : fallbackFields) { if(doc.containsKey(field)) { lang = (String) doc.getFieldValue(field); - log.debug("Language fallback to field "+field); + log.debug("Language fallback to field {}", field); break; } } if(lang == null) { - log.debug("Language fallback to value "+fallbackValue); + log.debug("Language fallback to value {}", fallbackValue); lang = fallbackValue; } return lang; @@ -337,7 +345,9 @@ protected String resolveLanguage(List languages, String fallba DetectedLanguage lang = languages.get(0); String normalizedLang 
= normalizeLangCode(lang.getLangCode()); if(langWhitelist.isEmpty() || langWhitelist.contains(normalizedLang)) { - log.debug("Language detected {} with certainty {}", normalizedLang, lang.getCertainty()); + if (log.isDebugEnabled()) { + log.debug("Language detected {} with certainty {}", normalizedLang, lang.getCertainty()); + } if(lang.getCertainty() >= threshold) { langStr = normalizedLang; } else { @@ -345,7 +355,9 @@ protected String resolveLanguage(List languages, String fallba langStr = fallbackLang; } } else { - log.debug("Detected a language not in whitelist ({}), using fallback {}", lang.getLangCode(), fallbackLang); + if (log.isDebugEnabled()) { + log.debug("Detected a language not in whitelist ({}), using fallback {}", lang.getLangCode(), fallbackLang); + } langStr = fallbackLang; } } @@ -366,7 +378,7 @@ protected String resolveLanguage(List languages, String fallba protected String normalizeLangCode(String langCode) { if (lcMap.containsKey(langCode)) { String lc = lcMap.get(langCode); - log.debug("Doing langcode normalization mapping from "+langCode+" to "+lc); + log.debug("Doing langcode normalization mapping from {} to {}", langCode, lc); return lc; } return langCode; @@ -389,7 +401,7 @@ protected String getMappedField(String currentField, String language) { log.warn("Unsuccessful field name mapping from {} to {}, field does not exist and enforceSchema=true; skipping mapping.", currentField, newFieldName); return null; } else { - log.debug("Doing mapping from "+currentField+" with language "+language+" to field "+newFieldName); + log.debug("Doing mapping from {} with language {} to field {}", currentField, language, newFieldName); } return newFieldName; } diff --git a/solr/contrib/langid/src/java/org/apache/solr/update/processor/OpenNLPLangDetectUpdateProcessorFactory.java b/solr/contrib/langid/src/java/org/apache/solr/update/processor/OpenNLPLangDetectUpdateProcessorFactory.java index b4e0253e235b..c5aa3a71ee16 100644 --- a/solr/contrib/langid/src/java/org/apache/solr/update/processor/OpenNLPLangDetectUpdateProcessorFactory.java +++ b/solr/contrib/langid/src/java/org/apache/solr/update/processor/OpenNLPLangDetectUpdateProcessorFactory.java @@ -61,7 +61,7 @@ public class OpenNLPLangDetectUpdateProcessorFactory extends UpdateRequestProces private SolrResourceLoader solrResourceLoader; @Override - public void init( NamedList args ) + public void init( @SuppressWarnings({"rawtypes"})NamedList args ) { if (args != null) { Object o; diff --git a/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java b/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java index 5a43bd52e796..ecce415421ab 100644 --- a/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java +++ b/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java @@ -58,7 +58,10 @@ protected List detectLanguage(Reader solrDocReader) { certainty = 0d; DetectedLanguage language = new DetectedLanguage(identifier.getLanguage(), certainty); languages.add(language); - log.debug("Language detected as "+language+" with a certainty of "+language.getCertainty()+" (Tika distance="+identifier.toString()+")"); + if (log.isDebugEnabled()) { + log.debug("Language detected as {} with a certainty of {} (Tika distance={})" + , language, language.getCertainty(), identifier); + } } else { log.debug("No input text to detect language from, returning empty 
list"); } diff --git a/solr/contrib/ltr/build.gradle b/solr/contrib/ltr/build.gradle index 98908fac0079..f3288d98665f 100644 --- a/solr/contrib/ltr/build.gradle +++ b/solr/contrib/ltr/build.gradle @@ -17,6 +17,8 @@ apply plugin: 'java-library' +description = 'Learning to Rank Package' + dependencies { implementation project(':solr:core') implementation project(':lucene:analysis:common') diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/LTRThreadModule.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/LTRThreadModule.java index e1426106c9e5..856f4dfea7f3 100644 --- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/LTRThreadModule.java +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/LTRThreadModule.java @@ -59,9 +59,10 @@ */ final public class LTRThreadModule extends CloseHook implements NamedListInitializedPlugin { - public static LTRThreadModule getInstance(NamedList args) { + public static LTRThreadModule getInstance(@SuppressWarnings({"rawtypes"})NamedList args) { final LTRThreadModule threadManager; + @SuppressWarnings({"rawtypes"}) final NamedList threadManagerArgs = extractThreadModuleParams(args); // if and only if there are thread module args then we want a thread module! if (threadManagerArgs.size() > 0) { @@ -77,6 +78,7 @@ public static LTRThreadModule getInstance(NamedList args) { private static String CONFIG_PREFIX = "threadModule."; + @SuppressWarnings({"unchecked", "rawtypes"}) private static NamedList extractThreadModuleParams(NamedList args) { // gather the thread module args from amongst the general args @@ -118,7 +120,8 @@ public LTRThreadModule() { } @Override - public void init(NamedList args) { + @SuppressWarnings({"unchecked"}) + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { if (args != null) { SolrPluginUtils.invokeSetters(this, args); } diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/Feature.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/Feature.java index 0230cfd280ce..bc7ff87db9b9 100644 --- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/Feature.java +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/Feature.java @@ -69,6 +69,7 @@ public abstract class Feature extends Query implements Accountable { final private Map params; + @SuppressWarnings({"rawtypes"}) public static Feature getInstance(SolrResourceLoader solrResourceLoader, String className, String name, Map params) { final Feature f = solrResourceLoader.newInstance( diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/LTRScoringModel.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/LTRScoringModel.java index a378e24044f1..9052ba179916 100644 --- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/LTRScoringModel.java +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/LTRScoringModel.java @@ -89,6 +89,7 @@ public abstract class LTRScoringModel implements Accountable { protected final List norms; private Integer hashCode; // cached since it shouldn't actually change after construction + @SuppressWarnings({"rawtypes"}) public static LTRScoringModel getInstance(SolrResourceLoader solrResourceLoader, String className, String name, List features, List norms, diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/LinearModel.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/LinearModel.java index 908b0735cc7f..e5b506481e35 100644 --- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/LinearModel.java +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/LinearModel.java @@ 
-79,6 +79,7 @@ public class LinearModel extends LTRScoringModel { protected Float[] featureToWeight; public void setWeights(Object weights) { + @SuppressWarnings({"unchecked"}) final Map modelWeights = (Map) weights; for (int ii = 0; ii < features.size(); ++ii) { final String key = features.get(ii).getName(); diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/MultipleAdditiveTreesModel.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/MultipleAdditiveTreesModel.java index c3e434ba0136..e06303a3cc00 100644 --- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/MultipleAdditiveTreesModel.java +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/MultipleAdditiveTreesModel.java @@ -155,10 +155,12 @@ public void setThreshold(String threshold) { this.threshold = Float.parseFloat(threshold) + NODE_SPLIT_SLACK; } + @SuppressWarnings({"unchecked"}) public void setLeft(Object left) { this.left = createRegressionTreeNode((Map) left); } + @SuppressWarnings({"unchecked"}) public void setRight(Object right) { this.right = createRegressionTreeNode((Map) right); } @@ -264,6 +266,7 @@ public void setWeight(String weight) { this.weight = Float.valueOf(weight); } + @SuppressWarnings({"unchecked"}) public void setRoot(Object root) { this.root = createRegressionTreeNode((Map)root); } @@ -300,6 +303,7 @@ public void validate() throws ModelException { } } + @SuppressWarnings({"unchecked"}) public void setTrees(Object trees) { this.trees = new ArrayList(); for (final Object o : (List) trees) { diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java index d72b29b8120b..d136eead4c6e 100644 --- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java @@ -127,6 +127,7 @@ public DefaultLayer() { } public void setMatrix(Object matrixObj) { + @SuppressWarnings({"unchecked"}) final List> matrix = (List>) matrixObj; this.matrixRows = matrix.size(); this.matrixCols = matrix.get(0).size(); @@ -140,6 +141,7 @@ public void setMatrix(Object matrixObj) { } public void setBias(Object biasObj) { + @SuppressWarnings({"unchecked"}) final List vector = (List) biasObj; this.numUnits = vector.size(); this.biasVector = new float[numUnits]; @@ -246,6 +248,7 @@ public String describe() { } } + @SuppressWarnings({"unchecked"}) protected Layer createLayer(Object o) { final DefaultLayer layer = new DefaultLayer(); if (o != null) { @@ -254,6 +257,7 @@ protected Layer createLayer(Object o) { return layer; } + @SuppressWarnings({"unchecked"}) public void setLayers(Object layers) { this.layers = new ArrayList(); for (final Object o : (List) layers) { diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/response/transform/LTRFeatureLoggerTransformerFactory.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/response/transform/LTRFeatureLoggerTransformerFactory.java index 03874b69cd66..3e76b4e9797f 100644 --- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/response/transform/LTRFeatureLoggerTransformerFactory.java +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/response/transform/LTRFeatureLoggerTransformerFactory.java @@ -111,6 +111,7 @@ public void setCsvFeatureSeparator(String csvFeatureSeparator) { } @Override + @SuppressWarnings({"unchecked"}) public void init(@SuppressWarnings("rawtypes") NamedList args) { super.init(args); threadManager = LTRThreadModule.getInstance(args); diff --git 
a/solr/contrib/ltr/src/java/org/apache/solr/ltr/search/LTRQParserPlugin.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/search/LTRQParserPlugin.java index af9977538ca3..65bbbdd7d92f 100644 --- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/search/LTRQParserPlugin.java +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/search/LTRQParserPlugin.java @@ -79,6 +79,7 @@ public class LTRQParserPlugin extends QParserPlugin implements ResourceLoaderAwa public static final String RERANK_DOCS = "reRankDocs"; @Override + @SuppressWarnings({"unchecked"}) public void init(@SuppressWarnings("rawtypes") NamedList args) { super.init(args); threadManager = LTRThreadModule.getInstance(args); diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/ManagedModelStore.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/ManagedModelStore.java index 60cabccab552..add702157ca5 100644 --- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/ManagedModelStore.java +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/ManagedModelStore.java @@ -123,6 +123,7 @@ public void loadStoredModels() { log.info("------ managed models ~ loading ------"); if ((managedData != null) && (managedData instanceof List)) { + @SuppressWarnings({"unchecked"}) final List<Map<String,Object>> up = (List<Map<String,Object>>) managedData; for (final Map u : up) { addModelFromMap(u); @@ -141,7 +142,9 @@ private void addModelFromMap(Map modelMap) { public synchronized void addModel(LTRScoringModel ltrScoringModel) throws ModelException { try { - log.info("adding model {}", ltrScoringModel.getName()); + if (log.isInfoEnabled()) { + log.info("adding model {}", ltrScoringModel.getName()); + } store.addModel(ltrScoringModel); } catch (final ModelException e) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e); diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRReRankingPipeline.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRReRankingPipeline.java index 24c77cafc77b..e921bcbb17d4 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRReRankingPipeline.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRReRankingPipeline.java @@ -248,8 +248,10 @@ public void testDifferentTopN() throws IOException { hits = new TopDocs(hits.totalHits, slice); hits = rescorer.rescore(searcher, hits, topN); for (int i = topN - 1, j = 0; i >= 0; i--, j++) { - log.info("doc {} in pos {}", searcher.doc(hits.scoreDocs[j].doc) - .get("id"), j); + if (log.isInfoEnabled()) { + log.info("doc {} in pos {}", searcher.doc(hits.scoreDocs[j].doc) + .get("id"), j); + } assertEquals(i, Integer.parseInt(searcher.doc(hits.scoreDocs[j].doc).get("id"))); diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java index 9d22cf4e9f89..8b26bcee145e 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java @@ -159,13 +159,15 @@ protected static SortedMap setupTestInit( } if (fstore.exists()) { - log.info("remove feature store config file in {}", - fstore.getAbsolutePath()); + if (log.isInfoEnabled()) { + log.info("remove feature store config file in {}", fstore.getAbsolutePath()); + } Files.delete(fstore.toPath()); } if (mstore.exists()) { - log.info("remove model store config file in {}", - mstore.getAbsolutePath()); + if (log.isInfoEnabled()) { + log.info("remove model store config file in {}", mstore.getAbsolutePath()); + }
Files.delete(mstore.toPath()); } if (!solrconfig.equals("solrconfig.xml")) { diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureExtractionFromMultipleSegments.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureExtractionFromMultipleSegments.java index a15178bd0076..8b75dd628827 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureExtractionFromMultipleSegments.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureExtractionFromMultipleSegments.java @@ -91,8 +91,10 @@ public void testFeatureExtractionFromMultipleSegments() throws Exception { query.add("fl", "*, score,id,normHits,description,fv:[features store='feature-store-6' format='dense' efi.user_text='apple']"); String res = restTestHarness.query("/query" + query.toQueryString()); + @SuppressWarnings({"unchecked"}) Map resultJson = (Map) Utils.fromJSONString(res); + @SuppressWarnings({"unchecked"}) List<Map<String,Object>> docs = (List<Map<String,Object>>)((Map)resultJson.get("response")).get("docs"); int passCount = 0; for (final Map doc : docs) { diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestNoMatchSolrFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestNoMatchSolrFeature.java index 48c1262c547d..052733e74ca2 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestNoMatchSolrFeature.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestNoMatchSolrFeature.java @@ -100,8 +100,10 @@ public void test2NoMatch1YesMatchFeatureReturnsFvWith1FeatureAndDocScoreScaledBy String res = restTestHarness.query("/query" + yesMatchFeatureQuery.toQueryString()); + @SuppressWarnings({"unchecked"}) final Map jsonParse = (Map) Utils .fromJSONString(res); + @SuppressWarnings({"unchecked"}) final Double doc0Score = (Double) ((Map) ((ArrayList) ((Map) jsonParse .get("response")).get("docs")).get(0)).get("score"); @@ -165,8 +167,10 @@ public void test1NoMatchFeatureReturnsFvWith1MatchingFeatureFromStoreAndDocWith0 String res = restTestHarness.query("/query" + yesMatchFeatureQuery.toQueryString()); + @SuppressWarnings({"unchecked"}) final Map jsonParse = (Map) Utils .fromJSONString(res); + @SuppressWarnings({"unchecked"}) final Double doc0Score = (Double) ((Map) ((ArrayList) ((Map) jsonParse .get("response")).get("docs")).get(0)).get("score"); diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java index 8ff568426058..0a482ff295d0 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java @@ -97,14 +97,20 @@ public static void implTestOriginalScoreResponseDocsCheck(String modelName, assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='"+doc3Id+"'"); final String res = restTestHarness.query("/query" + query.toQueryString()); + @SuppressWarnings({"unchecked"}) final Map jsonParse = (Map) Utils .fromJSONString (res); + @SuppressWarnings({"unchecked"}) final String doc0Score = ((Double) ((Map) ((ArrayList) ((Map) jsonParse .get("response")).get("docs")).get(0)).get("score")).toString(); + + @SuppressWarnings({"unchecked"}) final String doc1Score = ((Double) ((Map) ((ArrayList) ((Map) jsonParse .get("response")).get("docs")).get(1)).get("score")).toString(); + @SuppressWarnings({"unchecked"}) final String doc2Score = ((Double) ((Map) ((ArrayList) ((Map) jsonParse
.get("response")).get("docs")).get(2)).get("score")).toString(); + @SuppressWarnings({"unchecked"}) final String doc3Score = ((Double) ((Map) ((ArrayList) ((Map) jsonParse .get("response")).get("docs")).get(3)).get("score")).toString(); diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java index 045c625a218d..6eea812325cd 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java @@ -315,6 +315,7 @@ public float apply(float in) { } @Override + @SuppressWarnings({"unchecked"}) protected Layer createLayer(Object o) { final DefaultLayer layer = new DefaultLayer(); if (o != null) { diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManagerPersistence.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManagerPersistence.java index 0e829e68d95f..f52acdcc045a 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManagerPersistence.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManagerPersistence.java @@ -94,16 +94,20 @@ public void testFeaturePersistence() throws Exception { .readFileToString(mstorefile, "UTF-8"); //check feature/model stores on deletion + @SuppressWarnings({"unchecked"}) final ArrayList fStore = (ArrayList) ((Map) Utils.fromJSONString(fstorecontent)).get("managedList"); for (int idx = 0;idx < fStore.size(); ++ idx) { + @SuppressWarnings({"unchecked"}) String store = (String) ((Map)fStore.get(idx)).get("store"); assertTrue(store.equals("test") || store.equals("test2") || store.equals("test1")); } + @SuppressWarnings({"unchecked"}) final ArrayList mStore = (ArrayList) ((Map) Utils.fromJSONString(mstorecontent)).get("managedList"); for (int idx = 0;idx < mStore.size(); ++ idx) { + @SuppressWarnings({"unchecked"}) String store = (String) ((Map)mStore.get(idx)).get("store"); assertTrue(store.equals("test") || store.equals("test1")); } diff --git a/solr/contrib/prometheus-exporter/build.gradle b/solr/contrib/prometheus-exporter/build.gradle index c2fa1bdd9032..13a9748bc44c 100644 --- a/solr/contrib/prometheus-exporter/build.gradle +++ b/solr/contrib/prometheus-exporter/build.gradle @@ -18,6 +18,8 @@ apply plugin: 'java-library' +description = 'Prometheus exporter for exposing metrics from Solr using Metrics API and Search API' + dependencies { implementation project(':solr:core') implementation project(':lucene:analysis:common') diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/SchedulerMetricsCollector.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/SchedulerMetricsCollector.java index e1bdc607d70b..53b0aa1c6de8 100644 --- a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/SchedulerMetricsCollector.java +++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/SchedulerMetricsCollector.java @@ -77,7 +77,8 @@ public void start() { scheduler.scheduleWithFixedDelay(this::collectMetrics, 0, duration, timeUnit); } - private void collectMetrics() { + private@SuppressWarnings({"try"}) + void collectMetrics() { try (Histogram.Timer timer = metricsCollectionTime.startTimer()) { log.info("Beginning metrics collection"); diff --git 
a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/MetricsQuery.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/MetricsQuery.java index 9e79f7db1e43..c28801f4c2a5 100644 --- a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/MetricsQuery.java +++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/MetricsQuery.java @@ -88,6 +88,7 @@ public List getJsonQueries() { return jsonQueries; } + @SuppressWarnings({"unchecked", "rawtypes"}) public static List from(Node node) throws JsonQueryException { List metricsQueries = new ArrayList<>(); diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/PrometheusExporterSettings.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/PrometheusExporterSettings.java index 85f0ec8362f7..6c03af4e3711 100644 --- a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/PrometheusExporterSettings.java +++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/PrometheusExporterSettings.java @@ -57,13 +57,15 @@ public PrometheusExporterSettings build() { } public static PrometheusExporterSettings from(Node settings) { + @SuppressWarnings({"rawtypes"}) NamedList config = DOMUtil.childNodesToNamedList(settings); Builder builder = builder(); + @SuppressWarnings({"unchecked", "rawtypes"}) List httpClientSettings = config.getAll("httpClients"); - for (NamedList entry : httpClientSettings) { + for (@SuppressWarnings({"rawtypes"})NamedList entry : httpClientSettings) { Integer connectionTimeout = (Integer) entry.get("connectionTimeout"); if (connectionTimeout != null) { builder.withConnectionHttpTimeout(connectionTimeout); diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java index cf282156d0f1..04b0b75278ff 100644 --- a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java +++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java @@ -208,7 +208,7 @@ public static void main(String[] args) { solrExporter.start(); log.info("Solr Prometheus Exporter is running"); } catch (IOException e) { - log.error("Failed to start Solr Prometheus Exporter: " + e.toString()); + log.error("Failed to start Solr Prometheus Exporter: ", e); } catch (ArgumentParserException e) { parser.handleError(e); } diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/Async.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/Async.java index 53d7f0879d90..2b8c763e3feb 100644 --- a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/Async.java +++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/Async.java @@ -29,6 +29,7 @@ public class Async { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + @SuppressWarnings({"rawtypes"}) public static <T> CompletableFuture<List<T>> waitForAllSuccessfulResponses(List<CompletableFuture<T>> futures) { CompletableFuture<Void> completed = CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])); diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/SolrScraper.java
b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/SolrScraper.java index 74f2772a6115..096c24848754 100644 --- a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/SolrScraper.java +++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/SolrScraper.java @@ -106,7 +106,7 @@ protected MetricSamples request(SolrClient client, MetricsQuery query) throws IO queryResponse = client.request(queryRequest, query.getCollection().get()); } } catch (SolrServerException | IOException e) { - log.error("failed to request: " + queryRequest.getPath() + " " + e.getMessage()); + log.error("failed to request: {} {}", queryRequest.getPath(), e.getMessage()); } JsonNode jsonNode = OBJECT_MAPPER.readTree((String) queryResponse.get("response")); @@ -164,7 +164,7 @@ protected MetricSamples request(SolrClient client, MetricsQuery query) throws IO name, labelNames, labelValues, value)); } } catch (JsonQueryException e) { - log.error("Error apply JSON query={} to result", jsonQuery.toString(), e); + log.error("Error apply JSON query={} to result", jsonQuery, e); scrapeErrorTotal.inc(); } } diff --git a/solr/contrib/velocity/build.gradle b/solr/contrib/velocity/build.gradle index b824433beed0..b17b38be9f64 100644 --- a/solr/contrib/velocity/build.gradle +++ b/solr/contrib/velocity/build.gradle @@ -18,6 +18,8 @@ apply plugin: 'java-library' +description = 'Solr Velocity Response Writer' + dependencies { implementation project(':solr:core') diff --git a/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java b/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java index a598ba291e8d..a4ddf556380f 100644 --- a/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java +++ b/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java @@ -95,7 +95,7 @@ public class VelocityResponseWriter implements QueryResponseWriter, SolrCoreAwar private Map customTools = new HashMap(); @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { log.warn("VelocityResponseWriter is deprecated. This may be removed in future Solr releases. Please SOLR-14065."); fileResourceLoaderBaseDir = null; String templateBaseDir = (String) args.get(TEMPLATE_BASE_DIR); @@ -103,11 +103,11 @@ public void init(NamedList args) { if (templateBaseDir != null && !templateBaseDir.isEmpty()) { fileResourceLoaderBaseDir = new File(templateBaseDir).getAbsoluteFile(); if (!fileResourceLoaderBaseDir.exists()) { // "*not* exists" condition! 
- log.warn(TEMPLATE_BASE_DIR + " specified does not exist: " + fileResourceLoaderBaseDir); + log.warn("{} specified does not exist: {}", TEMPLATE_BASE_DIR, fileResourceLoaderBaseDir); fileResourceLoaderBaseDir = null; } else { if (!fileResourceLoaderBaseDir.isDirectory()) { // "*not* a directory" condition - log.warn(TEMPLATE_BASE_DIR + " specified is not a directory: " + fileResourceLoaderBaseDir); + log.warn("{} specified is not a directory: {}", TEMPLATE_BASE_DIR, fileResourceLoaderBaseDir); fileResourceLoaderBaseDir = null; } } @@ -115,9 +115,11 @@ public void init(NamedList args) { initPropertiesFileName = (String) args.get(PROPERTIES_FILE); + @SuppressWarnings({"rawtypes"}) NamedList tools = (NamedList)args.get("tools"); if (tools != null) { for(Object t : tools) { + @SuppressWarnings({"rawtypes"}) Map.Entry tool = (Map.Entry)t; customTools.put(tool.getKey().toString(), tool.getValue().toString()); } @@ -132,7 +134,7 @@ public void inform(SolrCore core) { try { velocityInitProps.load(new InputStreamReader(core.getResourceLoader().openResource(initPropertiesFileName), StandardCharsets.UTF_8)); } catch (IOException e) { - log.warn("Error loading " + PROPERTIES_FILE + " specified property file: " + initPropertiesFileName, e); + log.warn("Error loading {} specified property file: {}", PROPERTIES_FILE, initPropertiesFileName, e); } } } @@ -228,11 +230,13 @@ private void doWrite(Writer writer, SolrQueryRequest request, SolrQueryResponse } } + @SuppressWarnings({"unchecked"}) private VelocityContext createContext(SolrQueryRequest request, SolrQueryResponse response) { VelocityContext context = new VelocityContext(); // Register useful Velocity "tools" String locale = request.getParams().get(LOCALE); + @SuppressWarnings({"rawtypes"}) Map toolConfig = new HashMap(); toolConfig.put("locale", locale); diff --git a/solr/core/build.gradle b/solr/core/build.gradle index 8c7a414f0c0a..71002c2c06b9 100644 --- a/solr/core/build.gradle +++ b/solr/core/build.gradle @@ -18,6 +18,8 @@ apply plugin: 'java-library' +description = 'Apache Solr Core' + dependencies { api project(':lucene:core') api project(':lucene:analysis:common') @@ -73,7 +75,6 @@ dependencies { }) implementation 'com.tdunning:t-digest' - implementation 'commons-fileupload:commons-fileupload' implementation 'io.opentracing:opentracing-api' implementation 'io.opentracing:opentracing-noop' diff --git a/solr/core/ivy.xml b/solr/core/ivy.xml index 19d74f310e15..c632c47b887b 100644 --- a/solr/core/ivy.xml +++ b/solr/core/ivy.xml @@ -38,7 +38,6 @@ - diff --git a/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java b/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java index a37240ad48ea..f563472ac4c3 100644 --- a/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java +++ b/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java @@ -124,6 +124,7 @@ public Name getPermissionName(AuthorizationContext request) { return endPoint.permission(); } + @SuppressWarnings({"unchecked", "rawtypes"}) private static SpecProvider readSpec(EndPoint endPoint, List m) { return () -> { Map map = new LinkedHashMap(); @@ -179,6 +180,7 @@ public void call(SolrQueryRequest req, SolrQueryResponse rsp) { commands.get(cmd.name).invoke(req, rsp, cmd); } + @SuppressWarnings({"rawtypes"}) List errs = CommandOperation.captureErrors(cmds); if (!errs.isEmpty()) { log.error("{}{}", ERR, Utils.toJSONString(errs)); @@ -193,6 +195,7 @@ static class Cmd { final Object obj; ObjectMapper mapper = SolrJacksonAnnotationInspector.createObjectMapper(); int paramsCount; + 
@SuppressWarnings({"rawtypes"}) Class c; boolean isWrappedInPayloadObj = false; @@ -235,6 +238,7 @@ static class Cmd { } + @SuppressWarnings({"unchecked"}) void invoke(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation cmd) { try { if (paramsCount == 2) { diff --git a/solr/core/src/java/org/apache/solr/api/ApiBag.java b/solr/core/src/java/org/apache/solr/api/ApiBag.java index 84facfa631dd..57407550961c 100644 --- a/solr/core/src/java/org/apache/solr/api/ApiBag.java +++ b/solr/core/src/java/org/apache/solr/api/ApiBag.java @@ -69,6 +69,7 @@ public ApiBag(boolean isCoreSpecific) { /**Register a POJO annotated with {@link EndPoint} * @param o the instance to be used for invocations */ + @SuppressWarnings({"unchecked"}) public synchronized List registerObject(Object o) { List l = AnnotatedApi.getApis(o); for (Api api : l) { @@ -76,6 +77,7 @@ public synchronized List registerObject(Object o) { } return l; } + @SuppressWarnings({"unchecked"}) public synchronized void register(Api api) { register(api, Collections.EMPTY_MAP); } @@ -93,6 +95,7 @@ public synchronized void register(Api api, Map nameSubstitutes) } } + @SuppressWarnings({"unchecked"}) private void validateAndRegister(Api api, Map nameSubstitutes) { ValidatingJsonMap spec = api.getSpec(); Api introspect = new IntrospectApi(api, isCoreSpecific); @@ -157,6 +160,7 @@ public IntrospectApi(Api base, boolean isCoreSpecific) { this.isCoreSpecific = isCoreSpecific; } + @SuppressWarnings({"unchecked", "rawtypes"}) public void call(SolrQueryRequest req, SolrQueryResponse rsp) { String cmd = req.getParams().get("command"); @@ -197,6 +201,7 @@ public void call(SolrQueryRequest req, SolrQueryResponse rsp) { public static Map getParsedSchema(ValidatingJsonMap commands) { Map validators = new HashMap<>(); for (Object o : commands.entrySet()) { + @SuppressWarnings({"rawtypes"}) Map.Entry cmd = (Map.Entry) o; try { validators.put((String) cmd.getKey(), new JsonSchemaValidator((Map) cmd.getValue())); @@ -289,6 +294,7 @@ public static SpecProvider constructSpec(PluginInfo info) { Object specObj = info == null ? 
null : info.attributes.get("spec"); if (specObj == null) specObj = "emptySpec"; if (specObj instanceof Map) { + @SuppressWarnings({"rawtypes"}) Map map = (Map) specObj; return () -> ValidatingJsonMap.getDeepCopy(map, 4, false); } else { @@ -296,6 +302,7 @@ public static SpecProvider constructSpec(PluginInfo info) { } } + @SuppressWarnings({"rawtypes"}) public static List getCommandOperations(ContentStream stream, Map validators, boolean validate) { List parsedCommands = null; try { @@ -327,6 +334,7 @@ public static List getCommandOperations(ContentStream stream, } } + @SuppressWarnings({"rawtypes"}) List errs = CommandOperation.captureErrors(commandsCopy); if (!errs.isEmpty()) { throw new ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "Error in command payload", errs); @@ -335,13 +343,15 @@ public static List getCommandOperations(ContentStream stream, } public static class ExceptionWithErrObject extends SolrException { + @SuppressWarnings({"rawtypes"}) private List errs; - public ExceptionWithErrObject(ErrorCode code, String msg, List errs) { + public ExceptionWithErrObject(ErrorCode code, String msg, @SuppressWarnings({"rawtypes"})List errs) { super(code, msg); this.errs = errs; } + @SuppressWarnings({"rawtypes"}) public List getErrs() { return errs; } diff --git a/solr/core/src/java/org/apache/solr/api/V2HttpCall.java b/solr/core/src/java/org/apache/solr/api/V2HttpCall.java index d9dca564bc4b..c55a08b1f51e 100644 --- a/solr/core/src/java/org/apache/solr/api/V2HttpCall.java +++ b/solr/core/src/java/org/apache/solr/api/V2HttpCall.java @@ -265,6 +265,7 @@ public void call(SolrQueryRequest req, SolrQueryResponse rsp) { return api; } + @SuppressWarnings({"unchecked"}) private static CompositeApi getSubPathApi(PluginBag requestHandlers, String path, String fullPath, CompositeApi compositeApi) { String newPath = path.endsWith(CommonParams.INTROSPECT) ? path.substring(0, path.length() - CommonParams.INTROSPECT.length()) : path; @@ -286,6 +287,7 @@ public void call(SolrQueryRequest req1, SolrQueryResponse rsp) { result.put(prefix + e.getKey(), e.getValue()); } + @SuppressWarnings({"rawtypes"}) Map m = (Map) rsp.getValues().get("availableSubPaths"); if(m != null){ m.putAll(result); diff --git a/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java b/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java index 1b4d9629368a..3c2b490bb294 100644 --- a/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java +++ b/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java @@ -30,6 +30,7 @@ import java.util.function.Supplier; import org.apache.commons.io.output.ByteArrayOutputStream; +import org.apache.lucene.search.TotalHits.Relation; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrServerException; @@ -157,7 +158,8 @@ public EmbeddedSolrServer(CoreContainer coreContainer, String coreName, // It *should* be able to convert the response directly into a named list. 
@Override - public NamedList request(SolrRequest request, String coreName) throws SolrServerException, IOException { + @SuppressWarnings({"unchecked"}) + public NamedList request(@SuppressWarnings({"rawtypes"})SolrRequest request, String coreName) throws SolrServerException, IOException { String path = request.getPath(); if (path == null || !path.startsWith("/")) { @@ -238,6 +240,7 @@ public void writeResults(ResultContext ctx, JavaBinCodec codec) throws IOExcepti // write an empty list... SolrDocumentList docs = new SolrDocumentList(); docs.setNumFound(ctx.getDocList().matches()); + docs.setNumFoundExact(ctx.getDocList().hitCountRelation() == Relation.EQUAL_TO); docs.setStart(ctx.getDocList().offset()); docs.setMaxScore(ctx.getDocList().maxScore()); codec.writeSolrDocumentList(docs); @@ -268,12 +271,14 @@ public void writeResults(ResultContext ctx, JavaBinCodec codec) throws IOExcepti } catch (Exception ex) { throw new SolrServerException(ex); } finally { - if (req != null) req.close(); - SolrRequestInfo.clearRequestInfo(); + if (req != null) { + req.close(); + SolrRequestInfo.clearRequestInfo(); + } } } - private Set getContentStreams(SolrRequest request) throws IOException { + private Set getContentStreams(@SuppressWarnings({"rawtypes"})SolrRequest request) throws IOException { if (request.getMethod() == SolrRequest.METHOD.GET) return null; if (request instanceof ContentStreamUpdateRequest) { final ContentStreamUpdateRequest csur = (ContentStreamUpdateRequest) request; diff --git a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java index 5a17f4cf7913..5a7dcd5c817a 100644 --- a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java +++ b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java @@ -172,20 +172,20 @@ public void destroy() { } private void executeDelay() { int delayMs = 0; for (Delay delay: delays) { - this.log.info("Delaying {}, for reason: {}", delay.delayValue, delay.reason); + log.info("Delaying {}, for reason: {}", delay.delayValue, delay.reason); if (delay.counter.decrementAndGet() == 0) { delayMs += delay.delayValue; } } if (delayMs > 0) { - this.log.info("Pausing this socket connection for {}ms...", delayMs); + log.info("Pausing this socket connection for {}ms...", delayMs); try { Thread.sleep(delayMs); } catch (InterruptedException e) { throw new RuntimeException(e); } - this.log.info("Waking up after the delay of {}ms...", delayMs); + log.info("Waking up after the delay of {}ms...", delayMs); } } diff --git a/solr/core/src/java/org/apache/solr/cloud/CloudConfigSetService.java b/solr/core/src/java/org/apache/solr/cloud/CloudConfigSetService.java index e98c33ccfb02..644b49ee47f4 100644 --- a/solr/core/src/java/org/apache/solr/cloud/CloudConfigSetService.java +++ b/solr/core/src/java/org/apache/solr/cloud/CloudConfigSetService.java @@ -78,6 +78,7 @@ public SolrResourceLoader createCoreResourceLoader(CoreDescriptor cd) { } @Override + @SuppressWarnings({"rawtypes"}) protected NamedList loadConfigSetFlags(CoreDescriptor cd, SolrResourceLoader loader) { try { return ConfigSetProperties.readFromResourceLoader(loader, "."); diff --git a/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java b/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java index 478b2d5b60b9..ecc653b52f08 100644 --- a/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java +++ b/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java @@ -131,6 
+131,7 @@ public static String unifiedResourcePath(SolrResourceLoader loader) { /**Read the list of public keys from ZK */ + @SuppressWarnings({"unchecked"}) public static Map getTrustedKeys(SolrZkClient zk, String dir) { Map result = new HashMap<>(); try { diff --git a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java index 9ba490045393..13985709a7bf 100644 --- a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java +++ b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java @@ -19,50 +19,13 @@ import java.io.Closeable; import java.io.IOException; import java.lang.invoke.MethodHandles; -import java.util.ArrayList; -import java.util.EnumSet; -import java.util.List; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; - -import org.apache.hadoop.fs.Path; -import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.solr.cloud.overseer.OverseerAction; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.SolrException.ErrorCode; -import org.apache.solr.common.cloud.ClusterState; -import org.apache.solr.common.cloud.DocCollection; -import org.apache.solr.common.cloud.Replica; -import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.ZkCmdExecutor; -import org.apache.solr.common.cloud.ZkCoreNodeProps; import org.apache.solr.common.cloud.ZkNodeProps; -import org.apache.solr.common.cloud.ZkStateReader; -import org.apache.solr.common.util.RetryUtil; -import org.apache.solr.common.util.Utils; -import org.apache.solr.core.CoreContainer; -import org.apache.solr.core.SolrCore; -import org.apache.solr.logging.MDCLoggingContext; -import org.apache.solr.search.SolrIndexSearcher; -import org.apache.solr.update.PeerSync; -import org.apache.solr.update.UpdateLog; -import org.apache.solr.util.RefCounted; -import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException.NoNodeException; -import org.apache.zookeeper.KeeperException.NodeExistsException; -import org.apache.zookeeper.KeeperException.SessionExpiredException; -import org.apache.zookeeper.Op; -import org.apache.zookeeper.OpResult; -import org.apache.zookeeper.OpResult.SetDataResult; -import org.apache.zookeeper.ZooDefs; -import org.apache.zookeeper.data.Stat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.common.params.CommonParams.ID; - public abstract class ElectionContext implements Closeable { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); final String electionPath; @@ -111,676 +74,4 @@ public ElectionContext copy(){ } } -class ShardLeaderElectionContextBase extends ElectionContext { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - protected final SolrZkClient zkClient; - protected String shardId; - protected String collection; - protected LeaderElector leaderElector; - protected ZkStateReader zkStateReader; - protected ZkController zkController; - private Integer leaderZkNodeParentVersion; - - // Prevents a race between cancelling and becoming leader. 
- private final Object lock = new Object(); - - public ShardLeaderElectionContextBase(LeaderElector leaderElector, - final String shardId, final String collection, final String coreNodeName, - ZkNodeProps props, ZkController zkController) { - super(coreNodeName, ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection - + "/leader_elect/" + shardId, ZkStateReader.getShardLeadersPath( - collection, shardId), props, zkController.getZkClient()); - this.leaderElector = leaderElector; - this.zkStateReader = zkController.getZkStateReader(); - this.zkClient = zkStateReader.getZkClient(); - this.zkController = zkController; - this.shardId = shardId; - this.collection = collection; - - String parent = new Path(leaderPath).getParent().toString(); - ZkCmdExecutor zcmd = new ZkCmdExecutor(30000); - // only if /collections/{collection} exists already do we succeed in creating this path - log.info("make sure parent is created {}", parent); - try { - zcmd.ensureExists(parent, (byte[])null, CreateMode.PERSISTENT, zkClient, 2); - } catch (KeeperException e) { - throw new RuntimeException(e); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new RuntimeException(e); - } - } - - @Override - public void cancelElection() throws InterruptedException, KeeperException { - super.cancelElection(); - synchronized (lock) { - if (leaderZkNodeParentVersion != null) { - try { - // We need to be careful and make sure we *only* delete our own leader registration node. - // We do this by using a multi and ensuring the parent znode of the leader registration node - // matches the version we expect - there is a setData call that increments the parent's znode - // version whenever a leader registers. - log.debug("Removing leader registration node on cancel: {} {}", leaderPath, leaderZkNodeParentVersion); - List ops = new ArrayList<>(2); - ops.add(Op.check(new Path(leaderPath).getParent().toString(), leaderZkNodeParentVersion)); - ops.add(Op.delete(leaderPath, -1)); - zkClient.multi(ops, true); - } catch (KeeperException.NoNodeException nne) { - // no problem - log.debug("No leader registration node found to remove: {}", leaderPath); - } catch (KeeperException.BadVersionException bve) { - log.info("Cannot remove leader registration node because the current registered node is not ours: {}", leaderPath); - // no problem - } catch (InterruptedException e) { - throw e; - } catch (Exception e) { - SolrException.log(log, e); - } - leaderZkNodeParentVersion = null; - } else { - log.info("No version found for ephemeral leader parent node, won't remove previous leader registration."); - } - } - } - - @Override - void runLeaderProcess(boolean weAreReplacement, int pauseBeforeStartMs) - throws KeeperException, InterruptedException, IOException { - // register as leader - if an ephemeral is already there, wait to see if it goes away - - String parent = new Path(leaderPath).getParent().toString(); - try { - RetryUtil.retryOnThrowable(NodeExistsException.class, 60000, 5000, () -> { - synchronized (lock) { - log.info("Creating leader registration node {} after winning as {}", leaderPath, leaderSeqPath); - List ops = new ArrayList<>(2); - - // We use a multi operation to get the parent nodes version, which will - // be used to make sure we only remove our own leader registration node. - // The setData call used to get the parent version is also the trigger to - // increment the version. We also do a sanity check that our leaderSeqPath exists. 
- - ops.add(Op.check(leaderSeqPath, -1)); - ops.add(Op.create(leaderPath, Utils.toJSON(leaderProps), zkClient.getZkACLProvider().getACLsToAdd(leaderPath), CreateMode.EPHEMERAL)); - ops.add(Op.setData(parent, null, -1)); - List results; - - results = zkClient.multi(ops, true); - for (OpResult result : results) { - if (result.getType() == ZooDefs.OpCode.setData) { - SetDataResult dresult = (SetDataResult) result; - Stat stat = dresult.getStat(); - leaderZkNodeParentVersion = stat.getVersion(); - return; - } - } - assert leaderZkNodeParentVersion != null; - } - }); - } catch (NoNodeException e) { - log.info("Will not register as leader because it seems the election is no longer taking place."); - return; - } catch (Throwable t) { - if (t instanceof OutOfMemoryError) { - throw (OutOfMemoryError) t; - } - throw new SolrException(ErrorCode.SERVER_ERROR, "Could not register as the leader because creating the ephemeral registration node in ZooKeeper failed", t); - } - - assert shardId != null; - boolean isAlreadyLeader = false; - if (zkStateReader.getClusterState() != null && - zkStateReader.getClusterState().getCollection(collection).getSlice(shardId).getReplicas().size() < 2) { - Replica leader = zkStateReader.getLeader(collection, shardId); - if (leader != null - && leader.getBaseUrl().equals(leaderProps.get(ZkStateReader.BASE_URL_PROP)) - && leader.getCoreName().equals(leaderProps.get(ZkStateReader.CORE_NAME_PROP))) { - isAlreadyLeader = true; - } - } - if (!isAlreadyLeader) { - ZkNodeProps m = ZkNodeProps.fromKeyVals(Overseer.QUEUE_OPERATION, OverseerAction.LEADER.toLower(), - ZkStateReader.SHARD_ID_PROP, shardId, - ZkStateReader.COLLECTION_PROP, collection, - ZkStateReader.BASE_URL_PROP, leaderProps.get(ZkStateReader.BASE_URL_PROP), - ZkStateReader.CORE_NAME_PROP, leaderProps.get(ZkStateReader.CORE_NAME_PROP), - ZkStateReader.STATE_PROP, Replica.State.ACTIVE.toString()); - assert zkController != null; - assert zkController.getOverseer() != null; - zkController.getOverseer().offerStateUpdate(Utils.toJSON(m)); - } - } - - public LeaderElector getLeaderElector() { - return leaderElector; - } - - Integer getLeaderZkNodeParentVersion() { - synchronized (lock) { - return leaderZkNodeParentVersion; - } - } -} - -// add core container and stop passing core around... 
-final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - private final CoreContainer cc; - private final SyncStrategy syncStrategy; - - private volatile boolean isClosed = false; - - public ShardLeaderElectionContext(LeaderElector leaderElector, - final String shardId, final String collection, - final String coreNodeName, ZkNodeProps props, ZkController zkController, CoreContainer cc) { - super(leaderElector, shardId, collection, coreNodeName, props, - zkController); - this.cc = cc; - syncStrategy = new SyncStrategy(cc); - } - - @Override - public void close() { - super.close(); - this.isClosed = true; - syncStrategy.close(); - } - - @Override - public void cancelElection() throws InterruptedException, KeeperException { - String coreName = leaderProps.getStr(ZkStateReader.CORE_NAME_PROP); - try (SolrCore core = cc.getCore(coreName)) { - if (core != null) { - core.getCoreDescriptor().getCloudDescriptor().setLeader(false); - } - } - - super.cancelElection(); - } - - @Override - public ElectionContext copy() { - return new ShardLeaderElectionContext(leaderElector, shardId, collection, id, leaderProps, zkController, cc); - } - - /* - * weAreReplacement: has someone else been the leader already? - */ - @Override - void runLeaderProcess(boolean weAreReplacement, int pauseBeforeStart) throws KeeperException, - InterruptedException, IOException { - String coreName = leaderProps.getStr(ZkStateReader.CORE_NAME_PROP); - ActionThrottle lt; - try (SolrCore core = cc.getCore(coreName)) { - if (core == null ) { - // shutdown or removed - return; - } - MDCLoggingContext.setCore(core); - lt = core.getUpdateHandler().getSolrCoreState().getLeaderThrottle(); - } - - try { - lt.minimumWaitBetweenActions(); - lt.markAttemptingAction(); - - - int leaderVoteWait = cc.getZkController().getLeaderVoteWait(); - - log.debug("Running the leader process for shard={} and weAreReplacement={} and leaderVoteWait={}", shardId, weAreReplacement, leaderVoteWait); - if (zkController.getClusterState().getCollection(collection).getSlice(shardId).getReplicas().size() > 1) { - // Clear the leader in clusterstate. We only need to worry about this if there is actually more than one replica. - ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.LEADER.toLower(), - ZkStateReader.SHARD_ID_PROP, shardId, ZkStateReader.COLLECTION_PROP, collection); - zkController.getOverseer().getStateUpdateQueue().offer(Utils.toJSON(m)); - } - - boolean allReplicasInLine = false; - if (!weAreReplacement) { - allReplicasInLine = waitForReplicasToComeUp(leaderVoteWait); - } else { - allReplicasInLine = areAllReplicasParticipating(); - } - - if (isClosed) { - // Solr is shutting down or the ZooKeeper session expired while waiting for replicas. If the later, - // we cannot be sure we are still the leader, so we should bail out. The OnReconnect handler will - // re-register the cores and handle a new leadership election. - return; - } - - Replica.Type replicaType; - String coreNodeName; - boolean setTermToMax = false; - try (SolrCore core = cc.getCore(coreName)) { - - if (core == null) { - return; - } - - replicaType = core.getCoreDescriptor().getCloudDescriptor().getReplicaType(); - coreNodeName = core.getCoreDescriptor().getCloudDescriptor().getCoreNodeName(); - // should I be leader? 
- ZkShardTerms zkShardTerms = zkController.getShardTerms(collection, shardId); - if (zkShardTerms.registered(coreNodeName) && !zkShardTerms.canBecomeLeader(coreNodeName)) { - if (!waitForEligibleBecomeLeaderAfterTimeout(zkShardTerms, coreNodeName, leaderVoteWait)) { - rejoinLeaderElection(core); - return; - } else { - // only log an error if this replica win the election - setTermToMax = true; - } - } - - if (isClosed) { - return; - } - - log.info("I may be the new leader - try and sync"); - - // we are going to attempt to be the leader - // first cancel any current recovery - core.getUpdateHandler().getSolrCoreState().cancelRecovery(); - - if (weAreReplacement) { - // wait a moment for any floating updates to finish - try { - Thread.sleep(2500); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, e); - } - } - - PeerSync.PeerSyncResult result = null; - boolean success = false; - try { - result = syncStrategy.sync(zkController, core, leaderProps, weAreReplacement); - success = result.isSuccess(); - } catch (Exception e) { - SolrException.log(log, "Exception while trying to sync", e); - result = PeerSync.PeerSyncResult.failure(); - } - - UpdateLog ulog = core.getUpdateHandler().getUpdateLog(); - - if (!success) { - boolean hasRecentUpdates = false; - if (ulog != null) { - // TODO: we could optimize this if necessary - try (UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates()) { - hasRecentUpdates = !recentUpdates.getVersions(1).isEmpty(); - } - } - - if (!hasRecentUpdates) { - // we failed sync, but we have no versions - we can't sync in that case - // - we were active - // before, so become leader anyway if no one else has any versions either - if (result.getOtherHasVersions().orElse(false)) { - log.info("We failed sync, but we have no versions - we can't sync in that case. 
But others have some versions, so we should not become leader"); - success = false; - } else { - log.info( - "We failed sync, but we have no versions - we can't sync in that case - we were active before, so become leader anyway"); - success = true; - } - } - } - - // solrcloud_debug - if (log.isDebugEnabled()) { - try { - RefCounted searchHolder = core.getNewestSearcher(false); - SolrIndexSearcher searcher = searchHolder.get(); - try { - if (log.isDebugEnabled()) { - log.debug("{} synched {}", core.getCoreContainer().getZkController().getNodeName() - , searcher.count(new MatchAllDocsQuery())); - } - } finally { - searchHolder.decref(); - } - } catch (Exception e) { - log.error("Error in solrcloud_debug block", e); - } - } - if (!success) { - rejoinLeaderElection(core); - return; - } - - } - - boolean isLeader = true; - if (!isClosed) { - try { - if (replicaType == Replica.Type.TLOG) { - // stop replicate from old leader - zkController.stopReplicationFromLeader(coreName); - if (weAreReplacement) { - try (SolrCore core = cc.getCore(coreName)) { - Future future = core.getUpdateHandler().getUpdateLog().recoverFromCurrentLog(); - if (future != null) { - log.info("Replaying tlog before become new leader"); - future.get(); - } else { - log.info("New leader does not have old tlog to replay"); - } - } - } - } - // in case of leaderVoteWait timeout, a replica with lower term can win the election - if (setTermToMax) { - log.error("WARNING: Potential data loss -- Replica {} became leader after timeout (leaderVoteWait) {}" - , "without being up-to-date with the previous leader", coreNodeName); - zkController.getShardTerms(collection, shardId).setTermEqualsToLeader(coreNodeName); - } - super.runLeaderProcess(weAreReplacement, 0); - try (SolrCore core = cc.getCore(coreName)) { - if (core != null) { - core.getCoreDescriptor().getCloudDescriptor().setLeader(true); - publishActiveIfRegisteredAndNotActive(core); - } else { - return; - } - } - if (log.isInfoEnabled()) { - log.info("I am the new leader: {} {}", ZkCoreNodeProps.getCoreUrl(leaderProps), shardId); - } - - // we made it as leader - send any recovery requests we need to - syncStrategy.requestRecoveries(); - - } catch (SessionExpiredException e) { - throw new SolrException(ErrorCode.SERVER_ERROR, - "ZK session expired - cancelling election for " + collection + " " + shardId); - } catch (Exception e) { - isLeader = false; - SolrException.log(log, "There was a problem trying to register as the leader", e); - - try (SolrCore core = cc.getCore(coreName)) { - - if (core == null) { - if (log.isDebugEnabled()) { - log.debug("SolrCore not found: {} in {}", coreName, cc.getLoadedCoreNames()); - } - return; - } - - core.getCoreDescriptor().getCloudDescriptor().setLeader(false); - - // we could not publish ourselves as leader - try and rejoin election - try { - rejoinLeaderElection(core); - } catch (SessionExpiredException exc) { - throw new SolrException(ErrorCode.SERVER_ERROR, - "ZK session expired - cancelling election for " + collection + " " + shardId); - } - } - } - } else { - cancelElection(); - } - } finally { - MDCLoggingContext.clear(); - } - } - - /** - * Wait for other replicas with higher terms participate in the electioon - * @return true if after {@code timeout} there are no other replicas with higher term participate in the election, - * false if otherwise - */ - private boolean waitForEligibleBecomeLeaderAfterTimeout(ZkShardTerms zkShardTerms, String coreNodeName, int timeout) throws InterruptedException { - long timeoutAt = System.nanoTime() 
+ TimeUnit.NANOSECONDS.convert(timeout, TimeUnit.MILLISECONDS); - while (!isClosed && !cc.isShutDown()) { - if (System.nanoTime() > timeoutAt) { - log.warn("After waiting for {}ms, no other potential leader was found, {} try to become leader anyway (core_term:{}, highest_term:{})", - timeout, coreNodeName, zkShardTerms.getTerm(coreNodeName), zkShardTerms.getHighestTerm()); - return true; - } - if (replicasWithHigherTermParticipated(zkShardTerms, coreNodeName)) { - log.info("Can't become leader, other replicas with higher term participated in leader election"); - return false; - } - Thread.sleep(500L); - } - return false; - } - - /** - * Do other replicas with higher term participated in the election - * @return true if other replicas with higher term participated in the election, false if otherwise - */ - private boolean replicasWithHigherTermParticipated(ZkShardTerms zkShardTerms, String coreNodeName) { - ClusterState clusterState = zkController.getClusterState(); - DocCollection docCollection = clusterState.getCollectionOrNull(collection); - Slice slices = (docCollection == null) ? null : docCollection.getSlice(shardId); - if (slices == null) return false; - - long replicaTerm = zkShardTerms.getTerm(coreNodeName); - boolean isRecovering = zkShardTerms.isRecovering(coreNodeName); - - for (Replica replica : slices.getReplicas()) { - if (replica.getName().equals(coreNodeName)) continue; - - if (clusterState.getLiveNodes().contains(replica.getNodeName())) { - long otherTerm = zkShardTerms.getTerm(replica.getName()); - boolean isOtherReplicaRecovering = zkShardTerms.isRecovering(replica.getName()); - - if (isRecovering && !isOtherReplicaRecovering) return true; - if (otherTerm > replicaTerm) return true; - } - } - return false; - } - - public void publishActiveIfRegisteredAndNotActive(SolrCore core) throws Exception { - if (core.getCoreDescriptor().getCloudDescriptor().hasRegistered()) { - ZkStateReader zkStateReader = zkController.getZkStateReader(); - zkStateReader.forceUpdateCollection(collection); - ClusterState clusterState = zkStateReader.getClusterState(); - Replica rep = getReplica(clusterState, collection, leaderProps.getStr(ZkStateReader.CORE_NODE_NAME_PROP)); - if (rep == null) return; - if (rep.getState() != Replica.State.ACTIVE || core.getCoreDescriptor().getCloudDescriptor().getLastPublished() != Replica.State.ACTIVE) { - log.debug("We have become the leader after core registration but are not in an ACTIVE state - publishing ACTIVE"); - zkController.publish(core.getCoreDescriptor(), Replica.State.ACTIVE); - } - } - } - - private Replica getReplica(ClusterState clusterState, String collectionName, String replicaName) { - if (clusterState == null) return null; - final DocCollection docCollection = clusterState.getCollectionOrNull(collectionName); - if (docCollection == null) return null; - return docCollection.getReplica(replicaName); - } - - // returns true if all replicas are found to be up, false if not - private boolean waitForReplicasToComeUp(int timeoutms) throws InterruptedException { - long timeoutAt = System.nanoTime() + TimeUnit.NANOSECONDS.convert(timeoutms, TimeUnit.MILLISECONDS); - final String shardsElectZkPath = electionPath + LeaderElector.ELECTION_NODE; - - DocCollection docCollection = zkController.getClusterState().getCollectionOrNull(collection); - Slice slices = (docCollection == null) ? 
null : docCollection.getSlice(shardId); - int cnt = 0; - while (!isClosed && !cc.isShutDown()) { - // wait for everyone to be up - if (slices != null) { - int found = 0; - try { - found = zkClient.getChildren(shardsElectZkPath, null, true).size(); - } catch (KeeperException e) { - if (e instanceof KeeperException.SessionExpiredException) { - // if the session has expired, then another election will be launched, so - // quit here - throw new SolrException(ErrorCode.SERVER_ERROR, - "ZK session expired - cancelling election for " + collection + " " + shardId); - } - SolrException.log(log, - "Error checking for the number of election participants", e); - } - - // on startup and after connection timeout, wait for all known shards - if (found >= slices.getReplicas(EnumSet.of(Replica.Type.TLOG, Replica.Type.NRT)).size()) { - log.info("Enough replicas found to continue."); - return true; - } else { - if (cnt % 40 == 0) { - if (log.isInfoEnabled()) { - log.info("Waiting until we see more replicas up for shard {}: total={} found={} timeoute in={}ms" - , shardId, slices.getReplicas(EnumSet.of(Replica.Type.TLOG, Replica.Type.NRT)).size(), found, - TimeUnit.MILLISECONDS.convert(timeoutAt - System.nanoTime(), TimeUnit.NANOSECONDS)); - } - } - } - - if (System.nanoTime() > timeoutAt) { - log.info("Was waiting for replicas to come up, but they are taking too long - assuming they won't come back till later"); - return false; - } - } else { - log.warn("Shard not found: {} for collection {}", shardId, collection); - - return false; - - } - - Thread.sleep(500); - docCollection = zkController.getClusterState().getCollectionOrNull(collection); - slices = (docCollection == null) ? null : docCollection.getSlice(shardId); - cnt++; - } - return false; - } - - // returns true if all replicas are found to be up, false if not - private boolean areAllReplicasParticipating() throws InterruptedException { - final String shardsElectZkPath = electionPath + LeaderElector.ELECTION_NODE; - final DocCollection docCollection = zkController.getClusterState().getCollectionOrNull(collection); - - if (docCollection != null && docCollection.getSlice(shardId) != null) { - final Slice slices = docCollection.getSlice(shardId); - int found = 0; - try { - found = zkClient.getChildren(shardsElectZkPath, null, true).size(); - } catch (KeeperException e) { - if (e instanceof KeeperException.SessionExpiredException) { - // if the session has expired, then another election will be launched, so - // quit here - throw new SolrException(ErrorCode.SERVER_ERROR, - "ZK session expired - cancelling election for " + collection + " " + shardId); - } - SolrException.log(log, "Error checking for the number of election participants", e); - } - - if (found >= slices.getReplicasMap().size()) { - log.debug("All replicas are ready to participate in election."); - return true; - } - } else { - log.warn("Shard not found: {} for collection {}", shardId, collection); - return false; - } - return false; - } - - private void rejoinLeaderElection(SolrCore core) - throws InterruptedException, KeeperException, IOException { - // remove our ephemeral and re join the election - if (cc.isShutDown()) { - log.debug("Not rejoining election because CoreContainer is closed"); - return; - } - - log.info("There may be a better leader candidate than us - going back into recovery"); - - cancelElection(); - - core.getUpdateHandler().getSolrCoreState().doRecovery(cc, core.getCoreDescriptor()); - - leaderElector.joinElection(this, true); - } - -} - -final class 
OverseerElectionContext extends ElectionContext { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private final SolrZkClient zkClient; - private final Overseer overseer; - private volatile boolean isClosed = false; - - public OverseerElectionContext(SolrZkClient zkClient, Overseer overseer, final String zkNodeName) { - super(zkNodeName, Overseer.OVERSEER_ELECT, Overseer.OVERSEER_ELECT + "/leader", null, zkClient); - this.overseer = overseer; - this.zkClient = zkClient; - try { - new ZkCmdExecutor(zkClient.getZkClientTimeout()).ensureExists(Overseer.OVERSEER_ELECT, zkClient); - } catch (KeeperException e) { - throw new SolrException(ErrorCode.SERVER_ERROR, e); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new SolrException(ErrorCode.SERVER_ERROR, e); - } - } - - @Override - void runLeaderProcess(boolean weAreReplacement, int pauseBeforeStartMs) throws KeeperException, - InterruptedException { - if (isClosed) { - return; - } - log.info("I am going to be the leader {}", id); - final String id = leaderSeqPath - .substring(leaderSeqPath.lastIndexOf("/") + 1); - ZkNodeProps myProps = new ZkNodeProps(ID, id); - - zkClient.makePath(leaderPath, Utils.toJSON(myProps), - CreateMode.EPHEMERAL, true); - if(pauseBeforeStartMs >0){ - try { - Thread.sleep(pauseBeforeStartMs); - } catch (InterruptedException e) { - Thread.interrupted(); - log.warn("Wait interrupted ", e); - } - } - synchronized (this) { - if (!this.isClosed && !overseer.getZkController().getCoreContainer().isShutDown()) { - overseer.start(id); - } - } - } - - @Override - public void cancelElection() throws InterruptedException, KeeperException { - super.cancelElection(); - overseer.close(); - } - - @Override - public synchronized void close() { - this.isClosed = true; - overseer.close(); - } - - @Override - public ElectionContext copy() { - return new OverseerElectionContext(zkClient, overseer ,id); - } - - @Override - public void joinedElectionFired() { - overseer.close(); - } - - @Override - public void checkIfIamLeaderFired() { - // leader changed - close the overseer - overseer.close(); - } -} diff --git a/solr/core/src/java/org/apache/solr/cloud/ExclusiveSliceProperty.java b/solr/core/src/java/org/apache/solr/cloud/ExclusiveSliceProperty.java index bd9de94b6e17..448f4553c990 100644 --- a/solr/core/src/java/org/apache/solr/cloud/ExclusiveSliceProperty.java +++ b/solr/core/src/java/org/apache/solr/cloud/ExclusiveSliceProperty.java @@ -362,7 +362,7 @@ private static class SliceReplica { this.replica = replica; } public String toString() { - StringBuilder sb = new StringBuilder(System.lineSeparator()).append(System.lineSeparator()).append("******EOE20 starting toString of SliceReplica"); + StringBuilder sb = new StringBuilder(System.lineSeparator()).append(System.lineSeparator()); sb.append(" :").append(System.lineSeparator()).append("slice: ").append(slice.toString()).append(System.lineSeparator()).append(" replica: ").append(replica.toString()).append(System.lineSeparator()); return sb.toString(); } diff --git a/solr/core/src/java/org/apache/solr/cloud/Overseer.java b/solr/core/src/java/org/apache/solr/cloud/Overseer.java index 9df6a2d7e831..1c18acc0803f 100644 --- a/solr/core/src/java/org/apache/solr/cloud/Overseer.java +++ b/solr/core/src/java/org/apache/solr/cloud/Overseer.java @@ -35,9 +35,9 @@ import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.cloud.SolrCloudManager; import 
org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.client.solrj.impl.ClusterStateProvider; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.response.CollectionAdminResponse; +import org.apache.solr.cloud.api.collections.CreateCollectionCmd; import org.apache.solr.cloud.api.collections.OverseerCollectionMessageHandler; import org.apache.solr.cloud.autoscaling.OverseerTriggerThread; import org.apache.solr.cloud.overseer.ClusterStateMutator; @@ -79,8 +79,61 @@ import com.codahale.metrics.Timer; /** - * Cluster leader. Responsible for processing state updates, node assignments, creating/deleting - * collections, shards, replicas and setting various properties. + *

<p>Cluster leader. Responsible for processing state updates, node assignments, creating/deleting
+ * collections, shards, replicas and setting various properties.</p>
+ *
+ * <p>The Overseer is a single elected node in the SolrCloud cluster that is in charge of interactions with
+ * ZooKeeper that require global synchronization. It also hosts the Collection API implementation and the
+ * Autoscaling framework.</p>
+ *
+ * <p>The Overseer deals with:</p>
+ * <ul>
+ *   <li>Cluster State updates, i.e. updating Collections' state.json files in ZooKeeper, see {@link ClusterStateUpdater},</li>
+ *   <li>Collection API implementation, including Autoscaling replica placement computation, see
+ *       {@link OverseerCollectionConfigSetProcessor} and {@link OverseerCollectionMessageHandler} (and the example below),</li>
+ *   <li>Updating Config Sets, see {@link OverseerCollectionConfigSetProcessor} and {@link OverseerConfigSetMessageHandler},</li>
+ *   <li>Autoscaling triggers, see {@link org.apache.solr.cloud.autoscaling.OverseerTriggerThread}.</li>
+ * </ul>
+ *
+ * <p>The nodes in the cluster communicate with the Overseer over queues implemented in ZooKeeper. There are
+ * essentially two queues:</p>
+ * <ol>
+ *   <li>The state update queue, through which nodes request the Overseer to update the state.json file of a
+ *       Collection in ZooKeeper. This queue is in ZooKeeper at /overseer/queue,</li>
+ *   <li>A queue shared between Collection API and Config Set API requests. This queue is in ZooKeeper at
+ *       /overseer/collection-queue-work.</li>
+ * </ol>
+ *
+ * <p>An example of the steps involved in the Overseer processing a Collection creation API call:</p>
+ * <ol>
+ *   <li>Client uses the Collection API with CREATE action and reaches a node of the cluster,</li>
+ *   <li>The node (via {@link CollectionsHandler}) enqueues the request into the /overseer/collection-queue-work
+ *       queue in ZooKeeper,</li>
+ *   <li>The {@link OverseerCollectionConfigSetProcessor} running on the Overseer node dequeues the message and,
+ *       using an executor service with a maximum pool size of {@link OverseerTaskProcessor#MAX_PARALLEL_TASKS},
+ *       hands it for processing to {@link OverseerCollectionMessageHandler},</li>
+ *   <li>Command {@link CreateCollectionCmd} then executes and does:
+ *     <ol>
+ *       <li>Update some state directly in ZooKeeper (creating the collection znode),</li>
+ *       <li>Compute replica placement on available nodes in the cluster,</li>
+ *       <li>Enqueue a state change request for creating the state.json file for the collection in ZooKeeper.
+ *           This is done by enqueuing a message in /overseer/queue,</li>
+ *       <li>The command then waits for the update to be seen in ZooKeeper...</li>
+ *     </ol></li>
+ *   <li>The {@link ClusterStateUpdater} (also running on the Overseer node) dequeues the state change message and
+ *       creates the state.json file in ZooKeeper for the Collection. All the work of the cluster state updater
+ *       (creations, updates, deletes) is done sequentially for the whole cluster by a single thread.</li>
+ *   <li>The {@link CreateCollectionCmd} sees the state change in ZooKeeper and:
+ *     <ol>
+ *       <li>Builds and sends requests to each node to create the appropriate cores for all the replicas of all
+ *           shards of the collection. Nodes create the replicas and set them to
+ *           {@link org.apache.solr.common.cloud.Replica.State#ACTIVE}.</li>
+ *     </ol></li>
+ *   <li>The collection creation command has succeeded from the Overseer perspective,</li>
+ *   <li>{@link CollectionsHandler} checks the replicas in ZooKeeper and verifies they are all
+ *       {@link org.apache.solr.common.cloud.Replica.State#ACTIVE},</li>
+ *   <li>The client receives a success return.</li>
+ * </ol>
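+ *
+ * <p>For illustration only, a minimal SolrJ sketch of step 1 above; the collection, config set and client
+ * names are example values, not part of the Overseer API:</p>
+ * <pre>
+ *   CollectionAdminRequest.Create create =
+ *       CollectionAdminRequest.createCollection("myCollection", "myConfigSet", 2, 1);
+ *   create.process(solrClient); // handled by CollectionsHandler, enqueued to /overseer/collection-queue-work
+ * </pre>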
*/ public class Overseer implements SolrCloseable { public static final String QUEUE_OPERATION = "operation"; @@ -97,6 +150,12 @@ public class Overseer implements SolrCloseable { enum LeaderStatus {DONT_KNOW, NO, YES} + /** + *

<p>This class is responsible for dequeueing state change requests from the ZooKeeper queue at /overseer/queue
+ * and executing the requested cluster change (essentially writing or updating state.json for a collection).</p>
+ *
+ * <p>The cluster state updater is a single thread dequeueing and executing requests.</p>
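+ *
+ * <p>For example, {@link ShardLeaderElectionContext} asks for the shard leader to be cleared by offering a
+ * message to this queue:</p>
+ * <pre>
+ *   ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.LEADER.toLower(),
+ *       ZkStateReader.SHARD_ID_PROP, shardId, ZkStateReader.COLLECTION_PROP, collection);
+ *   zkController.getOverseer().getStateUpdateQueue().offer(Utils.toJSON(m));
+ * </pre>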

+ */ private class ClusterStateUpdater implements Runnable, Closeable { private final ZkStateReader reader; @@ -244,7 +303,7 @@ else if (LeaderStatus.YES != isLeader) { byte[] data = head.second(); final ZkNodeProps message = ZkNodeProps.load(data); if (log.isDebugEnabled()) { - log.debug("processMessage: queueSize: {}, message = {} current state version: {}", stateUpdateQueue.getZkStats().getQueueLength(), message, clusterState.getZkClusterStateVersion()); + log.debug("processMessage: queueSize: {}, message = {}", stateUpdateQueue.getZkStats().getQueueLength(), message); } processedNodes.add(head.first()); @@ -344,6 +403,7 @@ private void checkIfIamStillLeader() { return; } try { + @SuppressWarnings({"rawtypes"}) Map m = (Map) Utils.fromJSON(data); String id = (String) m.get(ID); if(overseerCollectionConfigSetProcessor.getId().equals(id)){ @@ -401,8 +461,6 @@ private List processMessage(ClusterState clusterState, case MODIFYCOLLECTION: CollectionsHandler.verifyRuleParams(zkController.getCoreContainer() ,message.getProperties()); return Collections.singletonList(new CollectionMutator(getSolrCloudManager()).modifyCollection(clusterState,message)); - case MIGRATESTATEFORMAT: - return Collections.singletonList(new ClusterStateMutator(getSolrCloudManager()).migrateStateFormat(clusterState, message)); default: throw new RuntimeException("unknown operation:" + operation + " contents:" + message.getProperties()); @@ -664,7 +722,9 @@ private void doCompatCheck(BiConsumer consumer) { .setWithSegments(true) .setWithFieldInfo(true); CollectionAdminResponse rsp = req.process(client); + @SuppressWarnings({"unchecked"}) NamedList status = (NamedList)rsp.getResponse().get(CollectionAdminParams.SYSTEM_COLL); + @SuppressWarnings({"unchecked"}) Collection nonCompliant = (Collection)status.get("schemaNonCompliant"); if (!nonCompliant.contains("(NONE)")) { consumer.accept("indexFieldsNotMatchingSchema", nonCompliant); @@ -675,16 +735,20 @@ private void doCompatCheck(BiConsumer consumer) { String currentVersion = Version.LATEST.toString(); segmentVersions.add(currentVersion); segmentCreatedMajorVersions.add(currentMajorVersion); + @SuppressWarnings({"unchecked"}) NamedList shards = (NamedList)status.get("shards"); for (Map.Entry entry : shards) { + @SuppressWarnings({"unchecked"}) NamedList leader = (NamedList)((NamedList)entry.getValue()).get("leader"); if (leader == null) { continue; } + @SuppressWarnings({"unchecked"}) NamedList segInfos = (NamedList)leader.get("segInfos"); if (segInfos == null) { continue; } + @SuppressWarnings({"unchecked"}) NamedList infos = (NamedList)segInfos.get("info"); if (((Number)infos.get("numSegments")).intValue() > 0) { segmentVersions.add(infos.get("minSegmentLuceneVersion").toString()); @@ -692,8 +756,10 @@ private void doCompatCheck(BiConsumer consumer) { if (infos.get("commitLuceneVersion") != null) { segmentVersions.add(infos.get("commitLuceneVersion").toString()); } + @SuppressWarnings({"unchecked"}) NamedList segmentInfos = (NamedList)segInfos.get("segments"); segmentInfos.forEach((k, v) -> { + @SuppressWarnings({"unchecked"}) NamedList segment = (NamedList)v; segmentVersions.add(segment.get("version").toString()); if (segment.get("minVersion") != null) { @@ -974,16 +1040,6 @@ private void createOverseerNode(final SolrZkClient zkClient) { } } - public static boolean isLegacy(ZkStateReader stateReader) { - String legacyProperty = stateReader.getClusterProperty(ZkStateReader.LEGACY_CLOUD, "false"); - return "true".equals(legacyProperty); - } - - public static boolean 
isLegacy(ClusterStateProvider clusterStateProvider) { - String legacyProperty = clusterStateProvider.getClusterProperty(ZkStateReader.LEGACY_CLOUD, "false"); - return "true".equals(legacyProperty); - } - public ZkStateReader getZkStateReader() { return reader; } diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerConfigSetMessageHandler.java b/solr/core/src/java/org/apache/solr/cloud/OverseerConfigSetMessageHandler.java index 83d4c65167ba..ebb460dce35f 100644 --- a/solr/core/src/java/org/apache/solr/cloud/OverseerConfigSetMessageHandler.java +++ b/solr/core/src/java/org/apache/solr/cloud/OverseerConfigSetMessageHandler.java @@ -77,19 +77,23 @@ public class OverseerConfigSetMessageHandler implements OverseerMessageHandler { // in this way, we prevent a Base ConfigSet from being deleted while it is being copied // but don't prevent different ConfigSets from being created with the same Base ConfigSet // at the same time. + @SuppressWarnings({"rawtypes"}) final private Set configSetWriteWip; + @SuppressWarnings({"rawtypes"}) final private Set configSetReadWip; private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); public OverseerConfigSetMessageHandler(ZkStateReader zkStateReader) { this.zkStateReader = zkStateReader; - this.configSetWriteWip = new HashSet(); - this.configSetReadWip = new HashSet(); + this.configSetWriteWip = new HashSet<>(); + this.configSetReadWip = new HashSet<>(); } @Override + @SuppressWarnings({"unchecked"}) public OverseerSolrResponse processMessage(ZkNodeProps message, String operation) { + @SuppressWarnings({"rawtypes"}) NamedList results = new NamedList(); try { if (!operation.startsWith(CONFIGSETS_ACTION_PREFIX)) { @@ -126,6 +130,7 @@ public OverseerSolrResponse processMessage(ZkNodeProps message, String operation } results.add("Operation " + operation + " caused exception:", e); + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap nl = new SimpleOrderedMap(); nl.add("msg", e.getMessage()); nl.add("rspCode", e instanceof SolrException ? 
((SolrException) e).code() : -1); @@ -165,6 +170,7 @@ private void markExclusiveTask(String configSetName, ZkNodeProps message) { markExclusive(configSetName, baseConfigSet); } + @SuppressWarnings({"unchecked"}) private void markExclusive(String configSetName, String baseConfigSetName) { synchronized (configSetWriteWip) { configSetWriteWip.add(configSetName); @@ -220,6 +226,7 @@ private String getBaseConfigSetIfCreate(ZkNodeProps message) { return null; } + @SuppressWarnings({"rawtypes"}) private NamedList getConfigSetProperties(String path) throws IOException { byte[] oldPropsData = null; try { @@ -256,7 +263,8 @@ private Map getNewProperties(ZkNodeProps message) { return properties; } - private void mergeOldProperties(Map newProps, NamedList oldProps) { + private void mergeOldProperties(Map newProps, @SuppressWarnings({"rawtypes"})NamedList oldProps) { + @SuppressWarnings({"unchecked"}) Iterator> it = oldProps.iterator(); while (it.hasNext()) { Map.Entry oldEntry = it.next(); @@ -304,6 +312,7 @@ private void createConfigSet(ZkNodeProps message) throws IOException { Map props = getNewProperties(message); if (props != null) { // read the old config properties and do a merge, if necessary + @SuppressWarnings({"rawtypes"}) NamedList oldProps = getConfigSetProperties(getPropertyPath(baseConfigSetName, propertyPath)); if (oldProps != null) { mergeOldProperties(props, oldProps); @@ -370,6 +379,7 @@ private void deleteConfigSet(String configSetName, boolean force) throws IOExcep } String propertyPath = ConfigSetProperties.DEFAULT_FILENAME; + @SuppressWarnings({"rawtypes"}) NamedList properties = getConfigSetProperties(getPropertyPath(configSetName, propertyPath)); if (properties != null) { Object immutable = properties.get(ConfigSetProperties.IMMUTABLE_CONFIGSET_ARG); diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerElectionContext.java b/solr/core/src/java/org/apache/solr/cloud/OverseerElectionContext.java new file mode 100644 index 000000000000..e25befa79cbb --- /dev/null +++ b/solr/core/src/java/org/apache/solr/cloud/OverseerElectionContext.java @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.solr.cloud; + +import java.lang.invoke.MethodHandles; +import org.apache.solr.common.SolrException; +import org.apache.solr.common.SolrException.ErrorCode; +import org.apache.solr.common.cloud.SolrZkClient; +import org.apache.solr.common.cloud.ZkCmdExecutor; +import org.apache.solr.common.cloud.ZkNodeProps; +import org.apache.solr.common.util.Utils; +import org.apache.zookeeper.CreateMode; +import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static org.apache.solr.common.params.CommonParams.ID; + +final class OverseerElectionContext extends ElectionContext { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final SolrZkClient zkClient; + private final Overseer overseer; + private volatile boolean isClosed = false; + + public OverseerElectionContext(SolrZkClient zkClient, Overseer overseer, final String zkNodeName) { + super(zkNodeName, Overseer.OVERSEER_ELECT, Overseer.OVERSEER_ELECT + "/leader", null, zkClient); + this.overseer = overseer; + this.zkClient = zkClient; + try { + new ZkCmdExecutor(zkClient.getZkClientTimeout()).ensureExists(Overseer.OVERSEER_ELECT, zkClient); + } catch (KeeperException e) { + throw new SolrException(ErrorCode.SERVER_ERROR, e); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new SolrException(ErrorCode.SERVER_ERROR, e); + } + } + + @Override + void runLeaderProcess(boolean weAreReplacement, int pauseBeforeStartMs) throws KeeperException, + InterruptedException { + if (isClosed) { + return; + } + log.info("I am going to be the leader {}", id); + final String id = leaderSeqPath + .substring(leaderSeqPath.lastIndexOf("/") + 1); + ZkNodeProps myProps = new ZkNodeProps(ID, id); + + zkClient.makePath(leaderPath, Utils.toJSON(myProps), + CreateMode.EPHEMERAL, true); + if (pauseBeforeStartMs > 0) { + try { + Thread.sleep(pauseBeforeStartMs); + } catch (InterruptedException e) { + Thread.interrupted(); + log.warn("Wait interrupted ", e); + } + } + synchronized (this) { + if (!this.isClosed && !overseer.getZkController().getCoreContainer().isShutDown()) { + overseer.start(id); + } + } + } + + @Override + public void cancelElection() throws InterruptedException, KeeperException { + super.cancelElection(); + overseer.close(); + } + + @Override + public synchronized void close() { + this.isClosed = true; + overseer.close(); + } + + @Override + public ElectionContext copy() { + return new OverseerElectionContext(zkClient, overseer, id); + } + + @Override + public void joinedElectionFired() { + overseer.close(); + } + + @Override + public void checkIfIamLeaderFired() { + // leader changed - close the overseer + overseer.close(); + } + +} diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java b/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java index 125f98b8b92d..20e650ae8d80 100644 --- a/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java +++ b/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java @@ -66,8 +66,10 @@ public OverseerNodePrioritizer(ZkStateReader zkStateReader, ZkDistributedQueue s public synchronized void prioritizeOverseerNodes(String overseerId) throws Exception { SolrZkClient zk = zkStateReader.getZkClient(); if(!zk.exists(ZkStateReader.ROLES,true))return; + @SuppressWarnings({"rawtypes"}) Map m = (Map) Utils.fromJSON(zk.getData(ZkStateReader.ROLES, null, new Stat(), true)); + 
@SuppressWarnings({"rawtypes"}) List overseerDesignates = (List) m.get("overseer"); if(overseerDesignates==null || overseerDesignates.isEmpty()) return; String ldr = OverseerTaskProcessor.getLeaderNode(zk); diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerSolrResponse.java b/solr/core/src/java/org/apache/solr/cloud/OverseerSolrResponse.java index 92f6443eaced..4257a762ab3d 100644 --- a/solr/core/src/java/org/apache/solr/cloud/OverseerSolrResponse.java +++ b/solr/core/src/java/org/apache/solr/cloud/OverseerSolrResponse.java @@ -21,11 +21,12 @@ public class OverseerSolrResponse extends SolrResponse { + @SuppressWarnings({"rawtypes"}) NamedList responseList = null; private long elapsedTime; - public OverseerSolrResponse(NamedList list) { + public OverseerSolrResponse(@SuppressWarnings({"rawtypes"})NamedList list) { responseList = list; } @@ -45,6 +46,7 @@ public void setElapsedTime(long elapsedTime) { } @Override + @SuppressWarnings({"unchecked"}) public NamedList getResponse() { return responseList; } diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java index 786a71846379..cf860335f3b5 100644 --- a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java +++ b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java @@ -421,6 +421,7 @@ public static String getLeaderId(SolrZkClient zkClient) throws KeeperException,I } catch (KeeperException.NoNodeException e) { return null; } + @SuppressWarnings({"rawtypes"}) Map m = (Map) Utils.fromJSON(data); return (String) m.get(ID); } diff --git a/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java b/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java index 9d6f1608e0f9..2be35fbcbd2c 100644 --- a/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java +++ b/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java @@ -79,14 +79,16 @@ public class RecoveryStrategy implements Runnable, Closeable { public static class Builder implements NamedListInitializedPlugin { + @SuppressWarnings({"rawtypes"}) private NamedList args; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { this.args = args; } // this should only be used from SolrCoreState + @SuppressWarnings({"unchecked"}) public RecoveryStrategy create(CoreContainer cc, CoreDescriptor cd, RecoveryStrategy.RecoveryListener recoveryListener) { final RecoveryStrategy recoveryStrategy = newRecoveryStrategy(cc, cd, recoveryListener); @@ -641,9 +643,11 @@ public final void doSyncOrReplicateRecovery(SolrCore core) throws Exception { } // System.out.println("Attempting to PeerSync from " + leaderUrl // + " i am:" + zkController.getNodeName()); - PeerSyncWithLeader peerSyncWithLeader = new PeerSyncWithLeader(core, - leader.getCoreUrl(), ulog.getNumRecordsToKeep()); - boolean syncSuccess = peerSyncWithLeader.sync(recentVersions).isSuccess(); + boolean syncSuccess; + try (PeerSyncWithLeader peerSyncWithLeader = new PeerSyncWithLeader(core, + leader.getCoreUrl(), ulog.getNumRecordsToKeep())) { + syncSuccess = peerSyncWithLeader.sync(recentVersions).isSuccess(); + } if (syncSuccess) { SolrQueryRequest req = new LocalSolrQueryRequest(core, new ModifiableSolrParams()); diff --git a/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContext.java b/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContext.java new file mode 100644 index 000000000000..f6c96caf2052 --- /dev/null 
+++ b/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContext.java @@ -0,0 +1,493 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.cloud; + +import java.io.IOException; +import java.lang.invoke.MethodHandles; +import java.util.EnumSet; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; + +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.solr.cloud.overseer.OverseerAction; +import org.apache.solr.common.SolrException; +import org.apache.solr.common.SolrException.ErrorCode; +import org.apache.solr.common.cloud.ClusterState; +import org.apache.solr.common.cloud.DocCollection; +import org.apache.solr.common.cloud.Replica; +import org.apache.solr.common.cloud.Slice; +import org.apache.solr.common.cloud.ZkCoreNodeProps; +import org.apache.solr.common.cloud.ZkNodeProps; +import org.apache.solr.common.cloud.ZkStateReader; +import org.apache.solr.common.util.Utils; +import org.apache.solr.core.CoreContainer; +import org.apache.solr.core.SolrCore; +import org.apache.solr.logging.MDCLoggingContext; +import org.apache.solr.search.SolrIndexSearcher; +import org.apache.solr.update.PeerSync; +import org.apache.solr.update.UpdateLog; +import org.apache.solr.util.RefCounted; +import org.apache.zookeeper.KeeperException; +import org.apache.zookeeper.KeeperException.SessionExpiredException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +// add core container and stop passing core around... 
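+// Election flow, in brief: runLeaderProcess() first syncs with the other replicas via SyncStrategy, then
+// registers the leader znode through ShardLeaderElectionContextBase.runLeaderProcess(); if sync or
+// registration fails it falls back to rejoinLeaderElection(), which cancels the election and triggers recovery.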
+final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + private final CoreContainer cc; + private final SyncStrategy syncStrategy; + + private volatile boolean isClosed = false; + + public ShardLeaderElectionContext(LeaderElector leaderElector, + final String shardId, final String collection, + final String coreNodeName, ZkNodeProps props, ZkController zkController, CoreContainer cc) { + super(leaderElector, shardId, collection, coreNodeName, props, + zkController); + this.cc = cc; + syncStrategy = new SyncStrategy(cc); + } + + @Override + public void close() { + super.close(); + this.isClosed = true; + syncStrategy.close(); + } + + @Override + public void cancelElection() throws InterruptedException, KeeperException { + String coreName = leaderProps.getStr(ZkStateReader.CORE_NAME_PROP); + try (SolrCore core = cc.getCore(coreName)) { + if (core != null) { + core.getCoreDescriptor().getCloudDescriptor().setLeader(false); + } + } + + super.cancelElection(); + } + + @Override + public ElectionContext copy() { + return new ShardLeaderElectionContext(leaderElector, shardId, collection, id, leaderProps, zkController, cc); + } + + /* + * weAreReplacement: has someone else been the leader already? + */ + @Override + void runLeaderProcess(boolean weAreReplacement, int pauseBeforeStart) throws KeeperException, + InterruptedException, IOException { + String coreName = leaderProps.getStr(ZkStateReader.CORE_NAME_PROP); + ActionThrottle lt; + try (SolrCore core = cc.getCore(coreName)) { + if (core == null) { + // shutdown or removed + return; + } + MDCLoggingContext.setCore(core); + lt = core.getUpdateHandler().getSolrCoreState().getLeaderThrottle(); + } + + try { + lt.minimumWaitBetweenActions(); + lt.markAttemptingAction(); + + + int leaderVoteWait = cc.getZkController().getLeaderVoteWait(); + + log.debug("Running the leader process for shard={} and weAreReplacement={} and leaderVoteWait={}", shardId, weAreReplacement, leaderVoteWait); + if (zkController.getClusterState().getCollection(collection).getSlice(shardId).getReplicas().size() > 1) { + // Clear the leader in clusterstate. We only need to worry about this if there is actually more than one replica. + ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.LEADER.toLower(), + ZkStateReader.SHARD_ID_PROP, shardId, ZkStateReader.COLLECTION_PROP, collection); + zkController.getOverseer().getStateUpdateQueue().offer(Utils.toJSON(m)); + } + + boolean allReplicasInLine = false; + if (!weAreReplacement) { + allReplicasInLine = waitForReplicasToComeUp(leaderVoteWait); + } else { + allReplicasInLine = areAllReplicasParticipating(); + } + + if (isClosed) { + // Solr is shutting down or the ZooKeeper session expired while waiting for replicas. If the later, + // we cannot be sure we are still the leader, so we should bail out. The OnReconnect handler will + // re-register the cores and handle a new leadership election. + return; + } + + Replica.Type replicaType; + String coreNodeName; + boolean setTermToMax = false; + try (SolrCore core = cc.getCore(coreName)) { + + if (core == null) { + return; + } + + replicaType = core.getCoreDescriptor().getCloudDescriptor().getReplicaType(); + coreNodeName = core.getCoreDescriptor().getCloudDescriptor().getCoreNodeName(); + // should I be leader? 
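+      // A replica can only safely become leader if its shard term is high enough, i.e. it has seen all
+      // accepted updates. If our term is lower, wait up to leaderVoteWait for a more up-to-date replica to
+      // join the election; if none shows up, we may still become leader and force our term to the max below.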
+ ZkShardTerms zkShardTerms = zkController.getShardTerms(collection, shardId); + if (zkShardTerms.registered(coreNodeName) && !zkShardTerms.canBecomeLeader(coreNodeName)) { + if (!waitForEligibleBecomeLeaderAfterTimeout(zkShardTerms, coreNodeName, leaderVoteWait)) { + rejoinLeaderElection(core); + return; + } else { + // only log an error if this replica win the election + setTermToMax = true; + } + } + + if (isClosed) { + return; + } + + log.info("I may be the new leader - try and sync"); + + // we are going to attempt to be the leader + // first cancel any current recovery + core.getUpdateHandler().getSolrCoreState().cancelRecovery(); + + if (weAreReplacement) { + // wait a moment for any floating updates to finish + try { + Thread.sleep(2500); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE, e); + } + } + + PeerSync.PeerSyncResult result = null; + boolean success = false; + try { + result = syncStrategy.sync(zkController, core, leaderProps, weAreReplacement); + success = result.isSuccess(); + } catch (Exception e) { + SolrException.log(log, "Exception while trying to sync", e); + result = PeerSync.PeerSyncResult.failure(); + } + + UpdateLog ulog = core.getUpdateHandler().getUpdateLog(); + + if (!success) { + boolean hasRecentUpdates = false; + if (ulog != null) { + // TODO: we could optimize this if necessary + try (UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates()) { + hasRecentUpdates = !recentUpdates.getVersions(1).isEmpty(); + } + } + + if (!hasRecentUpdates) { + // we failed sync, but we have no versions - we can't sync in that case + // - we were active + // before, so become leader anyway if no one else has any versions either + if (result.getOtherHasVersions().orElse(false)) { + log.info("We failed sync, but we have no versions - we can't sync in that case. 
But others have some versions, so we should not become leader"); + success = false; + } else { + log.info( + "We failed sync, but we have no versions - we can't sync in that case - we were active before, so become leader anyway"); + success = true; + } + } + } + + // solrcloud_debug + if (log.isDebugEnabled()) { + try { + RefCounted searchHolder = core.getNewestSearcher(false); + SolrIndexSearcher searcher = searchHolder.get(); + try { + if (log.isDebugEnabled()) { + log.debug("{} synched {}", core.getCoreContainer().getZkController().getNodeName() + , searcher.count(new MatchAllDocsQuery())); + } + } finally { + searchHolder.decref(); + } + } catch (Exception e) { + log.error("Error in solrcloud_debug block", e); + } + } + if (!success) { + rejoinLeaderElection(core); + return; + } + + } + + boolean isLeader = true; + if (!isClosed) { + try { + if (replicaType == Replica.Type.TLOG) { + // stop replicate from old leader + zkController.stopReplicationFromLeader(coreName); + if (weAreReplacement) { + try (SolrCore core = cc.getCore(coreName)) { + Future future = core.getUpdateHandler().getUpdateLog().recoverFromCurrentLog(); + if (future != null) { + log.info("Replaying tlog before become new leader"); + future.get(); + } else { + log.info("New leader does not have old tlog to replay"); + } + } + } + } + // in case of leaderVoteWait timeout, a replica with lower term can win the election + if (setTermToMax) { + log.error("WARNING: Potential data loss -- Replica {} became leader after timeout (leaderVoteWait) {}" + , "without being up-to-date with the previous leader", coreNodeName); + zkController.getShardTerms(collection, shardId).setTermEqualsToLeader(coreNodeName); + } + super.runLeaderProcess(weAreReplacement, 0); + try (SolrCore core = cc.getCore(coreName)) { + if (core != null) { + core.getCoreDescriptor().getCloudDescriptor().setLeader(true); + publishActiveIfRegisteredAndNotActive(core); + } else { + return; + } + } + if (log.isInfoEnabled()) { + log.info("I am the new leader: {} {}", ZkCoreNodeProps.getCoreUrl(leaderProps), shardId); + } + + // we made it as leader - send any recovery requests we need to + syncStrategy.requestRecoveries(); + + } catch (SessionExpiredException e) { + throw new SolrException(ErrorCode.SERVER_ERROR, + "ZK session expired - cancelling election for " + collection + " " + shardId); + } catch (Exception e) { + isLeader = false; + SolrException.log(log, "There was a problem trying to register as the leader", e); + + try (SolrCore core = cc.getCore(coreName)) { + + if (core == null) { + if (log.isDebugEnabled()) { + log.debug("SolrCore not found: {} in {}", coreName, cc.getLoadedCoreNames()); + } + return; + } + + core.getCoreDescriptor().getCloudDescriptor().setLeader(false); + + // we could not publish ourselves as leader - try and rejoin election + try { + rejoinLeaderElection(core); + } catch (SessionExpiredException exc) { + throw new SolrException(ErrorCode.SERVER_ERROR, + "ZK session expired - cancelling election for " + collection + " " + shardId); + } + } + } + } else { + cancelElection(); + } + } finally { + MDCLoggingContext.clear(); + } + } + + /** + * Wait for other replicas with higher terms participate in the electioon + * + * @return true if after {@code timeout} there are no other replicas with higher term participate in the election, + * false if otherwise + */ + private boolean waitForEligibleBecomeLeaderAfterTimeout(ZkShardTerms zkShardTerms, String coreNodeName, int timeout) throws InterruptedException { + long timeoutAt = 
System.nanoTime() + TimeUnit.NANOSECONDS.convert(timeout, TimeUnit.MILLISECONDS); + while (!isClosed && !cc.isShutDown()) { + if (System.nanoTime() > timeoutAt) { + log.warn("After waiting for {}ms, no other potential leader was found, {} try to become leader anyway (core_term:{}, highest_term:{})", + timeout, coreNodeName, zkShardTerms.getTerm(coreNodeName), zkShardTerms.getHighestTerm()); + return true; + } + if (replicasWithHigherTermParticipated(zkShardTerms, coreNodeName)) { + log.info("Can't become leader, other replicas with higher term participated in leader election"); + return false; + } + Thread.sleep(500L); + } + return false; + } + + /** + * Do other replicas with higher term participated in the election + * + * @return true if other replicas with higher term participated in the election, false if otherwise + */ + private boolean replicasWithHigherTermParticipated(ZkShardTerms zkShardTerms, String coreNodeName) { + ClusterState clusterState = zkController.getClusterState(); + DocCollection docCollection = clusterState.getCollectionOrNull(collection); + Slice slices = (docCollection == null) ? null : docCollection.getSlice(shardId); + if (slices == null) return false; + + long replicaTerm = zkShardTerms.getTerm(coreNodeName); + boolean isRecovering = zkShardTerms.isRecovering(coreNodeName); + + for (Replica replica : slices.getReplicas()) { + if (replica.getName().equals(coreNodeName)) continue; + + if (clusterState.getLiveNodes().contains(replica.getNodeName())) { + long otherTerm = zkShardTerms.getTerm(replica.getName()); + boolean isOtherReplicaRecovering = zkShardTerms.isRecovering(replica.getName()); + + if (isRecovering && !isOtherReplicaRecovering) return true; + if (otherTerm > replicaTerm) return true; + } + } + return false; + } + + public void publishActiveIfRegisteredAndNotActive(SolrCore core) throws Exception { + if (core.getCoreDescriptor().getCloudDescriptor().hasRegistered()) { + ZkStateReader zkStateReader = zkController.getZkStateReader(); + zkStateReader.forceUpdateCollection(collection); + ClusterState clusterState = zkStateReader.getClusterState(); + Replica rep = getReplica(clusterState, collection, leaderProps.getStr(ZkStateReader.CORE_NODE_NAME_PROP)); + if (rep == null) return; + if (rep.getState() != Replica.State.ACTIVE || core.getCoreDescriptor().getCloudDescriptor().getLastPublished() != Replica.State.ACTIVE) { + log.debug("We have become the leader after core registration but are not in an ACTIVE state - publishing ACTIVE"); + zkController.publish(core.getCoreDescriptor(), Replica.State.ACTIVE); + } + } + } + + private Replica getReplica(ClusterState clusterState, String collectionName, String replicaName) { + if (clusterState == null) return null; + final DocCollection docCollection = clusterState.getCollectionOrNull(collectionName); + if (docCollection == null) return null; + return docCollection.getReplica(replicaName); + } + + // returns true if all replicas are found to be up, false if not + private boolean waitForReplicasToComeUp(int timeoutms) throws InterruptedException { + long timeoutAt = System.nanoTime() + TimeUnit.NANOSECONDS.convert(timeoutms, TimeUnit.MILLISECONDS); + final String shardsElectZkPath = electionPath + LeaderElector.ELECTION_NODE; + + DocCollection docCollection = zkController.getClusterState().getCollectionOrNull(collection); + Slice slices = (docCollection == null) ? 
null : docCollection.getSlice(shardId); + int cnt = 0; + while (!isClosed && !cc.isShutDown()) { + // wait for everyone to be up + if (slices != null) { + int found = 0; + try { + found = zkClient.getChildren(shardsElectZkPath, null, true).size(); + } catch (KeeperException e) { + if (e instanceof KeeperException.SessionExpiredException) { + // if the session has expired, then another election will be launched, so + // quit here + throw new SolrException(ErrorCode.SERVER_ERROR, + "ZK session expired - cancelling election for " + collection + " " + shardId); + } + SolrException.log(log, + "Error checking for the number of election participants", e); + } + + // on startup and after connection timeout, wait for all known shards + if (found >= slices.getReplicas(EnumSet.of(Replica.Type.TLOG, Replica.Type.NRT)).size()) { + log.info("Enough replicas found to continue."); + return true; + } else { + if (cnt % 40 == 0) { + if (log.isInfoEnabled()) { + log.info("Waiting until we see more replicas up for shard {}: total={} found={} timeoute in={}ms" + , shardId, slices.getReplicas(EnumSet.of(Replica.Type.TLOG, Replica.Type.NRT)).size(), found, + TimeUnit.MILLISECONDS.convert(timeoutAt - System.nanoTime(), TimeUnit.NANOSECONDS)); + } + } + } + + if (System.nanoTime() > timeoutAt) { + log.info("Was waiting for replicas to come up, but they are taking too long - assuming they won't come back till later"); + return false; + } + } else { + log.warn("Shard not found: {} for collection {}", shardId, collection); + + return false; + + } + + Thread.sleep(500); + docCollection = zkController.getClusterState().getCollectionOrNull(collection); + slices = (docCollection == null) ? null : docCollection.getSlice(shardId); + cnt++; + } + return false; + } + + // returns true if all replicas are found to be up, false if not + private boolean areAllReplicasParticipating() throws InterruptedException { + final String shardsElectZkPath = electionPath + LeaderElector.ELECTION_NODE; + final DocCollection docCollection = zkController.getClusterState().getCollectionOrNull(collection); + + if (docCollection != null && docCollection.getSlice(shardId) != null) { + final Slice slices = docCollection.getSlice(shardId); + int found = 0; + try { + found = zkClient.getChildren(shardsElectZkPath, null, true).size(); + } catch (KeeperException e) { + if (e instanceof KeeperException.SessionExpiredException) { + // if the session has expired, then another election will be launched, so + // quit here + throw new SolrException(ErrorCode.SERVER_ERROR, + "ZK session expired - cancelling election for " + collection + " " + shardId); + } + SolrException.log(log, "Error checking for the number of election participants", e); + } + + if (found >= slices.getReplicasMap().size()) { + log.debug("All replicas are ready to participate in election."); + return true; + } + } else { + log.warn("Shard not found: {} for collection {}", shardId, collection); + return false; + } + return false; + } + + private void rejoinLeaderElection(SolrCore core) + throws InterruptedException, KeeperException, IOException { + // remove our ephemeral and re join the election + if (cc.isShutDown()) { + log.debug("Not rejoining election because CoreContainer is closed"); + return; + } + + log.info("There may be a better leader candidate than us - going back into recovery"); + + cancelElection(); + + core.getUpdateHandler().getSolrCoreState().doRecovery(cc, core.getCoreDescriptor()); + + leaderElector.joinElection(this, true); + } + +} diff --git 
a/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContextBase.java b/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContextBase.java new file mode 100644 index 000000000000..a9afc8df34e9 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContextBase.java @@ -0,0 +1,194 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.cloud; + +import java.io.IOException; +import java.lang.invoke.MethodHandles; +import java.util.List; +import java.util.ArrayList; + +import org.apache.hadoop.fs.Path; +import org.apache.solr.cloud.overseer.OverseerAction; +import org.apache.solr.common.SolrException; +import org.apache.solr.common.SolrException.ErrorCode; +import org.apache.solr.common.cloud.Replica; +import org.apache.solr.common.cloud.SolrZkClient; +import org.apache.solr.common.cloud.ZkCmdExecutor; +import org.apache.solr.common.cloud.ZkNodeProps; +import org.apache.solr.common.cloud.ZkStateReader; +import org.apache.solr.common.util.RetryUtil; +import org.apache.solr.common.util.Utils; +import org.apache.zookeeper.CreateMode; +import org.apache.zookeeper.KeeperException; +import org.apache.zookeeper.KeeperException.NoNodeException; +import org.apache.zookeeper.KeeperException.NodeExistsException; +import org.apache.zookeeper.Op; +import org.apache.zookeeper.OpResult; +import org.apache.zookeeper.OpResult.SetDataResult; +import org.apache.zookeeper.ZooDefs; +import org.apache.zookeeper.data.Stat; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +class ShardLeaderElectionContextBase extends ElectionContext { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + protected final SolrZkClient zkClient; + protected String shardId; + protected String collection; + protected LeaderElector leaderElector; + protected ZkStateReader zkStateReader; + protected ZkController zkController; + private Integer leaderZkNodeParentVersion; + + // Prevents a race between cancelling and becoming leader. 
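+  // (Both cancelElection() and runLeaderProcess() read and update leaderZkNodeParentVersion under this
+  // lock, so a cancel cannot interleave with the multi() that registers this node as leader.)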
+  private final Object lock = new Object();
+
+  public ShardLeaderElectionContextBase(LeaderElector leaderElector,
+      final String shardId, final String collection, final String coreNodeName,
+      ZkNodeProps props, ZkController zkController) {
+    super(coreNodeName, ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection +
+        "/leader_elect/" + shardId, ZkStateReader.getShardLeadersPath(
+        collection, shardId), props, zkController.getZkClient());
+    this.leaderElector = leaderElector;
+    this.zkStateReader = zkController.getZkStateReader();
+    this.zkClient = zkStateReader.getZkClient();
+    this.zkController = zkController;
+    this.shardId = shardId;
+    this.collection = collection;
+
+    String parent = new Path(leaderPath).getParent().toString();
+    ZkCmdExecutor zcmd = new ZkCmdExecutor(30000);
+    // only if /collections/{collection} exists already do we succeed in creating this path
+    log.info("make sure parent is created {}", parent);
+    try {
+      zcmd.ensureExists(parent, (byte[]) null, CreateMode.PERSISTENT, zkClient, 2);
+    } catch (KeeperException e) {
+      throw new RuntimeException(e);
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  public void cancelElection() throws InterruptedException, KeeperException {
+    super.cancelElection();
+    synchronized (lock) {
+      if (leaderZkNodeParentVersion != null) {
+        try {
+          // We need to be careful and make sure we *only* delete our own leader registration node.
+          // We do this by using a multi and ensuring the parent znode of the leader registration node
+          // matches the version we expect - there is a setData call that increments the parent's znode
+          // version whenever a leader registers.
+          log.debug("Removing leader registration node on cancel: {} {}", leaderPath, leaderZkNodeParentVersion);
+          List<Op> ops = new ArrayList<>(2);
+          ops.add(Op.check(new Path(leaderPath).getParent().toString(), leaderZkNodeParentVersion));
+          ops.add(Op.delete(leaderPath, -1));
+          zkClient.multi(ops, true);
+        } catch (InterruptedException e) {
+          throw e;
+        } catch (IllegalArgumentException e) {
+          SolrException.log(log, e);
+        }
+        leaderZkNodeParentVersion = null;
+      } else {
+        log.info("No version found for ephemeral leader parent node, won't remove previous leader registration.");
+      }
+    }
+  }
+
+  @Override
+  void runLeaderProcess(boolean weAreReplacement, int pauseBeforeStartMs)
+      throws KeeperException, InterruptedException, IOException {
+    // register as leader - if an ephemeral is already there, wait to see if it goes away
+
+    String parent = new Path(leaderPath).getParent().toString();
+    try {
+      RetryUtil.retryOnThrowable(NodeExistsException.class, 60000, 5000, () -> {
+        synchronized (lock) {
+          log.info("Creating leader registration node {} after winning as {}", leaderPath, leaderSeqPath);
+          List<Op> ops = new ArrayList<>(2);
+
+          // We use a multi operation to get the parent node's version, which will
+          // be used to make sure we only remove our own leader registration node.
+          // The setData call used to get the parent version is also the trigger to
+          // increment the version. We also do a sanity check that our leaderSeqPath exists.
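+          // The three operations below therefore execute atomically in one multi(): check that our election
+          // node still exists, create the ephemeral leader node, and setData on the parent to bump its version.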
+ + ops.add(Op.check(leaderSeqPath, -1)); + ops.add(Op.create(leaderPath, Utils.toJSON(leaderProps), zkClient.getZkACLProvider().getACLsToAdd(leaderPath), CreateMode.EPHEMERAL)); + ops.add(Op.setData(parent, null, -1)); + List results; + + results = zkClient.multi(ops, true); + for (OpResult result : results) { + if (result.getType() == ZooDefs.OpCode.setData) { + SetDataResult dresult = (SetDataResult) result; + Stat stat = dresult.getStat(); + leaderZkNodeParentVersion = stat.getVersion(); + return; + } + } + assert leaderZkNodeParentVersion != null; + } + }); + } catch (NoNodeException e) { + log.info("Will not register as leader because it seems the election is no longer taking place."); + return; + } catch (Throwable t) { + if (t instanceof OutOfMemoryError) { + throw (OutOfMemoryError) t; + } + throw new SolrException(ErrorCode.SERVER_ERROR, "Could not register as the leader because creating the ephemeral registration node in ZooKeeper failed", t); + } + + assert shardId != null; + boolean isAlreadyLeader = false; + if (zkStateReader.getClusterState() != null && + zkStateReader.getClusterState().getCollection(collection).getSlice(shardId).getReplicas().size() < 2) { + Replica leader = zkStateReader.getLeader(collection, shardId); + if (leader != null + && leader.getBaseUrl().equals(leaderProps.get(ZkStateReader.BASE_URL_PROP)) + && leader.getCoreName().equals(leaderProps.get(ZkStateReader.CORE_NAME_PROP))) { + isAlreadyLeader = true; + } + } + if (!isAlreadyLeader) { + ZkNodeProps m = ZkNodeProps.fromKeyVals(Overseer.QUEUE_OPERATION, OverseerAction.LEADER.toLower(), + ZkStateReader.SHARD_ID_PROP, shardId, + ZkStateReader.COLLECTION_PROP, collection, + ZkStateReader.BASE_URL_PROP, leaderProps.get(ZkStateReader.BASE_URL_PROP), + ZkStateReader.CORE_NAME_PROP, leaderProps.get(ZkStateReader.CORE_NAME_PROP), + ZkStateReader.STATE_PROP, Replica.State.ACTIVE.toString()); + assert zkController != null; + assert zkController.getOverseer() != null; + zkController.getOverseer().offerStateUpdate(Utils.toJSON(m)); + } + } + + public LeaderElector getLeaderElector() { + return leaderElector; + } + + Integer getLeaderZkNodeParentVersion() { + synchronized (lock) { + return leaderZkNodeParentVersion; + } + } +} \ No newline at end of file diff --git a/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java b/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java index 4d67d2bcd2b9..5a1b8dac6f2b 100644 --- a/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java +++ b/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java @@ -154,7 +154,7 @@ private PeerSync.PeerSyncResult syncReplicas(ZkController zkController, SolrCore } private PeerSync.PeerSyncResult syncWithReplicas(ZkController zkController, SolrCore core, - ZkNodeProps props, String collection, String shardId, boolean peerSyncOnlyWithActive) { + ZkNodeProps props, String collection, String shardId, boolean peerSyncOnlyWithActive) throws Exception { List nodes = zkController.getZkStateReader() .getReplicaProps(collection, shardId,core.getCoreDescriptor().getCloudDescriptor().getCoreNodeName()); @@ -179,8 +179,9 @@ private PeerSync.PeerSyncResult syncWithReplicas(ZkController zkController, Solr // Fingerprinting here is off because the we currently rely on having at least one of the nodes return "true", and if replicas are out-of-sync // we still need to pick one as leader. A followup sync from the replica to the new leader (with fingerprinting on) should then fail and // initiate recovery-by-replication. 
- PeerSync peerSync = new PeerSync(core, syncWith, core.getUpdateHandler().getUpdateLog().getNumRecordsToKeep(), true, peerSyncOnlyWithActive, false); - return peerSync.sync(); + try (PeerSync peerSync = new PeerSync(core, syncWith, core.getUpdateHandler().getUpdateLog().getNumRecordsToKeep(), true, peerSyncOnlyWithActive, false)) { + return peerSync.sync(); + } } private void syncToMe(ZkController zkController, String collection, diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkCLI.java b/solr/core/src/java/org/apache/solr/cloud/ZkCLI.java index 5acd63bde6cb..ce2f613dfc4f 100644 --- a/solr/core/src/java/org/apache/solr/cloud/ZkCLI.java +++ b/solr/core/src/java/org/apache/solr/cloud/ZkCLI.java @@ -34,7 +34,6 @@ import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; -import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.cli.PosixParser; @@ -108,14 +107,13 @@ public static void main(String[] args) throws InterruptedException, CommandLineParser parser = new PosixParser(); Options options = new Options(); - - options.addOption(OptionBuilder + options.addOption(Option.builder(CMD) .hasArg(true) - .withDescription( + .desc( "cmd to run: " + BOOTSTRAP + ", " + UPCONFIG + ", " + DOWNCONFIG + ", " + LINKCONFIG + ", " + MAKEPATH + ", " + PUT + ", " + PUT_FILE + "," + GET + "," + GET_FILE + ", " + LIST + ", " + CLEAR - + ", " + UPDATEACLS + ", " + LS).create(CMD)); + + ", " + UPDATEACLS + ", " + LS).build()); Option zkHostOption = new Option("z", ZKHOST, true, "ZooKeeper host address"); @@ -260,6 +258,7 @@ public static void main(String[] args) throws InterruptedException, zkClient.printLayoutToStream(stdout); } else if (line.getOptionValue(CMD).equals(LS)) { + @SuppressWarnings({"rawtypes"}) List argList = line.getArgList(); if (argList.size() != 1) { stdout.println("-" + LS + " requires one arg - the path to list"); @@ -272,6 +271,7 @@ public static void main(String[] args) throws InterruptedException, stdout.println(sb.toString()); } else if (line.getOptionValue(CMD).equalsIgnoreCase(CLEAR)) { + @SuppressWarnings({"rawtypes"}) List arglist = line.getArgList(); if (arglist.size() != 1) { stdout.println("-" + CLEAR + " requires one arg - the path to clear"); @@ -279,6 +279,7 @@ public static void main(String[] args) throws InterruptedException, } zkClient.clean(arglist.get(0).toString()); } else if (line.getOptionValue(CMD).equalsIgnoreCase(MAKEPATH)) { + @SuppressWarnings({"rawtypes"}) List arglist = line.getArgList(); if (arglist.size() != 1) { stdout.println("-" + MAKEPATH + " requires one arg - the path to make"); @@ -286,6 +287,7 @@ public static void main(String[] args) throws InterruptedException, } zkClient.makePath(arglist.get(0).toString(), true); } else if (line.getOptionValue(CMD).equalsIgnoreCase(PUT)) { + @SuppressWarnings({"rawtypes"}) List arglist = line.getArgList(); if (arglist.size() != 2) { stdout.println("-" + PUT + " requires two args - the path to create and the data string"); @@ -298,6 +300,7 @@ public static void main(String[] args) throws InterruptedException, zkClient.create(path, arglist.get(1).toString().getBytes(StandardCharsets.UTF_8), CreateMode.PERSISTENT, true); } } else if (line.getOptionValue(CMD).equalsIgnoreCase(PUT_FILE)) { + @SuppressWarnings({"rawtypes"}) List arglist = line.getArgList(); if (arglist.size() != 2) { stdout.println("-" + PUT_FILE + " requires two args - the path 
to create in ZK and the path to the local file"); @@ -317,6 +320,7 @@ public static void main(String[] args) throws InterruptedException, } } else if (line.getOptionValue(CMD).equalsIgnoreCase(GET)) { + @SuppressWarnings({"rawtypes"}) List arglist = line.getArgList(); if (arglist.size() != 1) { stdout.println("-" + GET + " requires one arg - the path to get"); @@ -325,6 +329,7 @@ public static void main(String[] args) throws InterruptedException, byte [] data = zkClient.getData(arglist.get(0).toString(), null, null, true); stdout.println(new String(data, StandardCharsets.UTF_8)); } else if (line.getOptionValue(CMD).equalsIgnoreCase(GET_FILE)) { + @SuppressWarnings({"rawtypes"}) List arglist = line.getArgList(); if (arglist.size() != 2) { stdout.println("-" + GET_FILE + "requires two args - the path to get and the file to save it to"); @@ -333,6 +338,7 @@ public static void main(String[] args) throws InterruptedException, byte [] data = zkClient.getData(arglist.get(0).toString(), null, null, true); FileUtils.writeByteArrayToFile(new File(arglist.get(1).toString()), data); } else if (line.getOptionValue(CMD).equals(UPDATEACLS)) { + @SuppressWarnings({"rawtypes"}) List arglist = line.getArgList(); if (arglist.size() != 1) { stdout.println("-" + UPDATEACLS + " requires one arg - the path to update"); diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java index 9b0d3daa9806..75bef7d82c63 100644 --- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java +++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java @@ -29,6 +29,7 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Enumeration; @@ -297,6 +298,7 @@ public Object call() throws Exception { * @param cloudConfig configuration for this controller. TODO: possibly redundant with CoreContainer * @param descriptorsSupplier a supplier of the current core descriptors. used to know which cores to re-register on reconnect */ + @SuppressWarnings({"unchecked"}) public ZkController(final CoreContainer cc, String zkServerAddress, int zkClientConnectTimeout, CloudConfig cloudConfig, final Supplier> descriptorsSupplier) throws InterruptedException, TimeoutException, IOException { @@ -470,6 +472,8 @@ public boolean isClosed() { return cc.isShutDown(); }}); + // Refuse to start if ZK has a non empty /clusterstate.json + checkNoOldClusterstate(zkClient); this.overseerRunningMap = Overseer.getRunningMap(zkClient); this.overseerCompletedMap = Overseer.getCompletedMap(zkClient); @@ -491,6 +495,41 @@ public boolean isClosed() { assert ObjectReleaseTracker.track(this); } + /** + *

<p>Verifies if /clusterstate.json exists in ZooKeeper, and if it does and is not empty, refuses to start and outputs
+ * a helpful message regarding collection migration.</p>
+ *
+ * <p>If /clusterstate.json exists and is empty, it is removed.</p>
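+ *
+ * <p>When the file is non-empty, operators who do not need the old data can remove it manually before
+ * restarting, for example (same command as in the error message below):</p>
+ * <pre>
+ *   bin/solr zk rm /clusterstate.json -z host:port
+ * </pre>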

+ */ + private void checkNoOldClusterstate(final SolrZkClient zkClient) throws InterruptedException { + try { + if (!zkClient.exists(ZkStateReader.UNSUPPORTED_CLUSTER_STATE, true)) { + return; + } + + final byte[] data = zkClient.getData(ZkStateReader.UNSUPPORTED_CLUSTER_STATE, null, null, true); + + if (Arrays.equals("{}".getBytes(StandardCharsets.UTF_8), data)) { + // Empty json. This log will only occur once. + log.warn("{} no longer supported starting with Solr 9. Found empty file on Zookeeper, deleting it.", ZkStateReader.UNSUPPORTED_CLUSTER_STATE); + zkClient.delete(ZkStateReader.UNSUPPORTED_CLUSTER_STATE, -1, true); + } else { + // /clusterstate.json not empty: refuse to start but do not automatically delete. A bit of a pain but user shouldn't + // have older collections at this stage anyway. + String message = ZkStateReader.UNSUPPORTED_CLUSTER_STATE + " no longer supported starting with Solr 9. " + + "It is present and not empty. Cannot start Solr. Please first migrate collections to stateFormat=2 using an " + + "older version of Solr or if you don't care about the data then delete the file from " + + "Zookeeper using a command line tool, for example: bin/solr zk rm /clusterstate.json -z host:port"; + log.error(message); + throw new SolrException(SolrException.ErrorCode.INVALID_STATE, message); + } + } catch (KeeperException e) { + // Convert checked exception to one acceptable by the caller (see also init() further down) + log.error("", e); + throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "", e); + } + } + public int getLeaderVoteWait() { return leaderVoteWait; } @@ -755,7 +794,10 @@ public SolrCloudManager getSolrCloudManager() { cloudSolrClient = new CloudSolrClient.Builder(new ZkClientClusterStateProvider(zkStateReader)).withSocketTimeout(30000).withConnectionTimeout(15000) .withHttpClient(cc.getUpdateShardHandler().getDefaultHttpClient()) .withConnectionTimeout(15000).withSocketTimeout(30000).build(); - cloudManager = new SolrClientCloudManager(new ZkDistributedQueueFactory(zkClient), cloudSolrClient); + cloudManager = new SolrClientCloudManager( + new ZkDistributedQueueFactory(zkClient), + cloudSolrClient, + cc.getObjectCache()); cloudManager.getClusterStateProvider().connect(); } return cloudManager; @@ -860,7 +902,6 @@ public static void createClusterZkNodes(SolrZkClient zkClient) cmdExecutor.ensureExists(ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH, zkClient); cmdExecutor.ensureExists(ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH, zkClient); byte[] emptyJson = "{}".getBytes(StandardCharsets.UTF_8); - cmdExecutor.ensureExists(ZkStateReader.CLUSTER_STATE, emptyJson, CreateMode.PERSISTENT, zkClient); cmdExecutor.ensureExists(ZkStateReader.SOLR_SECURITY_CONF_PATH, emptyJson, CreateMode.PERSISTENT, zkClient); cmdExecutor.ensureExists(ZkStateReader.SOLR_AUTOSCALING_CONF_PATH, emptyJson, CreateMode.PERSISTENT, zkClient); bootstrapDefaultConfigSet(zkClient); @@ -1013,7 +1054,7 @@ private void registerLiveNodesListener() { log.warn("Unable to read autoscaling.json", e1); } if (createNodes) { - byte[] json = Utils.toJSON(Collections.singletonMap("timestamp", cloudManager.getTimeSource().getEpochTimeNs())); + byte[] json = Utils.toJSON(Collections.singletonMap("timestamp", getSolrCloudManager().getTimeSource().getEpochTimeNs())); for (String n : oldNodes) { String path = ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH + "/" + n; @@ -1187,8 +1228,8 @@ public String register(String coreName, final CoreDescriptor desc, boolean recov // check replica's existence in 
clusterstate first try { - zkStateReader.waitForState(collection, Overseer.isLegacy(zkStateReader) ? 60000 : 100, - TimeUnit.MILLISECONDS, (collectionState) -> getReplicaOrNull(collectionState, shardId, coreZkNodeName) != null); + zkStateReader.waitForState(collection, 100, TimeUnit.MILLISECONDS, + (collectionState) -> getReplicaOrNull(collectionState, shardId, coreZkNodeName) != null); } catch (TimeoutException e) { throw new SolrException(ErrorCode.SERVER_ERROR, "Error registering SolrCore, timeout waiting for replica present in clusterstate"); } @@ -1565,9 +1606,7 @@ public void publish(final CoreDescriptor cd, final Replica.State state, boolean props.put(ZkStateReader.SHARD_ID_PROP, cd.getCloudDescriptor().getShardId()); props.put(ZkStateReader.COLLECTION_PROP, collection); props.put(ZkStateReader.REPLICA_TYPE, cd.getCloudDescriptor().getReplicaType().toString()); - if (!Overseer.isLegacy(zkStateReader)) { - props.put(ZkStateReader.FORCE_SET_STATE_PROP, "false"); - } + props.put(ZkStateReader.FORCE_SET_STATE_PROP, "false"); if (numShards != null) { props.put(ZkStateReader.NUM_SHARDS_PROP, numShards.toString()); } @@ -1784,10 +1823,12 @@ public void preRegister(CoreDescriptor cd, boolean publishState) { } String collectionName = cd.getCloudDescriptor().getCollectionName(); DocCollection collection = zkStateReader.getClusterState().getCollectionOrNull(collectionName); - log.debug(collection == null ? - "Collection {} not visible yet, but flagging it so a watch is registered when it becomes visible" : - "Registering watch for collection {}", - collectionName); + if (log.isDebugEnabled()) { + log.debug(collection == null ? + "Collection {} not visible yet, but flagging it so a watch is registered when it becomes visible" : + "Registering watch for collection {}", + collectionName); + } } catch (KeeperException e) { log.error("", e); throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "", e); @@ -1809,69 +1850,54 @@ public void preRegister(CoreDescriptor cd, boolean publishState) { /** * On startup, the node already published all of its replicas as DOWN, - * so in case of legacyCloud=false ( the replica must already present on Zk ) * we can skip publish the replica as down * @return Should publish the replica as down on startup */ private boolean isPublishAsDownOnStartup(CloudDescriptor cloudDesc) { - if (!Overseer.isLegacy(zkStateReader)) { Replica replica = zkStateReader.getClusterState().getCollection(cloudDesc.getCollectionName()) .getSlice(cloudDesc.getShardId()) .getReplica(cloudDesc.getCoreNodeName()); - if (replica.getNodeName().equals(getNodeName())) { - return false; - } - } - return true; + return !replica.getNodeName().equals(getNodeName()); } private void checkStateInZk(CoreDescriptor cd) throws InterruptedException, NotInClusterStateException { - if (!Overseer.isLegacy(zkStateReader)) { - CloudDescriptor cloudDesc = cd.getCloudDescriptor(); - String nodeName = cloudDesc.getCoreNodeName(); - if (nodeName == null) { - if (cc.repairCoreProperty(cd, CoreDescriptor.CORE_NODE_NAME) == false) { - throw new SolrException(ErrorCode.SERVER_ERROR, "No coreNodeName for " + cd); - } - nodeName = cloudDesc.getCoreNodeName(); - // verify that the repair worked. 
- if (nodeName == null) { - throw new SolrException(ErrorCode.SERVER_ERROR, "No coreNodeName for " + cd); - } - } - final String coreNodeName = nodeName; + CloudDescriptor cloudDesc = cd.getCloudDescriptor(); + String nodeName = cloudDesc.getCoreNodeName(); + if (nodeName == null) { + throw new SolrException(ErrorCode.SERVER_ERROR, "No coreNodeName for " + cd); + } + final String coreNodeName = nodeName; - if (cloudDesc.getShardId() == null) { - throw new SolrException(ErrorCode.SERVER_ERROR, "No shard id for " + cd); - } + if (cloudDesc.getShardId() == null) { + throw new SolrException(ErrorCode.SERVER_ERROR, "No shard id for " + cd); + } - AtomicReference errorMessage = new AtomicReference<>(); - AtomicReference collectionState = new AtomicReference<>(); - try { - zkStateReader.waitForState(cd.getCollectionName(), 10, TimeUnit.SECONDS, (c) -> { - collectionState.set(c); - if (c == null) - return false; - Slice slice = c.getSlice(cloudDesc.getShardId()); - if (slice == null) { - errorMessage.set("Invalid shard: " + cloudDesc.getShardId()); - return false; - } - Replica replica = slice.getReplica(coreNodeName); - if (replica == null) { - errorMessage.set("coreNodeName " + coreNodeName + " does not exist in shard " + cloudDesc.getShardId() + - ", ignore the exception if the replica was deleted"); - return false; - } - return true; - }); - } catch (TimeoutException e) { - String error = errorMessage.get(); - if (error == null) - error = "coreNodeName " + coreNodeName + " does not exist in shard " + cloudDesc.getShardId() + - ", ignore the exception if the replica was deleted"; - throw new NotInClusterStateException(ErrorCode.SERVER_ERROR, error); - } + AtomicReference errorMessage = new AtomicReference<>(); + AtomicReference collectionState = new AtomicReference<>(); + try { + zkStateReader.waitForState(cd.getCollectionName(), 10, TimeUnit.SECONDS, (c) -> { + collectionState.set(c); + if (c == null) + return false; + Slice slice = c.getSlice(cloudDesc.getShardId()); + if (slice == null) { + errorMessage.set("Invalid shard: " + cloudDesc.getShardId()); + return false; + } + Replica replica = slice.getReplica(coreNodeName); + if (replica == null) { + errorMessage.set("coreNodeName " + coreNodeName + " does not exist in shard " + cloudDesc.getShardId() + + ", ignore the exception if the replica was deleted"); + return false; + } + return true; + }); + } catch (TimeoutException e) { + String error = errorMessage.get(); + if (error == null) + error = "coreNodeName " + coreNodeName + " does not exist in shard " + cloudDesc.getShardId() + + ", ignore the exception if the replica was deleted"; + throw new NotInClusterStateException(ErrorCode.SERVER_ERROR, error); } } @@ -2241,8 +2267,10 @@ public void checkOverseerDesignate() { try { byte[] data = zkClient.getData(ZkStateReader.ROLES, null, new Stat(), true); if (data == null) return; + @SuppressWarnings({"rawtypes"}) Map roles = (Map) Utils.fromJSON(data); if (roles == null) return; + @SuppressWarnings({"rawtypes"}) List nodeList = (List) roles.get("overseer"); if (nodeList == null) return; if (nodeList.contains(getNodeName())) { @@ -2299,13 +2327,14 @@ public void removeOnReconnectListener(OnReconnect listener) { if (wasRemoved) { log.debug("Removed OnReconnect listener {}", listener); } else { - log.warn("Was asked to remove OnReconnect listener {}{}" - , listener - , ", but remove operation did not find it in the list of registered listeners."); + log.warn("Was asked to remove OnReconnect listener {}, but remove operation " + + "did not find it 
in the list of registered listeners." + , listener); } } } + @SuppressWarnings({"unchecked"}) Set getCurrentOnReconnectListeners() { HashSet clonedListeners; synchronized (reconnectListeners) { @@ -2667,6 +2696,7 @@ private static void ensureRegisteredSearcher(SolrCore core) throws InterruptedEx } registeredSearcher.decref(); } else { + @SuppressWarnings({"rawtypes"}) Future[] waitSearcher = new Future[1]; if (log.isInfoEnabled()) { log.info("No registered searcher found for core: {}, waiting until a searcher is registered before publishing as active", core.getName()); diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkDistributedQueue.java b/solr/core/src/java/org/apache/solr/cloud/ZkDistributedQueue.java index 465888ff765f..53d799b9f577 100644 --- a/solr/core/src/java/org/apache/solr/cloud/ZkDistributedQueue.java +++ b/solr/core/src/java/org/apache/solr/cloud/ZkDistributedQueue.java @@ -51,9 +51,16 @@ import org.slf4j.LoggerFactory; /** - * A ZK-based distributed queue. Optimized for single-consumer, + *

A ZK-based distributed queue. Optimized for single-consumer, * multiple-producer: if there are multiple consumers on the same ZK queue, - * the results should be correct but inefficient + * the results should be correct but inefficient.

+ * + *

This implementation (with help from subclass {@link OverseerTaskQueue}) is used for the + * /overseer/collection-queue-work queue used for Collection and Config Set API calls to the Overseer.
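For orientation, any Collections API call issued from a client ends up as a message on this queue before the Overseer executes it. A hedged SolrJ sketch (the ZooKeeper address and collection name below are placeholders, not part of this patch):

import java.util.Collections;
import java.util.Optional;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;

public class ReloadViaOverseer {
  public static void main(String[] args) throws Exception {
    try (CloudSolrClient client = new CloudSolrClient.Builder(
        Collections.singletonList("localhost:2181"), Optional.empty()).build()) {
      // This request travels through /overseer/collection-queue-work
      // and is consumed by the Overseer.
      CollectionAdminRequest.reloadCollection("techproducts").process(client);
    }
  }
}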

+ * + *

Implementation note: In order to enqueue a message into this queue, a {@link CreateMode#EPHEMERAL_SEQUENTIAL} response node is created + * and watched at /overseer/collection-queue-work/qnr-monotonically_increasing_id, then a corresponding + * {@link CreateMode#PERSISTENT} request node reusing the same id is created at /overseer/collection-queue-work/qn-response_id.
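That request/response pairing can be sketched with the plain ZooKeeper client. A minimal illustration following the paths in the note above; the connection handling and payload are assumed, and a real producer would keep the session open until the response arrives:

import java.nio.charset.StandardCharsets;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.ZooKeeper;

public class QueuePairSketch {
  static final String QUEUE = "/overseer/collection-queue-work";

  public static void main(String[] args) throws Exception {
    ZooKeeper zk = new ZooKeeper("localhost:2181", 15000, event -> {});
    // 1. Create the ephemeral-sequential response node; ZK appends the id.
    String responsePath = zk.create(QUEUE + "/qnr-", new byte[0],
        ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);
    String id = responsePath.substring(responsePath.lastIndexOf("qnr-") + 4);
    // 2. Watch it so we learn when the consumer writes the response.
    zk.exists(responsePath, event -> System.out.println("response ready: " + event));
    // 3. Create the persistent request node reusing the same id.
    byte[] message = "{\"operation\":\"...\"}".getBytes(StandardCharsets.UTF_8);
    zk.create(QUEUE + "/qn-" + id, message,
        ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
  }
}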

*/ public class ZkDistributedQueue implements DistributedQueue { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkShardTerms.java b/solr/core/src/java/org/apache/solr/cloud/ZkShardTerms.java index bd446c4b055e..cc3320528961 100644 --- a/solr/core/src/java/org/apache/solr/cloud/ZkShardTerms.java +++ b/solr/core/src/java/org/apache/solr/cloud/ZkShardTerms.java @@ -349,6 +349,7 @@ private void ensureTermNodeExist() { /** * Fetch latest terms from ZK */ + @SuppressWarnings({"unchecked"}) public void refreshTerms() { ShardTerms newTerms; try { diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/AddReplicaCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/AddReplicaCmd.java index e1cfe125232d..95fffa47f379 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/AddReplicaCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/AddReplicaCmd.java @@ -89,11 +89,12 @@ public AddReplicaCmd(OverseerCollectionMessageHandler ocmh) { } @Override - public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception { + public void call(ClusterState state, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { addReplica(state, message, results, null); } - List addReplica(ClusterState clusterState, ZkNodeProps message, NamedList results, Runnable onComplete) + @SuppressWarnings({"unchecked"}) + List addReplica(ClusterState clusterState, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results, Runnable onComplete) throws IOException, InterruptedException, KeeperException { if (log.isDebugEnabled()) { log.debug("addReplica() : {}", Utils.toJSONString(message)); @@ -144,7 +145,7 @@ List addReplica(ClusterState clusterState, ZkNodeProps message, Nam totalReplicas += entry.getValue(); } if (totalReplicas > 1) { - if (message.getStr(CoreAdminParams.NAME) != null) { + if (node != null) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Cannot create " + totalReplicas + " replicas if 'name' parameter is specified"); } if (message.getStr(CoreAdminParams.CORE_NODE_NAME) != null) { @@ -214,7 +215,7 @@ List addReplica(ClusterState clusterState, ZkNodeProps message, Nam .collect(Collectors.toList()); } - private ModifiableSolrParams getReplicaParams(ClusterState clusterState, ZkNodeProps message, NamedList results, String collectionName, DocCollection coll, boolean skipCreateReplicaInClusterState, String asyncId, ShardHandler shardHandler, CreateReplica createReplica) throws IOException, InterruptedException, KeeperException { + private ModifiableSolrParams getReplicaParams(ClusterState clusterState, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results, String collectionName, DocCollection coll, boolean skipCreateReplicaInClusterState, String asyncId, ShardHandler shardHandler, CreateReplica createReplica) throws IOException, InterruptedException, KeeperException { if (coll.getStr(WITH_COLLECTION) != null) { String withCollectionName = coll.getStr(WITH_COLLECTION); DocCollection withCollection = clusterState.getCollection(withCollectionName); @@ -241,29 +242,27 @@ private ModifiableSolrParams getReplicaParams(ClusterState clusterState, ZkNodeP ModifiableSolrParams params = new ModifiableSolrParams(); ZkStateReader zkStateReader = ocmh.zkStateReader; - if (!Overseer.isLegacy(zkStateReader)) { - if (!skipCreateReplicaInClusterState) { - ZkNodeProps 
props = new ZkNodeProps( - Overseer.QUEUE_OPERATION, ADDREPLICA.toLower(), - ZkStateReader.COLLECTION_PROP, collectionName, - ZkStateReader.SHARD_ID_PROP, createReplica.sliceName, - ZkStateReader.CORE_NAME_PROP, createReplica.coreName, - ZkStateReader.STATE_PROP, Replica.State.DOWN.toString(), - ZkStateReader.BASE_URL_PROP, zkStateReader.getBaseUrlForNodeName(createReplica.node), - ZkStateReader.NODE_NAME_PROP, createReplica.node, - ZkStateReader.REPLICA_TYPE, createReplica.replicaType.name()); - if (createReplica.coreNodeName != null) { - props = props.plus(ZkStateReader.CORE_NODE_NAME_PROP, createReplica.coreNodeName); - } - try { - ocmh.overseer.offerStateUpdate(Utils.toJSON(props)); - } catch (Exception e) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Exception updating Overseer state queue", e); - } + if (!skipCreateReplicaInClusterState) { + ZkNodeProps props = new ZkNodeProps( + Overseer.QUEUE_OPERATION, ADDREPLICA.toLower(), + ZkStateReader.COLLECTION_PROP, collectionName, + ZkStateReader.SHARD_ID_PROP, createReplica.sliceName, + ZkStateReader.CORE_NAME_PROP, createReplica.coreName, + ZkStateReader.STATE_PROP, Replica.State.DOWN.toString(), + ZkStateReader.BASE_URL_PROP, zkStateReader.getBaseUrlForNodeName(createReplica.node), + ZkStateReader.NODE_NAME_PROP, createReplica.node, + ZkStateReader.REPLICA_TYPE, createReplica.replicaType.name()); + if (createReplica.coreNodeName != null) { + props = props.plus(ZkStateReader.CORE_NODE_NAME_PROP, createReplica.coreNodeName); + } + try { + ocmh.overseer.offerStateUpdate(Utils.toJSON(props)); + } catch (Exception e) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Exception updating Overseer state queue", e); } - params.set(CoreAdminParams.CORE_NODE_NAME, - ocmh.waitToSeeReplicasInState(collectionName, Collections.singletonList(createReplica.coreName)).get(createReplica.coreName).getName()); } + params.set(CoreAdminParams.CORE_NODE_NAME, + ocmh.waitToSeeReplicasInState(collectionName, Collections.singletonList(createReplica.coreName)).get(createReplica.coreName).getName()); String configName = zkStateReader.readConfigName(collectionName); String routeKey = message.getStr(ShardParams._ROUTE_); diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/AliasCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/AliasCmd.java index 3afc805d53af..611bd2d9dc01 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/AliasCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/AliasCmd.java @@ -53,6 +53,7 @@ abstract class AliasCmd implements OverseerCollectionMessageHandler.Cmd { * Creates a collection (for use in a routed alias), waiting for it to be ready before returning. * If the collection already exists then this is not an error.
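Outside the Overseer, the same create-then-wait contract can be approximated from SolrJ. A hypothetical helper, assuming the single-argument waitForState overload used elsewhere in this patch is available; it treats "already exists" as success per the javadoc above:

import java.util.concurrent.TimeUnit;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;

class AliasCollectionHelper {
  // Create the target collection if absent, then wait until it is visible.
  static void createAndWait(CloudSolrClient client, String coll, String config) throws Exception {
    client.connect();
    if (!client.getZkStateReader().getClusterState().hasCollection(coll)) {
      CollectionAdminRequest.createCollection(coll, config, 1, 1).process(client);
    }
    client.getZkStateReader().waitForState(coll, 30, TimeUnit.SECONDS,
        (collectionState) -> collectionState != null);
  }
}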

*/ + @SuppressWarnings({"rawtypes"}) static NamedList createCollectionAndWait(ClusterState clusterState, String aliasName, Map aliasMetadata, String createCollName, OverseerCollectionMessageHandler ocmh) throws Exception { // Map alias metadata starting with a prefix to a create-collection API request diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/Assign.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/Assign.java index 8a3df781d7ca..b577340845e4 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/Assign.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/Assign.java @@ -200,7 +200,7 @@ private static int defaultCounterValue(DocCollection collection, boolean newColl } return defaultValue; } - + private static int defaultCounterValue(DocCollection collection, boolean newCollection) { if (newCollection) return 0; int defaultValue = collection.getReplicas().size(); @@ -277,6 +277,7 @@ public static boolean usePolicyFramework(DocCollection collection, SolrCloudMana return usePolicyFramework(Optional.of(collection), cloudManager); } + @SuppressWarnings({"unchecked"}) private static boolean usePolicyFramework(Optional collection, SolrCloudManager cloudManager) throws IOException, InterruptedException { boolean useLegacyAssignment = true; Map clusterProperties = cloudManager.getClusterStateProvider().getClusterProperties(); @@ -323,13 +324,15 @@ public int weight() { // // Gets a list of candidate nodes to put the required replica(s) on. Throws errors if not enough replicas // could be created on live nodes given maxShardsPerNode, Replication factor (if from createShard) etc. + @SuppressWarnings({"unchecked"}) public static List getNodesForNewReplicas(ClusterState clusterState, String collectionName, String shard, int nrtReplicas, int tlogReplicas, int pullReplicas, Object createNodeSet, SolrCloudManager cloudManager) throws IOException, InterruptedException, AssignmentException { - log.debug("getNodesForNewReplicas() shard: {} , nrtReplicas : {} , tlogReplicas: {} , pullReplicas: {} , createNodeSet {}", shard, nrtReplicas, tlogReplicas, pullReplicas, createNodeSet); + log.debug("getNodesForNewReplicas() shard: {} , nrtReplicas : {} , tlogReplicas: {} , pullReplicas: {} , createNodeSet {}" + , shard, nrtReplicas, tlogReplicas, pullReplicas, createNodeSet); DocCollection coll = clusterState.getCollection(collectionName); - Integer maxShardsPerNode = coll.getMaxShardsPerNode() == -1 ? Integer.MAX_VALUE : coll.getMaxShardsPerNode(); - List createNodeList = null; + int maxShardsPerNode = coll.getMaxShardsPerNode() == -1 ? Integer.MAX_VALUE : coll.getMaxShardsPerNode(); + List createNodeList; if (createNodeSet instanceof List) { createNodeList = (List) createNodeSet; @@ -338,9 +341,13 @@ public static List getNodesForNewReplicas(ClusterState clusterS createNodeList = createNodeSet == null ? null : new ArrayList<>(new LinkedHashSet<>(StrUtils.splitSmart((String) createNodeSet, ",", true))); } - HashMap nodeNameVsShardCount = getNodeNameVsShardCount(collectionName, clusterState, createNodeList); + // produces a clear message when down nodes are the root cause; without this the user just + // gets a detailed log message about the nodes that are up, and a message that policies could not + // be satisfied, which then requires study to diagnose the issue. + checkLiveNodes(createNodeList, clusterState); if (createNodeList == null) { // We only care if we haven't been told to put new replicas on specific nodes.
+ HashMap nodeNameVsShardCount = getNodeNameVsShardCount(collectionName, clusterState, null); long availableSlots = 0; for (Map.Entry ent : nodeNameVsShardCount.entrySet()) { //ADDREPLICA can put more than maxShardsPerNode on an instance, so this test is necessary. @@ -408,24 +415,21 @@ public static List getPositionsUsingPolicy(String collName, Lis static HashMap getNodeNameVsShardCount(String collectionName, ClusterState clusterState, List createNodeList) { - Set nodes = clusterState.getLiveNodes(); - - List nodeList = new ArrayList<>(nodes.size()); - nodeList.addAll(nodes); - if (createNodeList != null) nodeList.retainAll(createNodeList); - HashMap nodeNameVsShardCount = new HashMap<>(); - for (String s : nodeList) { + List liveNodes = createNodeList == null || createNodeList.isEmpty() ? + new ArrayList<>(clusterState.getLiveNodes()) : + checkLiveNodes(createNodeList, clusterState); + + for (String s : liveNodes) { nodeNameVsShardCount.put(s, new ReplicaCount(s)); } + + // if we were given a list, just use that, don't worry about counts if (createNodeList != null) { // Overrides petty considerations about maxShardsPerNode - if (createNodeList.size() != nodeNameVsShardCount.size()) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "At least one of the node(s) specified " + createNodeList + " are not currently active in " - + nodeNameVsShardCount.keySet() + ", no action taken."); - } return nodeNameVsShardCount; } + + // if we get here we were not given a createNodeList, build a map with real counts. DocCollection coll = clusterState.getCollection(collectionName); int maxShardsPerNode = coll.getMaxShardsPerNode() == -1 ? Integer.MAX_VALUE : coll.getMaxShardsPerNode(); Map collections = clusterState.getCollectionsMap(); @@ -450,6 +454,22 @@ static HashMap getNodeNameVsShardCount(String collectionNa return nodeNameVsShardCount; } + // throw an exception if any node in the supplied list is not live. + // Empty or null list always succeeds and returns the input. + private static List checkLiveNodes(List createNodeList, ClusterState clusterState) { + Set liveNodes = clusterState.getLiveNodes(); + if (createNodeList != null) { + if (!liveNodes.containsAll(createNodeList)) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "At least one of the node(s) specified " + createNodeList + " is not currently active in " + + liveNodes + ", no action taken."); + } + // the logic that was extracted to this method used to create a defensive copy, but no code + // was modifying the copy; if this method is made protected or public we may want to restore that + } + return createNodeList; // unmodified, but return for inline use + } + /** * Thrown if there is an exception while assigning nodes for replicas */ @@ -547,37 +567,50 @@ public static class LegacyAssignStrategy implements AssignStrategy { @Override public List assign(SolrCloudManager solrCloudManager, AssignRequest assignRequest) throws Assign.AssignmentException, IOException, InterruptedException { ClusterState clusterState = solrCloudManager.getClusterStateProvider().getClusterState(); - List nodeList = assignRequest.nodes; + List nodeList = assignRequest.nodes; // can this be empty list?
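The modulo round-robin that LegacyAssignStrategy performs here is easier to see in isolation. A standalone sketch with illustrative names rather than Solr classes:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class RoundRobinSketch {
  public static void main(String[] args) {
    List<String> nodes = List.of("node1", "node2", "node3"); // already sorted by load
    List<String> shards = List.of("shard1", "shard2");
    Map<String, Integer> countsPerType = new LinkedHashMap<>();
    countsPerType.put("NRT", 2);
    countsPerType.put("PULL", 1);

    List<String> positions = new ArrayList<>();
    int i = 0; // global counter so placement wraps across shards and replica types
    for (String shard : shards) {
      for (Map.Entry<String, Integer> e : countsPerType.entrySet()) {
        for (int j = 0; j < e.getValue(); j++) {
          positions.add(shard + "/" + e.getKey() + j + " -> " + nodes.get(i % nodes.size()));
          i++;
        }
      }
    }
    positions.forEach(System.out::println);
  }
}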
- HashMap nodeNameVsShardCount = Assign.getNodeNameVsShardCount(assignRequest.collectionName, clusterState, assignRequest.nodes); if (nodeList == null || nodeList.isEmpty()) { + HashMap nodeNameVsShardCount = + Assign.getNodeNameVsShardCount(assignRequest.collectionName, clusterState, nodeList); + // if nodelist was empty, this map will be empty too. (passing null above however gets a full map) ArrayList sortedNodeList = new ArrayList<>(nodeNameVsShardCount.values()); sortedNodeList.sort(Comparator.comparingInt(Assign.ReplicaCount::weight)); nodeList = sortedNodeList.stream().map(replicaCount -> replicaCount.nodeName).collect(Collectors.toList()); } + // otherwise we get a div/0 below + assert !nodeList.isEmpty(); + int i = 0; List result = new ArrayList<>(); - for (String aShard : assignRequest.shardNames) - for (Map.Entry e : ImmutableMap.of(Replica.Type.NRT, assignRequest.numNrtReplicas, - Replica.Type.TLOG, assignRequest.numTlogReplicas, - Replica.Type.PULL, assignRequest.numPullReplicas - ).entrySet()) { + for (String aShard : assignRequest.shardNames) { + for (Map.Entry e : countsPerReplicaType(assignRequest).entrySet()) { for (int j = 0; j < e.getValue(); j++) { result.add(new ReplicaPosition(aShard, j, e.getKey(), nodeList.get(i % nodeList.size()))); i++; } } + } return result; } + + // keeps this big ugly construction block out of otherwise legible code + private ImmutableMap countsPerReplicaType(AssignRequest assignRequest) { + return ImmutableMap.of( + Replica.Type.NRT, assignRequest.numNrtReplicas, + Replica.Type.TLOG, assignRequest.numTlogReplicas, + Replica.Type.PULL, assignRequest.numPullReplicas + ); + } } public static class RulesBasedAssignStrategy implements AssignStrategy { public List rules; + @SuppressWarnings({"rawtypes"}) public List snitches; public ClusterState clusterState; - public RulesBasedAssignStrategy(List rules, List snitches, ClusterState clusterState) { + public RulesBasedAssignStrategy(List rules, @SuppressWarnings({"rawtypes"})List snitches, ClusterState clusterState) { this.rules = rules; this.snitches = snitches; this.clusterState = clusterState; @@ -647,8 +680,10 @@ public AssignStrategyFactory(SolrCloudManager solrCloudManager) { } public AssignStrategy create(ClusterState clusterState, DocCollection collection) throws IOException, InterruptedException { + @SuppressWarnings({"unchecked", "rawtypes"}) List ruleMaps = (List) collection.get("rule"); String policyName = collection.getStr(POLICY); + @SuppressWarnings({"rawtypes"}) List snitches = (List) collection.get(SNITCH); Strategy strategy = null; diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/BackupCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/BackupCmd.java index 5be97163163e..68565f8a69cb 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/BackupCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/BackupCmd.java @@ -66,7 +66,7 @@ public BackupCmd(OverseerCollectionMessageHandler ocmh) { } @Override - public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception { + public void call(ClusterState state, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { String extCollectionName = message.getStr(COLLECTION_PROP); boolean followAliases = message.getBool(FOLLOW_ALIASES, false); String collectionName; @@ -113,8 +113,8 @@ public void call(ClusterState state, ZkNodeProps message, NamedList results) thr String configName = 
ocmh.zkStateReader.readConfigName(collectionName); backupMgr.downloadConfigDir(location, backupName, configName); - //Save the collection's state. Can be part of the monolithic clusterstate.json or a individual state.json - //Since we don't want to distinguish we extract the state and back it up as a separate json + //Save the collection's state (coming from the collection's state.json) + //We extract the state and back it up as a separate json DocCollection collectionState = ocmh.zkStateReader.getClusterState().getCollection(collectionName); backupMgr.writeCollectionState(location, backupName, collectionName, collectionState); @@ -165,7 +165,8 @@ private Replica selectReplicaWithSnapshot(CollectionSnapshotMetaData snapshotMet return r.get(); } - private void copyIndexFiles(URI backupPath, String collectionName, ZkNodeProps request, NamedList results) throws Exception { + @SuppressWarnings({"unchecked"}) + private void copyIndexFiles(URI backupPath, String collectionName, ZkNodeProps request, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { String backupName = request.getStr(NAME); String asyncId = request.getStr(ASYNC); String repoName = request.getStr(CoreAdminParams.BACKUP_REPOSITORY); diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateAliasCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateAliasCmd.java index fdadf77aedb6..9a9b0bbaf0a9 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateAliasCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateAliasCmd.java @@ -52,7 +52,7 @@ public CreateAliasCmd(OverseerCollectionMessageHandler ocmh) { } @Override - public void call(ClusterState state, ZkNodeProps message, NamedList results) + public void call(ClusterState state, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { final String aliasName = message.getStr(CommonParams.NAME); ZkStateReader zkStateReader = ocmh.zkStateReader; diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java index 182b66e6d4f1..6498c8bd0efc 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java @@ -103,7 +103,8 @@ public CreateCollectionCmd(OverseerCollectionMessageHandler ocmh) { } @Override - public void call(ClusterState clusterState, ZkNodeProps message, NamedList results) throws Exception { + @SuppressWarnings({"unchecked"}) + public void call(ClusterState clusterState, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { if (ocmh.zkStateReader.aliasesManager != null) { // not a mock ZkStateReader ocmh.zkStateReader.aliasesManager.update(); } @@ -154,9 +155,8 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList resul final String async = message.getStr(ASYNC); ZkStateReader zkStateReader = ocmh.zkStateReader; - boolean isLegacyCloud = Overseer.isLegacy(zkStateReader); - OverseerCollectionMessageHandler.createConfNode(stateManager, configName, collectionName, isLegacyCloud); + OverseerCollectionMessageHandler.createConfNode(stateManager, configName, collectionName); Map collectionParams = new HashMap<>(); Map collectionProps = message.getProperties(); @@ -236,21 +236,19 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList resul 
} String baseUrl = zkStateReader.getBaseUrlForNodeName(nodeName); - //in the new mode, create the replica in clusterstate prior to creating the core. + // create the replica in the collection's state.json in ZK prior to creating the core. // Otherwise the core creation fails - if (!isLegacyCloud) { - ZkNodeProps props = new ZkNodeProps( - Overseer.QUEUE_OPERATION, ADDREPLICA.toString(), - ZkStateReader.COLLECTION_PROP, collectionName, - ZkStateReader.SHARD_ID_PROP, replicaPosition.shard, - ZkStateReader.CORE_NAME_PROP, coreName, - ZkStateReader.STATE_PROP, Replica.State.DOWN.toString(), - ZkStateReader.BASE_URL_PROP, baseUrl, - ZkStateReader.NODE_NAME_PROP, nodeName, - ZkStateReader.REPLICA_TYPE, replicaPosition.type.name(), - CommonAdminParams.WAIT_FOR_FINAL_STATE, Boolean.toString(waitForFinalState)); - ocmh.overseer.offerStateUpdate(Utils.toJSON(props)); - } + ZkNodeProps props = new ZkNodeProps( + Overseer.QUEUE_OPERATION, ADDREPLICA.toString(), + ZkStateReader.COLLECTION_PROP, collectionName, + ZkStateReader.SHARD_ID_PROP, replicaPosition.shard, + ZkStateReader.CORE_NAME_PROP, coreName, + ZkStateReader.STATE_PROP, Replica.State.DOWN.toString(), + ZkStateReader.BASE_URL_PROP, baseUrl, + ZkStateReader.NODE_NAME_PROP, nodeName, + ZkStateReader.REPLICA_TYPE, replicaPosition.type.name(), + CommonAdminParams.WAIT_FOR_FINAL_STATE, Boolean.toString(waitForFinalState)); + ocmh.overseer.offerStateUpdate(Utils.toJSON(props)); // Need to create new params for each request ModifiableSolrParams params = new ModifiableSolrParams(); @@ -279,24 +277,19 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList resul sreq.actualShards = sreq.shards; sreq.params = params; - if (isLegacyCloud) { - shardHandler.submit(sreq, sreq.shards[0], sreq.params); - } else { - coresToCreate.put(coreName, sreq); - } + coresToCreate.put(coreName, sreq); } - if(!isLegacyCloud) { - // wait for all replica entries to be created - Map replicas = ocmh.waitToSeeReplicasInState(collectionName, coresToCreate.keySet()); - for (Map.Entry e : coresToCreate.entrySet()) { - ShardRequest sreq = e.getValue(); - sreq.params.set(CoreAdminParams.CORE_NODE_NAME, replicas.get(e.getKey()).getName()); - shardHandler.submit(sreq, sreq.shards[0], sreq.params); - } + // wait for all replica entries to be created + Map replicas = ocmh.waitToSeeReplicasInState(collectionName, coresToCreate.keySet()); + for (Map.Entry e : coresToCreate.entrySet()) { + ShardRequest sreq = e.getValue(); + sreq.params.set(CoreAdminParams.CORE_NODE_NAME, replicas.get(e.getKey()).getName()); + shardHandler.submit(sreq, sreq.shards[0], sreq.params); } shardRequestTracker.processResponses(results, shardHandler, false, null, Collections.emptySet()); + @SuppressWarnings({"rawtypes"}) boolean failure = results.get("failure") != null && ((SimpleOrderedMap)results.get("failure")).size() > 0; if (failure) { // Let's cleanup as we hit an exception diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateShardCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateShardCmd.java index 023bc3c2703b..989003aee7d2 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateShardCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateShardCmd.java @@ -51,6 +51,7 @@ public CreateShardCmd(OverseerCollectionMessageHandler ocmh) { } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void call(ClusterState clusterState, ZkNodeProps message, NamedList results) throws Exception { 
String extCollectionName = message.getStr(COLLECTION_PROP); String sliceName = message.getStr(SHARD_ID_PROP); diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateSnapshotCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateSnapshotCmd.java index 1085c8a2a53f..a1109522212f 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateSnapshotCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateSnapshotCmd.java @@ -65,7 +65,8 @@ public CreateSnapshotCmd (OverseerCollectionMessageHandler ocmh) { } @Override - public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception { + @SuppressWarnings({"unchecked"}) + public void call(ClusterState state, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { String extCollectionName = message.getStr(COLLECTION_PROP); boolean followAliases = message.getBool(FOLLOW_ALIASES, false); @@ -92,6 +93,7 @@ public void call(ClusterState state, ZkNodeProps message, NamedList results) thr SolrSnapshotManager.createCollectionLevelSnapshot(zkClient, collectionName, new CollectionSnapshotMetaData(commitName)); log.info("Created a ZK path to store snapshot information for collection={} with commitName={}", collectionName, commitName); + @SuppressWarnings({"rawtypes"}) NamedList shardRequestResults = new NamedList(); Map shardByCoreName = new HashMap<>(); ShardHandler shardHandler = ocmh.shardHandlerFactory.getShardHandler(ocmh.overseer.getCoreContainer().getUpdateShardHandler().getDefaultHttpClient()); @@ -127,10 +129,12 @@ public void call(ClusterState state, ZkNodeProps message, NamedList results) thr Set failedShards = new HashSet<>(); shardRequestTracker.processResponses(shardRequestResults, shardHandler, false, null); + @SuppressWarnings({"rawtypes"}) NamedList success = (NamedList) shardRequestResults.get("success"); List replicas = new ArrayList<>(); if (success != null) { for ( int i = 0 ; i < success.size() ; i++) { + @SuppressWarnings({"rawtypes"}) NamedList resp = (NamedList)success.getVal(i); // Check if this core is the leader for the shard. 
The idea here is that during the backup diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteAliasCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteAliasCmd.java index 6cc2eecc99a0..d071e91a85fc 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteAliasCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteAliasCmd.java @@ -33,7 +33,7 @@ public DeleteAliasCmd(OverseerCollectionMessageHandler ocmh) { } @Override - public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception { + public void call(ClusterState state, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { String aliasName = message.getStr(NAME); ZkStateReader zkStateReader = ocmh.zkStateReader; diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteCollectionCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteCollectionCmd.java index 648f5ba39107..70d8d2bc3779 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteCollectionCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteCollectionCmd.java @@ -69,7 +69,7 @@ public DeleteCollectionCmd(OverseerCollectionMessageHandler ocmh) { } @Override - public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception { + public void call(ClusterState state, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { Object o = message.get(MaintainRoutedAliasCmd.INVOKED_BY_ROUTED_ALIAS); if (o != null) { ((Runnable)o).run(); // this will ensure the collection is removed from the alias before it disappears. @@ -132,6 +132,7 @@ public void call(ClusterState state, ZkNodeProps message, NamedList results) thr okayExceptions.add(NonExistentCoreException.class.getName()); ZkNodeProps internalMsg = message.plus(NAME, collection); + @SuppressWarnings({"unchecked"}) List failedReplicas = ocmh.collectionCmd(internalMsg, params, results, null, asyncId, okayExceptions); for (Replica failedReplica : failedReplicas) { boolean isSharedFS = failedReplica.getBool(ZkStateReader.SHARED_STORAGE_PROP, false) && failedReplica.get("dataDir") != null; diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteNodeCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteNodeCmd.java index 5f6e29ce6ea4..19865d3300c3 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteNodeCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteNodeCmd.java @@ -51,7 +51,8 @@ public DeleteNodeCmd(OverseerCollectionMessageHandler ocmh) { } @Override - public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception { + @SuppressWarnings({"unchecked"}) + public void call(ClusterState state, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { ocmh.checkRequired(message, "node"); String node = message.getStr("node"); List sourceReplicas = ReplaceNodeCmd.getReplicasOfNode(node, state); @@ -91,7 +92,8 @@ static List verifyReplicaAvailability(List sourceReplicas, return res; } - static void cleanupReplicas(NamedList results, + @SuppressWarnings({"unchecked"}) + static void cleanupReplicas(@SuppressWarnings({"rawtypes"})NamedList results, ClusterState clusterState, List sourceReplicas, OverseerCollectionMessageHandler ocmh, @@ -103,6 +105,7 @@ static void cleanupReplicas(NamedList results, 
String shard = sourceReplica.getStr(SHARD_ID_PROP); String type = sourceReplica.getStr(ZkStateReader.REPLICA_TYPE); log.info("Deleting replica type={} for collection={} shard={} on node={}", type, coll, shard, node); + @SuppressWarnings({"rawtypes"}) NamedList deleteResult = new NamedList(); try { if (async != null) sourceReplica = sourceReplica.plus(ASYNC, async); diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteReplicaCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteReplicaCmd.java index fa7676a8f077..c263203dcc89 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteReplicaCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteReplicaCmd.java @@ -63,13 +63,13 @@ public DeleteReplicaCmd(OverseerCollectionMessageHandler ocmh) { @Override @SuppressWarnings("unchecked") - public void call(ClusterState clusterState, ZkNodeProps message, NamedList results) throws Exception { + public void call(ClusterState clusterState, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { deleteReplica(clusterState, message, results,null); } @SuppressWarnings("unchecked") - void deleteReplica(ClusterState clusterState, ZkNodeProps message, NamedList results, Runnable onComplete) + void deleteReplica(ClusterState clusterState, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results, Runnable onComplete) throws KeeperException, InterruptedException { if (log.isDebugEnabled()) { log.debug("deleteReplica() : {}", Utils.toJSONString(message)); @@ -112,9 +112,10 @@ void deleteReplica(ClusterState clusterState, ZkNodeProps message, NamedList res * Delete replicas based on count for a given collection. If a shard is passed, uses that * else deletes given num replicas across all shards for the given collection. 
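As a usage note, the count-based delete path is reachable from SolrJ, assuming the count-based CollectionAdminRequest factory methods exist in this version; the collection and shard names below are placeholders:

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;

public class DeleteByCountSketch {
  static void trim(SolrClient client) throws Exception {
    // Delete 2 replicas from one shard...
    CollectionAdminRequest.deleteReplicasFromShard("myColl", "shard1", 2).process(client);
    // ...or 1 replica from every shard of the collection.
    CollectionAdminRequest.deleteReplicasFromAllShards("myColl", 1).process(client);
  }
}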
*/ + @SuppressWarnings({"unchecked"}) void deleteReplicaBasedOnCount(ClusterState clusterState, ZkNodeProps message, - NamedList results, + @SuppressWarnings({"rawtypes"})NamedList results, Runnable onComplete, boolean parallel) throws KeeperException, InterruptedException { @@ -211,7 +212,8 @@ private void validateReplicaAvailability(Slice slice, String shard, String colle } } - void deleteCore(Slice slice, String collectionName, String replicaName,ZkNodeProps message, String shard, NamedList results, Runnable onComplete, boolean parallel) throws KeeperException, InterruptedException { + @SuppressWarnings({"unchecked"}) + void deleteCore(Slice slice, String collectionName, String replicaName,ZkNodeProps message, String shard, @SuppressWarnings({"rawtypes"})NamedList results, Runnable onComplete, boolean parallel) throws KeeperException, InterruptedException { Replica replica = slice.getReplica(replicaName); if (replica == null) { diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteShardCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteShardCmd.java index e6d1e6a59676..ff7edfa9ffa3 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteShardCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteShardCmd.java @@ -62,7 +62,8 @@ public DeleteShardCmd(OverseerCollectionMessageHandler ocmh) { } @Override - public void call(ClusterState clusterState, ZkNodeProps message, NamedList results) throws Exception { + @SuppressWarnings({"unchecked"}) + public void call(ClusterState clusterState, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { String extCollectionName = message.getStr(ZkStateReader.COLLECTION_PROP); String sliceId = message.getStr(ZkStateReader.SHARD_ID_PROP); @@ -109,6 +110,7 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList resul if (log.isInfoEnabled()) { log.info("Deleting replica for collection={} shard={} on node={}", replica.getStr(COLLECTION_PROP), replica.getStr(SHARD_ID_PROP), replica.getStr(CoreAdminParams.NODE)); } + @SuppressWarnings({"rawtypes"}) NamedList deleteResult = new NamedList(); try { ((DeleteReplicaCmd)ocmh.commandMap.get(DELETEREPLICA)).deleteReplica(clusterState, replica, deleteResult, () -> { @@ -119,6 +121,7 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList resul " on node=%s", replica.getStr(COLLECTION_PROP), replica.getStr(SHARD_ID_PROP), replica.getStr(NODE_NAME_PROP))); } } + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap success = (SimpleOrderedMap) deleteResult.get("success"); if (success != null) { synchronized (results) { diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteSnapshotCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteSnapshotCmd.java index 128a0cb8cccd..2f62139807ac 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteSnapshotCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteSnapshotCmd.java @@ -63,7 +63,8 @@ public DeleteSnapshotCmd (OverseerCollectionMessageHandler ocmh) { } @Override - public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception { + @SuppressWarnings({"unchecked"}) + public void call(ClusterState state, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { String extCollectionName = message.getStr(COLLECTION_PROP); boolean followAliases = 
message.getBool(FOLLOW_ALIASES, false); String collectionName; @@ -74,6 +75,7 @@ public void call(ClusterState state, ZkNodeProps message, NamedList results) thr } String commitName = message.getStr(CoreAdminParams.COMMIT_NAME); String asyncId = message.getStr(ASYNC); + @SuppressWarnings({"rawtypes"}) NamedList shardRequestResults = new NamedList(); ShardHandler shardHandler = ocmh.shardHandlerFactory.getShardHandler(ocmh.overseer.getCoreContainer().getUpdateShardHandler().getDefaultHttpClient()); SolrZkClient zkClient = ocmh.zkStateReader.getZkClient(); @@ -126,10 +128,12 @@ public void call(ClusterState state, ZkNodeProps message, NamedList results) thr } shardRequestTracker.processResponses(shardRequestResults, shardHandler, false, null); + @SuppressWarnings({"rawtypes"}) NamedList success = (NamedList) shardRequestResults.get("success"); List replicas = new ArrayList<>(); if (success != null) { for ( int i = 0 ; i < success.size() ; i++) { + @SuppressWarnings({"rawtypes"}) NamedList resp = (NamedList)success.getVal(i); // Unfortunately async processing logic doesn't provide the "core" name automatically. String coreName = (String)resp.get("core"); diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/MaintainRoutedAliasCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/MaintainRoutedAliasCmd.java index 2728b9aa6012..396b45bedd5d 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/MaintainRoutedAliasCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/MaintainRoutedAliasCmd.java @@ -99,7 +99,7 @@ private void removeCollectionFromAlias(String aliasName, ZkStateReader.AliasesMa } @Override - public void call(ClusterState clusterState, ZkNodeProps message, NamedList results) throws Exception { + public void call(ClusterState clusterState, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { //---- PARSE PRIMARY MESSAGE PARAMS // important that we use NAME for the alias as that is what the Overseer will get a lock on before calling us final String aliasName = message.getStr(NAME); @@ -162,7 +162,9 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList resul } } - public void addTargetCollection(ClusterState clusterState, NamedList results, String aliasName, ZkStateReader.AliasesManager aliasesManager, Map aliasMetadata, RoutedAlias.Action action) throws Exception { + @SuppressWarnings({"unchecked"}) + public void addTargetCollection(ClusterState clusterState, @SuppressWarnings({"rawtypes"})NamedList results, String aliasName, ZkStateReader.AliasesManager aliasesManager, Map aliasMetadata, RoutedAlias.Action action) throws Exception { + @SuppressWarnings({"rawtypes"}) NamedList createResults = createCollectionAndWait(clusterState, aliasName, aliasMetadata, action.targetCollection, ocmh); if (createResults != null) { @@ -171,7 +173,7 @@ public void addTargetCollection(ClusterState clusterState, NamedList results, St addCollectionToAlias(aliasName, aliasesManager, action.targetCollection); } - public void deleteTargetCollection(ClusterState clusterState, NamedList results, String aliasName, ZkStateReader.AliasesManager aliasesManager, RoutedAlias.Action action) throws Exception { + public void deleteTargetCollection(ClusterState clusterState, @SuppressWarnings({"rawtypes"})NamedList results, String aliasName, ZkStateReader.AliasesManager aliasesManager, RoutedAlias.Action action) throws Exception { Map delProps = new HashMap<>(); 
delProps.put(INVOKED_BY_ROUTED_ALIAS, (Runnable) () -> removeCollectionFromAlias(aliasName, aliasesManager, action.targetCollection)); diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/MigrateCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/MigrateCmd.java index 38e84e4d8484..c41cb7fcfa49 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/MigrateCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/MigrateCmd.java @@ -74,7 +74,7 @@ public MigrateCmd(OverseerCollectionMessageHandler ocmh) { @Override - public void call(ClusterState clusterState, ZkNodeProps message, NamedList results) throws Exception { + public void call(ClusterState clusterState, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { String extSourceCollectionName = message.getStr("collection"); String splitKey = message.getStr("split.key"); String extTargetCollectionName = message.getStr("target.collection"); @@ -136,10 +136,11 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList resul } } + @SuppressWarnings({"unchecked"}) private void migrateKey(ClusterState clusterState, DocCollection sourceCollection, Slice sourceSlice, DocCollection targetCollection, Slice targetSlice, String splitKey, int timeout, - NamedList results, String asyncId, ZkNodeProps message) throws Exception { + @SuppressWarnings({"rawtypes"})NamedList results, String asyncId, ZkNodeProps message) throws Exception { String tempSourceCollectionName = "split_" + sourceSlice.getName() + "_temp_" + targetSlice.getName(); ZkStateReader zkStateReader = ocmh.zkStateReader; if (clusterState.hasCollection(tempSourceCollectionName)) { diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java index bd4fb81f61ec..f567b2ef2ca7 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java @@ -66,11 +66,11 @@ public MoveReplicaCmd(OverseerCollectionMessageHandler ocmh) { } @Override - public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception { + public void call(ClusterState state, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { moveReplica(ocmh.zkStateReader.getClusterState(), message, results); } - private void moveReplica(ClusterState clusterState, ZkNodeProps message, NamedList results) throws Exception { + private void moveReplica(ClusterState clusterState, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { if (log.isDebugEnabled()) { log.debug("moveReplica() : {}", Utils.toJSONString(message)); } @@ -166,7 +166,8 @@ private void moveReplica(ClusterState clusterState, ZkNodeProps message, NamedLi } } - private void moveHdfsReplica(ClusterState clusterState, NamedList results, String dataDir, String targetNode, String async, + @SuppressWarnings({"unchecked"}) + private void moveHdfsReplica(ClusterState clusterState, @SuppressWarnings({"rawtypes"})NamedList results, String dataDir, String targetNode, String async, DocCollection coll, Replica replica, Slice slice, int timeout, boolean waitForFinalState) throws Exception { String skipCreateReplicaInClusterState = "true"; if (clusterState.getLiveNodes().contains(replica.getNodeName())) { @@ -179,6 +180,7 @@ private void moveHdfsReplica(ClusterState 
clusterState, NamedList results, Strin removeReplicasProps.getProperties().put(CoreAdminParams.DELETE_DATA_DIR, false); removeReplicasProps.getProperties().put(CoreAdminParams.DELETE_INDEX, false); if (async != null) removeReplicasProps.getProperties().put(ASYNC, async); + @SuppressWarnings({"rawtypes"}) NamedList deleteResult = new NamedList(); try { ocmh.deleteReplica(clusterState, removeReplicasProps, deleteResult, null); @@ -224,6 +226,7 @@ private void moveHdfsReplica(ClusterState clusterState, NamedList results, Strin ZkStateReader.REPLICA_TYPE, replica.getType().name()); if(async!=null) addReplicasProps.getProperties().put(ASYNC, async); + @SuppressWarnings({"rawtypes"}) NamedList addResult = new NamedList(); try { ocmh.addReplica(ocmh.zkStateReader.getClusterState(), addReplicasProps, addResult, null); @@ -234,6 +237,7 @@ private void moveHdfsReplica(ClusterState clusterState, NamedList results, Strin results.add("failure", errorString); log.warn("Error adding replica {} - trying to roll back...", addReplicasProps, e); addReplicasProps = addReplicasProps.plus(CoreAdminParams.NODE, replica.getNodeName()); + @SuppressWarnings({"rawtypes"}) NamedList rollback = new NamedList(); ocmh.addReplica(ocmh.zkStateReader.getClusterState(), addReplicasProps, rollback, null); if (rollback.get("failure") != null) { @@ -250,6 +254,7 @@ private void moveHdfsReplica(ClusterState clusterState, NamedList results, Strin log.debug("--- trying to roll back..."); // try to roll back addReplicasProps = addReplicasProps.plus(CoreAdminParams.NODE, replica.getNodeName()); + @SuppressWarnings({"rawtypes"}) NamedList rollback = new NamedList(); try { ocmh.addReplica(ocmh.zkStateReader.getClusterState(), addReplicasProps, rollback, null); @@ -269,7 +274,8 @@ private void moveHdfsReplica(ClusterState clusterState, NamedList results, Strin } } - private void moveNormalReplica(ClusterState clusterState, NamedList results, String targetNode, String async, + @SuppressWarnings({"unchecked"}) + private void moveNormalReplica(ClusterState clusterState, @SuppressWarnings({"rawtypes"})NamedList results, String targetNode, String async, DocCollection coll, Replica replica, Slice slice, int timeout, boolean waitForFinalState) throws Exception { String newCoreName = Assign.buildSolrCoreName(ocmh.overseer.getSolrCloudManager().getDistribStateManager(), coll, slice.getName(), replica.getType()); ZkNodeProps addReplicasProps = new ZkNodeProps( @@ -280,6 +286,7 @@ private void moveNormalReplica(ClusterState clusterState, NamedList results, Str ZkStateReader.REPLICA_TYPE, replica.getType().name()); if (async != null) addReplicasProps.getProperties().put(ASYNC, async); + @SuppressWarnings({"rawtypes"}) NamedList addResult = new NamedList(); SolrCloseableLatch countDownLatch = new SolrCloseableLatch(1, ocmh); ActiveReplicaWatcher watcher = null; @@ -325,6 +332,7 @@ private void moveNormalReplica(ClusterState clusterState, NamedList results, Str SHARD_ID_PROP, slice.getName(), REPLICA_PROP, replica.getName()); if (async != null) removeReplicasProps.getProperties().put(ASYNC, async); + @SuppressWarnings({"rawtypes"}) NamedList deleteResult = new NamedList(); try { ocmh.deleteReplica(clusterState, removeReplicasProps, deleteResult, null); diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerCollectionMessageHandler.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerCollectionMessageHandler.java index 8ac8f443f1e7..07ce33d27be6 100644 --- 
a/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerCollectionMessageHandler.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerCollectionMessageHandler.java @@ -220,7 +220,6 @@ public OverseerCollectionMessageHandler(ZkStateReader zkStateReader, String myId .put(MOCK_COLL_TASK, this::mockOperation) .put(MOCK_SHARD_TASK, this::mockOperation) .put(MOCK_REPLICA_TASK, this::mockOperation) - .put(MIGRATESTATEFORMAT, this::migrateStateFormat) .put(CREATESHARD, new CreateShardCmd(this)) .put(MIGRATE, new MigrateCmd(this)) .put(CREATE, new CreateCollectionCmd(this)) @@ -255,6 +254,7 @@ public OverseerSolrResponse processMessage(ZkNodeProps message, String operation MDCLoggingContext.setReplica(message.getStr(REPLICA_PROP)); log.debug("OverseerCollectionMessageHandler.processMessage : {} , {}", operation, message); + @SuppressWarnings({"rawtypes"}) NamedList results = new NamedList(); try { CollectionAction action = getCollectionAction(operation); @@ -286,7 +286,8 @@ public OverseerSolrResponse processMessage(ZkNodeProps message, String operation } @SuppressForbidden(reason = "Needs currentTimeMillis for mock requests") - private void mockOperation(ClusterState state, ZkNodeProps message, NamedList results) throws InterruptedException { + @SuppressWarnings({"unchecked"}) + private void mockOperation(ClusterState state, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws InterruptedException { //only for test purposes Thread.sleep(message.getInt("sleep", 1)); if (log.isInfoEnabled()) { @@ -303,7 +304,8 @@ private CollectionAction getCollectionAction(String operation) { return action; } - private void reloadCollection(ClusterState clusterState, ZkNodeProps message, NamedList results) { + @SuppressWarnings({"unchecked"}) + private void reloadCollection(ClusterState clusterState, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) { ModifiableSolrParams params = new ModifiableSolrParams(); params.set(CoreAdminParams.ACTION, CoreAdminAction.RELOAD.toString()); @@ -312,7 +314,7 @@ private void reloadCollection(ClusterState clusterState, ZkNodeProps message, Na } @SuppressWarnings("unchecked") - private void processRebalanceLeaders(ClusterState clusterState, ZkNodeProps message, NamedList results) + private void processRebalanceLeaders(ClusterState clusterState, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { checkRequired(message, COLLECTION_PROP, SHARD_ID_PROP, CORE_NAME_PROP, ELECTION_NODE_PROP, CORE_NODE_NAME_PROP, BASE_URL_PROP, REJOIN_AT_HEAD_PROP); @@ -341,7 +343,7 @@ private void processRebalanceLeaders(ClusterState clusterState, ZkNodeProps mess } @SuppressWarnings("unchecked") - private void processReplicaAddPropertyCommand(ClusterState clusterState, ZkNodeProps message, NamedList results) + private void processReplicaAddPropertyCommand(ClusterState clusterState, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { checkRequired(message, COLLECTION_PROP, SHARD_ID_PROP, REPLICA_PROP, PROPERTY_PROP, PROPERTY_VALUE_PROP); SolrZkClient zkClient = zkStateReader.getZkClient(); @@ -352,7 +354,7 @@ private void processReplicaAddPropertyCommand(ClusterState clusterState, ZkNodeP overseer.offerStateUpdate(Utils.toJSON(m)); } - private void processReplicaDeletePropertyCommand(ClusterState clusterState, ZkNodeProps message, NamedList results) + private void processReplicaDeletePropertyCommand(ClusterState clusterState, ZkNodeProps message, 
@SuppressWarnings({"rawtypes"})NamedList results) throws Exception { checkRequired(message, COLLECTION_PROP, SHARD_ID_PROP, REPLICA_PROP, PROPERTY_PROP); SolrZkClient zkClient = zkStateReader.getZkClient(); @@ -363,7 +365,7 @@ private void processReplicaDeletePropertyCommand(ClusterState clusterState, ZkNo overseer.offerStateUpdate(Utils.toJSON(m)); } - private void balanceProperty(ClusterState clusterState, ZkNodeProps message, NamedList results) throws Exception { + private void balanceProperty(ClusterState clusterState, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { if (StringUtils.isBlank(message.getStr(COLLECTION_PROP)) || StringUtils.isBlank(message.getStr(PROPERTY_PROP))) { throw new SolrException(ErrorCode.BAD_REQUEST, "The '" + COLLECTION_PROP + "' and '" + PROPERTY_PROP + @@ -409,7 +411,7 @@ private Map getCollectionStatus(Map collection, } @SuppressWarnings("unchecked") - void deleteReplica(ClusterState clusterState, ZkNodeProps message, NamedList results, Runnable onComplete) + void deleteReplica(ClusterState clusterState, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results, Runnable onComplete) throws Exception { ((DeleteReplicaCmd) commandMap.get(DELETEREPLICA)).deleteReplica(clusterState, message, results, onComplete); @@ -468,37 +470,8 @@ void checkResults(String label, NamedList results, boolean failureIsFata } } - - //TODO should we not remove in the next release ? - private void migrateStateFormat(ClusterState state, ZkNodeProps message, NamedList results) throws Exception { - final String collectionName = message.getStr(COLLECTION_PROP); - - boolean firstLoop = true; - // wait for a while until the state format changes - TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, timeSource); - while (! timeout.hasTimedOut()) { - DocCollection collection = zkStateReader.getClusterState().getCollection(collectionName); - if (collection == null) { - throw new SolrException(ErrorCode.BAD_REQUEST, "Collection: " + collectionName + " not found"); - } - if (collection.getStateFormat() == 2) { - // Done. - results.add("success", new SimpleOrderedMap<>()); - return; - } - - if (firstLoop) { - // Actually queue the migration command. 
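// Reviewer note (not part of the patch): MIGRATESTATEFORMAT and the
// migrateStateFormat() handler being removed in this hunk are gone because
// stateFormat=1 (the single shared /clusterstate.json) is dropped entirely in
// 9.0; every collection now has its own /collections/<name>/state.json, so
// there is nothing left to migrate at runtime. Clusters that still hold
// stateFormat=1 collections must migrate while on 8.x, e.g. via the 8.x
// Collections API:
//
//   /admin/collections?action=MIGRATESTATEFORMAT&collection=<name>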
- firstLoop = false; - ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, MIGRATESTATEFORMAT.toLower(), COLLECTION_PROP, collectionName); - overseer.offerStateUpdate(Utils.toJSON(m)); - } - timeout.sleep(100); - } - throw new SolrException(ErrorCode.SERVER_ERROR, "Could not migrate state format for collection: " + collectionName); - } - - void commit(NamedList results, String slice, Replica parentShardLeader) { + @SuppressWarnings({"unchecked"}) + void commit(@SuppressWarnings({"rawtypes"})NamedList results, String slice, Replica parentShardLeader) { log.debug("Calling soft commit to make sub shard updates visible"); String coreUrl = new ZkCoreNodeProps(parentShardLeader).getCoreUrl(); // HttpShardHandler is hard coded to send a QueryRequest hence we go direct @@ -616,7 +589,7 @@ void addPropertyParams(ZkNodeProps message, Map map) { } - private void modifyCollection(ClusterState clusterState, ZkNodeProps message, NamedList results) + private void modifyCollection(ClusterState clusterState, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { final String collectionName = message.getStr(ZkStateReader.COLLECTION_PROP); @@ -627,8 +600,7 @@ private void modifyCollection(ClusterState clusterState, ZkNodeProps message, Na if(configName != null) { validateConfigOrThrowSolrException(configName); - boolean isLegacyCloud = Overseer.isLegacy(zkStateReader); - createConfNode(cloudManager.getDistribStateManager(), configName, collectionName, isLegacyCloud); + createConfNode(cloudManager.getDistribStateManager(), configName, collectionName); reloadCollection(null, new ZkNodeProps(NAME, collectionName), results); } @@ -668,7 +640,7 @@ private void modifyCollection(ClusterState clusterState, ZkNodeProps message, Na } } - void cleanupCollection(String collectionName, NamedList results) throws Exception { + void cleanupCollection(String collectionName, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { log.error("Cleaning up collection [{}].", collectionName); Map props = makeMap( Overseer.QUEUE_OPERATION, DELETE.toLower(), @@ -707,7 +679,7 @@ Map waitToSeeReplicasInState(String collectionName, Collection< } } - List addReplica(ClusterState clusterState, ZkNodeProps message, NamedList results, Runnable onComplete) + List addReplica(ClusterState clusterState, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results, Runnable onComplete) throws Exception { return ((AddReplicaCmd) commandMap.get(ADDREPLICA)).addReplica(clusterState, message, results, onComplete); @@ -724,7 +696,7 @@ void validateConfigOrThrowSolrException(String configName) throws IOException, K * This doesn't validate the config (path) itself and is just responsible for creating the confNode. * That check should be done before the config node is created. 
*/ - public static void createConfNode(DistribStateManager stateManager, String configName, String coll, boolean isLegacyCloud) throws IOException, AlreadyExistsException, BadVersionException, KeeperException, InterruptedException { + public static void createConfNode(DistribStateManager stateManager, String configName, String coll) throws IOException, AlreadyExistsException, BadVersionException, KeeperException, InterruptedException { if (configName != null) { String collDir = ZkStateReader.COLLECTIONS_ZKNODE + "/" + coll; @@ -736,11 +708,7 @@ public static void createConfNode(DistribStateManager stateManager, String confi stateManager.makePath(collDir, data, CreateMode.PERSISTENT, false); } } else { - if(isLegacyCloud){ - log.warn("Could not obtain config name"); - } else { - throw new SolrException(ErrorCode.BAD_REQUEST,"Unable to get config name"); - } + throw new SolrException(ErrorCode.BAD_REQUEST,"Unable to get config name"); } } @@ -939,7 +907,7 @@ public boolean isClosed() { } protected interface Cmd { - void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception; + void call(ClusterState state, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception; } /* diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerRoleCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerRoleCmd.java index 16f93277eef0..8b2ce92664aa 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerRoleCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerRoleCmd.java @@ -56,7 +56,7 @@ public OverseerRoleCmd(OverseerCollectionMessageHandler ocmh, CollectionAction o } @Override - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception { ZkStateReader zkStateReader = ocmh.zkStateReader; SolrZkClient zkClient = zkStateReader.getZkClient(); @@ -68,11 +68,11 @@ public void call(ClusterState state, ZkNodeProps message, NamedList results) thr if (nodeExists = zkClient.exists(ZkStateReader.ROLES, true)) { roles = (Map) Utils.fromJSON(zkClient.getData(ZkStateReader.ROLES, null, new Stat(), true)); } else { - roles = new LinkedHashMap(1); + roles = new LinkedHashMap<>(1); } List nodeList = (List) roles.get(roleName); - if (nodeList == null) roles.put(roleName, nodeList = new ArrayList()); + if (nodeList == null) roles.put(roleName, nodeList = new ArrayList<>()); if (ADDROLE == operation) { log.info("Overseer role added to {}", node); if (!nodeList.contains(node)) nodeList.add(node); diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerStatusCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerStatusCmd.java index 6f0bbfd068f4..7bc51c957dc0 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerStatusCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerStatusCmd.java @@ -46,7 +46,7 @@ public OverseerStatusCmd(OverseerCollectionMessageHandler ocmh) { @Override @SuppressWarnings("unchecked") - public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception { + public void call(ClusterState state, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { ZkStateReader zkStateReader = ocmh.zkStateReader; String leaderNode = OverseerTaskProcessor.getLeaderNode(zkStateReader.getZkClient()); 
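// Reviewer note (not part of the patch): createConfNode() above also loses its
// isLegacyCloud flag; with legacyCloud gone, a missing config name is always a
// BAD_REQUEST instead of merely a warning. Separately, the many new
// @SuppressWarnings({"rawtypes"}) annotations in these handlers mark raw
// NamedList usages rather than generifying them. A minimal sketch of the
// warning-free alternative, assuming NamedList<Object> were acceptable at the
// call sites:
//
//   NamedList<Object> results = new NamedList<>();
//   results.add("success", new SimpleOrderedMap<>());
//
// Typing the results would ripple through Cmd.call() and every command
// implementation, which is presumably why the patch suppresses instead.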
results.add("leader", leaderNode); @@ -60,10 +60,15 @@ public void call(ClusterState state, ZkNodeProps message, NamedList results) thr zkStateReader.getZkClient().getData("/overseer/collection-queue-work",null, stat, true); results.add("overseer_collection_queue_size", stat.getNumChildren()); + @SuppressWarnings({"rawtypes"}) NamedList overseerStats = new NamedList(); + @SuppressWarnings({"rawtypes"}) NamedList collectionStats = new NamedList(); + @SuppressWarnings({"rawtypes"}) NamedList stateUpdateQueueStats = new NamedList(); + @SuppressWarnings({"rawtypes"}) NamedList workQueueStats = new NamedList(); + @SuppressWarnings({"rawtypes"}) NamedList collectionQueueStats = new NamedList(); Stats stats = ocmh.stats; for (Map.Entry entry : stats.getStats().entrySet()) { diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java index c362dd332fb4..8eaf8f8a5103 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java @@ -38,7 +38,6 @@ import org.apache.solr.client.solrj.cloud.autoscaling.Policy; import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.impl.HttpSolrClient; -import org.apache.solr.client.solrj.io.SolrClientCache; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.client.solrj.response.QueryResponse; @@ -163,7 +162,6 @@ public static Cmd get(String p) { Stream.of(Cmd.values()).collect(Collectors.toMap(Cmd::toLower, Function.identity()))); } - private SolrClientCache solrClientCache; private String zkHost; public ReindexCollectionCmd(OverseerCollectionMessageHandler ocmh) { @@ -171,7 +169,8 @@ public ReindexCollectionCmd(OverseerCollectionMessageHandler ocmh) { } @Override - public void call(ClusterState clusterState, ZkNodeProps message, NamedList results) throws Exception { + @SuppressWarnings({"unchecked"}) + public void call(ClusterState clusterState, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { log.debug("*** called: {}", message); @@ -268,7 +267,6 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList resul Exception exc = null; boolean createdTarget = false; try { - solrClientCache = new SolrClientCache(ocmh.overseer.getCoreContainer().getUpdateShardHandler().getDefaultHttpClient()); zkHost = ocmh.zkStateReader.getZkClient().getZkServerAddress(); // set the running flag reindexingState.clear(); @@ -324,7 +322,6 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList resul propMap.put(ZkStateReader.MAX_SHARDS_PER_NODE, maxShardsPerNode); propMap.put(CommonAdminParams.WAIT_FOR_FINAL_STATE, true); - propMap.put(DocCollection.STATE_FORMAT, message.getInt(DocCollection.STATE_FORMAT, coll.getStateFormat())); if (rf != null) { propMap.put(ZkStateReader.REPLICATION_FACTOR, rf); } @@ -350,7 +347,6 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList resul CommonParams.NAME, chkCollection, ZkStateReader.NUM_SHARDS_PROP, "1", ZkStateReader.REPLICATION_FACTOR, "1", - DocCollection.STATE_FORMAT, "2", CollectionAdminParams.COLL_CONF, "_default", CommonAdminParams.WAIT_FOR_FINAL_STATE, "true" ); @@ -504,7 +500,6 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList resul exc 
= e; aborted = true; } finally { - solrClientCache.close(); if (aborted) { cleanup(collection, targetCollection, chkCollection, daemonUrl, targetCollection, createdTarget); if (exc != null) { @@ -550,7 +545,7 @@ public static Map getReindexingState(DistribStateManager stateMa } private long getNumberOfDocs(String collection) { - CloudSolrClient solrClient = solrClientCache.getCloudSolrClient(zkHost); + CloudSolrClient solrClient = ocmh.overseer.getCoreContainer().getSolrClientCache().getCloudSolrClient(zkHost); try { ModifiableSolrParams params = new ModifiableSolrParams(); params.add(CommonParams.Q, "*:*"); @@ -580,12 +575,14 @@ private boolean maybeAbort(String collection) throws Exception { // XXX see #waitForDaemon() for why we need this private String getDaemonUrl(SolrResponse rsp, DocCollection coll) { + @SuppressWarnings({"unchecked"}) Map rs = (Map)rsp.getResponse().get("result-set"); if (rs == null || rs.isEmpty()) { if (log.isDebugEnabled()) { log.debug(" -- Missing daemon information in response: {}", Utils.toJSONString(rsp)); } } + @SuppressWarnings({"unchecked"}) List list = (List)rs.get("docs"); if (list == null) { if (log.isDebugEnabled()) { @@ -595,6 +592,7 @@ private String getDaemonUrl(SolrResponse rsp, DocCollection coll) { } String replicaName = null; for (Object o : list) { + @SuppressWarnings({"unchecked"}) Map map = (Map)o; String op = (String)map.get("DaemonOp"); if (op == null) { @@ -629,6 +627,7 @@ private String getDaemonUrl(SolrResponse rsp, DocCollection coll) { // XXX currently this is complicated to due a bug in the way the daemon 'list' // XXX operation is implemented - see SOLR-13245. We need to query the actual // XXX SolrCore where the daemon is running + @SuppressWarnings({"unchecked"}) private void waitForDaemon(String daemonName, String daemonUrl, String sourceCollection, String targetCollection, Map reindexingState) throws Exception { HttpClient client = ocmh.overseer.getCoreContainer().getUpdateShardHandler().getDefaultHttpClient(); try (HttpSolrClient solrClient = new HttpSolrClient.Builder() @@ -680,6 +679,7 @@ private void waitForDaemon(String daemonName, String daemonUrl, String sourceCol } } + @SuppressWarnings({"unchecked"}) private void killDaemon(String daemonName, String daemonUrl) throws Exception { log.debug("-- killing daemon {} at {}", daemonName, daemonUrl); HttpClient client = ocmh.overseer.getCoreContainer().getUpdateShardHandler().getDefaultHttpClient(); diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/RenameCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/RenameCmd.java index 7296f6c96dd2..17aea8e316a4 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/RenameCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/RenameCmd.java @@ -44,7 +44,7 @@ public RenameCmd(OverseerCollectionMessageHandler ocmh) { } @Override - public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception { + public void call(ClusterState state, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { String extCollectionName = message.getStr(CoreAdminParams.NAME); String target = message.getStr(CollectionAdminParams.TARGET); diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/ReplaceNodeCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/ReplaceNodeCmd.java index 9b80b719c458..f1c1f8cc8a35 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/ReplaceNodeCmd.java +++ 
b/solr/core/src/java/org/apache/solr/cloud/api/collections/ReplaceNodeCmd.java
@@ -62,7 +62,8 @@ public ReplaceNodeCmd(OverseerCollectionMessageHandler ocmh) { } @Override - public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception { + @SuppressWarnings({"unchecked"}) + public void call(ClusterState state, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { ZkStateReader zkStateReader = ocmh.zkStateReader; String source = message.getStr(CollectionParams.SOURCE_NODE, message.getStr("source")); String target = message.getStr(CollectionParams.TARGET_NODE, message.getStr("target"));
@@ -103,6 +104,7 @@ public void call(ClusterState state, ZkNodeProps message, NamedList results) thr AtomicReference sessionWrapperRef = new AtomicReference<>(); try { for (ZkNodeProps sourceReplica : sourceReplicas) { + @SuppressWarnings({"rawtypes"}) NamedList nl = new NamedList(); String sourceCollection = sourceReplica.getStr(COLLECTION_PROP); if (log.isInfoEnabled()) {
@@ -204,6 +206,7 @@ public void call(ClusterState state, ZkNodeProps message, NamedList results) thr log.info("Failed to create some replicas. Cleaning up all replicas on target node"); SolrCloseableLatch cleanupLatch = new SolrCloseableLatch(createdReplicas.size(), ocmh); for (ZkNodeProps createdReplica : createdReplicas) { + @SuppressWarnings({"rawtypes"}) NamedList deleteResult = new NamedList(); try { ocmh.deleteReplica(zkStateReader.getClusterState(), createdReplica.plus("parallel", "true"), deleteResult, () -> {
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/RestoreCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/RestoreCmd.java index 0e64e4bb5c2b..c7b5aa168957 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/RestoreCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/RestoreCmd.java
@@ -64,7 +64,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.common.cloud.DocCollection.STATE_FORMAT; import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP; import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE; import static org.apache.solr.common.cloud.ZkStateReader.NRT_REPLICAS;
@@ -88,6 +87,7 @@ public RestoreCmd(OverseerCollectionMessageHandler ocmh) { } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception { // TODO maybe we can inherit createCollection's options/code
@@ -107,6 +107,14 @@ public void call(ClusterState state, ZkNodeProps message, NamedList results) thr Properties properties = backupMgr.readBackupProperties(location, backupName); String backupCollection = properties.getProperty(BackupManager.COLLECTION_NAME_PROP); + + // Detect a backup of a stateFormat=1 collection (the pre-Solr 9 format); such a backup can't be restored here. + Object format = properties.get("stateFormat"); + if (format != null && !"2".equals(format)) { + throw new SolrException(ErrorCode.BAD_REQUEST, "Collection " + backupCollection + " is in stateFormat=" + format + + ", which is no longer supported in Solr 9 and above, so it can't be restored. If it originates in Solr 8 you can restore" + + " it there, migrate it to stateFormat=2 and back it up again; it will then be restorable on Solr 9."); + } String backupCollectionAlias = properties.getProperty(BackupManager.COLLECTION_ALIAS_PROP); DocCollection backupCollectionState = backupMgr.readCollectionState(location, backupName, backupCollection);
@@ -160,9 +168,6 @@ public void call(ClusterState state, ZkNodeProps message, NamedList results) thr Map propMap = new HashMap<>(); propMap.put(Overseer.QUEUE_OPERATION, CREATE.toString()); propMap.put("fromApi", "true"); // mostly true. Prevents autoCreated=true in the collection state. - if (properties.get(STATE_FORMAT) == null) { - propMap.put(STATE_FORMAT, "2"); - } propMap.put(REPLICATION_FACTOR, numNrtReplicas); propMap.put(NRT_REPLICAS, numNrtReplicas); propMap.put(TLOG_REPLICAS, numTlogReplicas);
@@ -182,7 +187,6 @@ public void call(ClusterState state, ZkNodeProps message, NamedList results) thr propMap.put(CollectionAdminParams.COLL_CONF, restoreConfigName); // router.* - @SuppressWarnings("unchecked") Map routerProps = (Map) backupCollectionState.getProperties().get(DocCollection.DOC_ROUTER); for (Map.Entry pair : routerProps.entrySet()) { propMap.put(DocCollection.DOC_ROUTER + "." + pair.getKey(), pair.getValue());
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/RoutedAlias.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/RoutedAlias.java index 097048c86b38..7b4efc6463fa 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/RoutedAlias.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/RoutedAlias.java
@@ -106,10 +106,10 @@ public static RoutedAlias fromProps(String aliasName, Map props) // v2 api case - the v2 -> v1 mapping mechanisms can't handle this conversion because they expect // strings or arrays of strings, not lists of objects. if (props.containsKey("router.routerList")) { - @SuppressWarnings("unchecked") // working around solrparams inability to express lists of objects - HashMap tmp = new HashMap(props); - @SuppressWarnings("unchecked") // working around solrparams inability to express lists of objects - List> v2RouterList = (List>) tmp.get("router.routerList"); + @SuppressWarnings({"unchecked", "rawtypes"}) + HashMap tmp = new HashMap(props); + @SuppressWarnings({"unchecked", "rawtypes"}) + List> v2RouterList = (List>) tmp.get("router.routerList"); Map o = v2RouterList.get(i); for (Map.Entry entry : o.entrySet()) { props.put(ROUTER_PREFIX + i + "."
+ entry.getKey(), String.valueOf(entry.getValue())); diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/SetAliasPropCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/SetAliasPropCmd.java index fdee1d10dde6..ea491f67a68e 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/SetAliasPropCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/SetAliasPropCmd.java @@ -47,7 +47,7 @@ public class SetAliasPropCmd implements Cmd { } @Override - public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception { + public void call(ClusterState state, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { String aliasName = message.getStr(NAME); final ZkStateReader.AliasesManager aliasesManager = messageHandler.zkStateReader.aliasesManager; diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java index 29d9a03702fe..2d04947be1cd 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java @@ -98,10 +98,11 @@ public SplitShardCmd(OverseerCollectionMessageHandler ocmh) { @SuppressWarnings("unchecked") @Override - public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception { + public void call(ClusterState state, ZkNodeProps message, @SuppressWarnings({"rawtypes"})NamedList results) throws Exception { split(state, message,(NamedList) results); } + @SuppressWarnings({"rawtypes"}) public boolean split(ClusterState clusterState, ZkNodeProps message, NamedList results) throws Exception { final String asyncId = message.getStr(ASYNC); @@ -625,7 +626,7 @@ public boolean split(ClusterState clusterState, ZkNodeProps message, NamedList 10) { log.info("too_many_suggestions"); @@ -125,6 +126,7 @@ public void call(ClusterState state, ZkNodeProps message, NamedList results) thr private void executeAll(List requests) throws Exception { if (requests.isEmpty()) return; for (ZkNodeProps props : requests) { + @SuppressWarnings({"rawtypes"}) NamedList result = new NamedList(); ocmh.commandMap.get(MOVEREPLICA) .call(ocmh.overseer.getSolrCloudManager().getClusterStateProvider().getClusterState(), diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanAction.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanAction.java index fdd34742799d..d129fdbefe99 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanAction.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanAction.java @@ -18,46 +18,23 @@ package org.apache.solr.cloud.autoscaling; -import java.io.IOException; +import java.util.Collections; +import java.util.Map; -import org.apache.solr.client.solrj.cloud.autoscaling.NoneSuggester; -import org.apache.solr.client.solrj.cloud.autoscaling.Policy; import org.apache.solr.client.solrj.cloud.SolrCloudManager; -import org.apache.solr.client.solrj.cloud.autoscaling.Suggester; -import org.apache.solr.client.solrj.impl.ClusterStateProvider; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.cloud.ClusterState; -import org.apache.solr.common.cloud.DocCollection; -import org.apache.solr.common.cloud.ZkStateReader; +import org.apache.solr.core.SolrResourceLoader; +import static 
org.apache.solr.common.cloud.ZkStateReader.AUTO_ADD_REPLICAS; + +/** + * This class configures the parent ComputePlanAction to compute plan + * only for collections which have autoAddReplicas=true. + */ public class AutoAddReplicasPlanAction extends ComputePlanAction { @Override - protected Suggester getSuggester(Policy.Session session, TriggerEvent event, ActionContext context, SolrCloudManager cloudManager) throws IOException { - // for backward compatibility - ClusterStateProvider stateProvider = cloudManager.getClusterStateProvider(); - String autoAddReplicas = stateProvider.getClusterProperty(ZkStateReader.AUTO_ADD_REPLICAS, (String) null); - if (autoAddReplicas != null && autoAddReplicas.equals("false")) { - return NoneSuggester.get(session); - } - - Suggester suggester = super.getSuggester(session, event, context, cloudManager); - ClusterState clusterState; - try { - clusterState = stateProvider.getClusterState(); - } catch (IOException e) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Exception getting cluster state", e); - } - - boolean anyCollections = false; - for (DocCollection collection: clusterState.getCollectionsMap().values()) { - if (collection.getAutoAddReplicas()) { - anyCollections = true; - suggester.hint(Suggester.Hint.COLL, collection.getName()); - } - } - - if (!anyCollections) return NoneSuggester.get(session); - return suggester; + public void configure(SolrResourceLoader loader, SolrCloudManager cloudManager, Map properties) throws TriggerValidationException { + properties.put("collections", Collections.singletonMap(AUTO_ADD_REPLICAS, "true")); + super.configure(loader, cloudManager, properties); } } diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScaling.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScaling.java index 7b2fee77a22d..1a191ee858b6 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScaling.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScaling.java @@ -212,6 +212,7 @@ public synchronized Trigger create(TriggerEventType type, String name, Map AUTO_ADD_REPLICAS_TRIGGER_PROPS = (Map) Utils.fromJSONString(AUTO_ADD_REPLICAS_TRIGGER_DSL); public static final String SCHEDULED_MAINTENANCE_TRIGGER_NAME = ".scheduled_maintenance"; @@ -239,6 +240,7 @@ public synchronized Trigger create(TriggerEventType type, String name, Map SCHEDULED_MAINTENANCE_TRIGGER_PROPS = (Map) Utils.fromJSONString(SCHEDULED_MAINTENANCE_TRIGGER_DSL); } diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java index 1341a240bab1..23ec0754cbc8 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java @@ -120,6 +120,7 @@ Optional> getSubpathExecutor(Li } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception { try { String httpMethod = (String) req.getContext().get("httpMethod"); @@ -187,11 +188,13 @@ public MapWriter.EntryWriter put(CharSequence k, Object v) { } + @SuppressWarnings({"unchecked"}) private void handleSuggestions(SolrQueryResponse rsp, AutoScalingConfig autoScalingConf, SolrParams params) { rsp.getValues().add("suggestions", PolicyHelper.getSuggestions(autoScalingConf, cloudManager, params)); } + @SuppressWarnings({"unchecked", "rawtypes"}) public void 
processOps(SolrQueryRequest req, SolrQueryResponse rsp, List ops) throws KeeperException, InterruptedException, IOException { while (true) { @@ -269,11 +272,13 @@ private AutoScalingConfig handleSetProperties(SolrQueryRequest req, SolrQueryRes return currentConfig.withProperties(configProps); } + @SuppressWarnings({"unchecked"}) private void handleDiagnostics(SolrQueryResponse rsp, AutoScalingConfig autoScalingConf) { Policy policy = autoScalingConf.getPolicy(); rsp.getValues().add("diagnostics", PolicyHelper.getDiagnostics(policy, cloudManager)); } + @SuppressWarnings({"unchecked"}) private AutoScalingConfig handleSetClusterPolicy(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation op, AutoScalingConfig currentConfig) throws KeeperException, InterruptedException, IOException { List> clusterPolicy = (List>) op.getCommandData(); @@ -293,6 +298,7 @@ private AutoScalingConfig handleSetClusterPolicy(SolrQueryRequest req, SolrQuery return currentConfig; } + @SuppressWarnings({"unchecked"}) private AutoScalingConfig handleSetClusterPreferences(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation op, AutoScalingConfig currentConfig) throws KeeperException, InterruptedException, IOException { List> preferences = (List>) op.getCommandData(); @@ -336,6 +342,7 @@ private AutoScalingConfig handleRemovePolicy(SolrQueryRequest req, SolrQueryResp return currentConfig; } + @SuppressWarnings({"unchecked"}) private AutoScalingConfig handleSetPolicies(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation op, AutoScalingConfig currentConfig) throws KeeperException, InterruptedException, IOException { Map policiesMap = op.getDataMap(); @@ -361,6 +368,7 @@ private AutoScalingConfig handleSetPolicies(SolrQueryRequest req, SolrQueryRespo return currentConfig; } + @SuppressWarnings({"unchecked"}) private AutoScalingConfig handleResumeTrigger(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation op, AutoScalingConfig currentConfig) throws KeeperException, InterruptedException { String triggerName = op.getStr(NAME); @@ -393,6 +401,7 @@ private AutoScalingConfig handleResumeTrigger(SolrQueryRequest req, SolrQueryRes return currentConfig; } + @SuppressWarnings({"unchecked"}) private AutoScalingConfig handleSuspendTrigger(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation op, AutoScalingConfig currentConfig) throws KeeperException, InterruptedException { String triggerName = op.getStr(NAME); @@ -525,6 +534,7 @@ private AutoScalingConfig handleSetListener(SolrQueryRequest req, SolrQueryRespo return currentConfig; } + @SuppressWarnings({"unchecked"}) private AutoScalingConfig handleSetTrigger(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation op, AutoScalingConfig currentConfig) throws KeeperException, InterruptedException { // we're going to modify the op - use a copy diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java index 87dd0c39ecec..33bf6b04dab6 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java @@ -17,38 +17,28 @@ package org.apache.solr.cloud.autoscaling; -import java.io.IOException; -import java.lang.invoke.MethodHandles; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import 
java.util.concurrent.atomic.AtomicInteger; - import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.cloud.SolrCloudManager; -import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig; -import org.apache.solr.client.solrj.cloud.autoscaling.NoneSuggester; -import org.apache.solr.client.solrj.cloud.autoscaling.Policy; -import org.apache.solr.client.solrj.cloud.autoscaling.PolicyHelper; -import org.apache.solr.client.solrj.cloud.autoscaling.Suggester; -import org.apache.solr.client.solrj.cloud.autoscaling.UnsupportedSuggester; +import org.apache.solr.client.solrj.cloud.autoscaling.*; import org.apache.solr.common.SolrException; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.params.AutoScalingParams; import org.apache.solr.common.params.CollectionParams; -import org.apache.solr.common.params.CoreAdminParams; import org.apache.solr.common.util.Pair; import org.apache.solr.common.util.StrUtils; import org.apache.solr.core.SolrResourceLoader; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.lang.invoke.MethodHandles; +import java.util.*; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Predicate; +import java.util.stream.Collectors; + import static org.apache.solr.cloud.autoscaling.TriggerEvent.NODE_NAMES; /** @@ -61,7 +51,8 @@ public class ComputePlanAction extends TriggerActionBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - Set collections = new HashSet<>(); + // accept all collections by default + Predicate collectionsPredicate = s -> true; public ComputePlanAction() { super(); @@ -72,9 +63,38 @@ public ComputePlanAction() { @Override public void configure(SolrResourceLoader loader, SolrCloudManager cloudManager, Map properties) throws TriggerValidationException { super.configure(loader, cloudManager, properties); - String colString = (String) properties.get("collections"); - if (colString != null && !colString.isEmpty()) { - collections.addAll(StrUtils.splitSmart(colString, ',')); + + Object value = properties.get("collections"); + if (value instanceof String) { + String colString = (String) value; + if (!colString.isEmpty()) { + List whiteListedCollections = StrUtils.splitSmart(colString, ','); + collectionsPredicate = whiteListedCollections::contains; + } + } else if (value instanceof Map) { + @SuppressWarnings({"unchecked"}) + Map matchConditions = (Map) value; + collectionsPredicate = collectionName -> { + try { + DocCollection collection = cloudManager.getClusterStateProvider().getCollection(collectionName); + if (collection == null) { + log.debug("Collection: {} was not found while evaluating conditions", collectionName); + return false; + } + for (Map.Entry entry : matchConditions.entrySet()) { + if (!entry.getValue().equals(collection.get(entry.getKey()))) { + if (log.isDebugEnabled()) { + log.debug("Collection: {} does not match condition: {}:{}", collectionName, entry.getKey(), entry.getValue()); + } + return false; + } + } + return true; + } catch (IOException e) { + log.error("Exception fetching collection information for: {}", collectionName, e); + return false; + } + }; } } @@ -114,6 +134,7 @@ public void process(TriggerEvent event, ActionContext context) throws Exception if (Thread.currentThread().isInterrupted()) { throw new InterruptedException("stopping - thread was 
interrupted"); } + @SuppressWarnings({"rawtypes"}) SolrRequest operation = suggester.getSuggestion(); opCount++; // prepare suggester for the next iteration @@ -142,16 +163,9 @@ public void process(TriggerEvent event, ActionContext context) throws Exception if (log.isDebugEnabled()) { log.debug("Computed Plan: {}", operation.getParams()); } - if (!collections.isEmpty()) { - String coll = operation.getParams().get(CoreAdminParams.COLLECTION); - if (coll != null && !collections.contains(coll)) { - // discard an op that doesn't affect our collections - log.debug("-- discarding due to collection={} not in {}", coll, collections); - continue; - } - } Map props = context.getProperties(); props.compute("operations", (k, v) -> { + @SuppressWarnings({"unchecked", "rawtypes"}) List operations = (List) v; if (operations == null) operations = new ArrayList<>(); operations.add(operation); @@ -200,6 +214,7 @@ protected int getMaxNumOps(TriggerEvent event, AutoScalingConfig autoScalingConf } protected int getRequestedNumOps(TriggerEvent event) { + @SuppressWarnings({"unchecked"}) Collection ops = (Collection) event.getProperty(TriggerEvent.REQUESTED_OPS, Collections.emptyList()); if (ops.isEmpty()) { return -1; @@ -217,33 +232,12 @@ protected Suggester getSuggester(Policy.Session session, TriggerEvent event, Act suggester = getNodeAddedSuggester(cloudManager, session, event); break; case NODELOST: - String preferredOp = (String) event.getProperty(AutoScalingParams.PREFERRED_OP, CollectionParams.CollectionAction.MOVEREPLICA.toLower()); - CollectionParams.CollectionAction action = CollectionParams.CollectionAction.get(preferredOp); - switch (action) { - case MOVEREPLICA: - suggester = session.getSuggester(action) - .hint(Suggester.Hint.SRC_NODE, event.getProperty(NODE_NAMES)); - break; - case DELETENODE: - int start = (Integer)event.getProperty(START, 0); - List srcNodes = (List) event.getProperty(NODE_NAMES); - if (srcNodes.isEmpty() || start >= srcNodes.size()) { - return NoneSuggester.get(session); - } - String sourceNode = srcNodes.get(start); - suggester = session.getSuggester(action) - .hint(Suggester.Hint.SRC_NODE, Collections.singletonList(sourceNode)); - event.getProperties().put(START, ++start); - break; - case NONE: - return NoneSuggester.get(session); - default: - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unsupported preferredOperation: " + action.toLower() + " specified for node lost trigger"); - } + suggester = getNodeLostSuggester(cloudManager, session, event); break; case SEARCHRATE: case METRIC: case INDEXSIZE: + @SuppressWarnings({"unchecked"}) List ops = (List)event.getProperty(TriggerEvent.REQUESTED_OPS, Collections.emptyList()); int start = (Integer)event.getProperty(START, 0); if (ops.isEmpty() || start >= ops.size()) { @@ -252,19 +246,22 @@ protected Suggester getSuggester(Policy.Session session, TriggerEvent event, Act TriggerEvent.Op op = ops.get(start); suggester = session.getSuggester(op.getAction()); if (suggester instanceof UnsupportedSuggester) { + @SuppressWarnings({"unchecked"}) List unsupportedOps = (List)context.getProperties().computeIfAbsent(TriggerEvent.UNSUPPORTED_OPS, k -> new ArrayList()); unsupportedOps.add(op); } for (Map.Entry e : op.getHints().entrySet()) { suggester = suggester.hint(e.getKey(), e.getValue()); } + if (applyCollectionHints(cloudManager, suggester) == 0) return NoneSuggester.get(session); suggester = suggester.forceOperation(true); event.getProperties().put(START, ++start); break; case SCHEDULED: - preferredOp = (String) 
event.getProperty(AutoScalingParams.PREFERRED_OP, CollectionParams.CollectionAction.MOVEREPLICA.toLower()); - action = CollectionParams.CollectionAction.get(preferredOp); + String preferredOp = (String) event.getProperty(AutoScalingParams.PREFERRED_OP, CollectionParams.CollectionAction.MOVEREPLICA.toLower()); + CollectionParams.CollectionAction action = CollectionParams.CollectionAction.get(preferredOp); suggester = session.getSuggester(action); + if (applyCollectionHints(cloudManager, suggester) == 0) return NoneSuggester.get(session); break; default: throw new UnsupportedOperationException("No support for events other than nodeAdded, nodeLost, searchRate, metric, scheduled and indexSize. Received: " + event.getEventType()); @@ -272,6 +269,54 @@ protected Suggester getSuggester(Policy.Session session, TriggerEvent event, Act return suggester; } + private Suggester getNodeLostSuggester(SolrCloudManager cloudManager, Policy.Session session, TriggerEvent event) throws IOException { + String preferredOp = (String) event.getProperty(AutoScalingParams.PREFERRED_OP, CollectionParams.CollectionAction.MOVEREPLICA.toLower()); + CollectionParams.CollectionAction action = CollectionParams.CollectionAction.get(preferredOp); + switch (action) { + case MOVEREPLICA: + Suggester s = session.getSuggester(action) + .hint(Suggester.Hint.SRC_NODE, event.getProperty(NODE_NAMES)); + if (applyCollectionHints(cloudManager, s) == 0) return NoneSuggester.get(session); + return s; + case DELETENODE: + int start = (Integer)event.getProperty(START, 0); + @SuppressWarnings({"unchecked"}) + List srcNodes = (List) event.getProperty(NODE_NAMES); + if (srcNodes.isEmpty() || start >= srcNodes.size()) { + return NoneSuggester.get(session); + } + String sourceNode = srcNodes.get(start); + s = session.getSuggester(action) + .hint(Suggester.Hint.SRC_NODE, event.getProperty(NODE_NAMES)); + if (applyCollectionHints(cloudManager, s) == 0) return NoneSuggester.get(session); + s.hint(Suggester.Hint.SRC_NODE, Collections.singletonList(sourceNode)); + event.getProperties().put(START, ++start); + return s; + case NONE: + return NoneSuggester.get(session); + default: + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unsupported preferredOperation: " + action.toLower() + " specified for node lost trigger"); + } + } + + /** + * Applies collection hints for all collections that match the {@link #collectionsPredicate} + * and returns the number of collections that matched. 
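 * Reviewer note (not part of the patch): the "collections" property of
 * ComputePlanAction now accepts either a comma-separated name list or a map of
 * collection properties that must all match; AutoAddReplicasPlanAction (above)
 * uses the map form by injecting {"autoAddReplicas": "true"}. A hypothetical
 * trigger configuration using the map form directly:
 *
 *   { "set-trigger": {
 *       "name": "node_lost_trigger", "event": "nodeLost", "waitFor": "120s",
 *       "actions": [
 *         { "name": "compute_plan", "class": "solr.ComputePlanAction",
 *           "collections": { "autoAddReplicas": "true" } },
 *         { "name": "execute_plan", "class": "solr.ExecutePlanAction" } ] } }
 *
 * Also worth double-checking in getNodeLostSuggester() above: the DELETENODE
 * branch now hints SRC_NODE twice, first with all of NODE_NAMES and then with
 * the single sourceNode, whereas the removed code hinted only the single node.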
+ * @return number of collections that match the {@link #collectionsPredicate} + * @throws IOException if {@link org.apache.solr.client.solrj.impl.ClusterStateProvider} throws IOException + */ + private int applyCollectionHints(SolrCloudManager cloudManager, Suggester s) throws IOException { + ClusterState clusterState = cloudManager.getClusterStateProvider().getClusterState(); + Set set = clusterState.getCollectionStates().keySet().stream() + .filter(collectionRef -> collectionsPredicate.test(collectionRef)) + .collect(Collectors.toSet()); + if (set.size() < clusterState.getCollectionStates().size()) { + // apply hints only if a subset of collections are selected + set.forEach(c -> s.hint(Suggester.Hint.COLL, c)); + } + return set.size(); + } + private Suggester getNodeAddedSuggester(SolrCloudManager cloudManager, Policy.Session session, TriggerEvent event) throws IOException { String preferredOp = (String) event.getProperty(AutoScalingParams.PREFERRED_OP, CollectionParams.CollectionAction.MOVEREPLICA.toLower()); Replica.Type replicaType = (Replica.Type) event.getProperty(AutoScalingParams.REPLICA_TYPE, Replica.Type.NRT); @@ -283,17 +328,18 @@ private Suggester getNodeAddedSuggester(SolrCloudManager cloudManager, Policy.Se case ADDREPLICA: // add all collection/shard pairs and let policy engine figure out which one // to place on the target node - // todo in future we can prune ineligible collection/shard pairs ClusterState clusterState = cloudManager.getClusterStateProvider().getClusterState(); Set> collShards = new HashSet<>(); - clusterState.getCollectionStates().forEach((collectionName, collectionRef) -> { - DocCollection docCollection = collectionRef.get(); - if (docCollection != null) { - docCollection.getActiveSlices().stream() - .map(slice -> new Pair<>(collectionName, slice.getName())) - .forEach(collShards::add); - } - }); + clusterState.getCollectionStates().entrySet().stream() + .filter(e -> collectionsPredicate.test(e.getKey())) + .forEach(entry -> { + DocCollection docCollection = entry.getValue().get(); + if (docCollection != null) { + docCollection.getActiveSlices().stream() + .map(slice -> new Pair<>(entry.getKey(), slice.getName())) + .forEach(collShards::add); + } + }); suggester.hint(Suggester.Hint.COLL_SHARD, collShards); suggester.hint(Suggester.Hint.REPLICATYPE, replicaType); break; diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java index e09b3ae1398d..1dfc3b1581c5 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java @@ -76,6 +76,7 @@ public void configure(SolrResourceLoader loader, SolrCloudManager cloudManager, } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void process(TriggerEvent event, ActionContext context) throws Exception { if (log.isDebugEnabled()) { log.debug("-- processing event: {} with context properties: {}", event, context.getProperties()); @@ -163,6 +164,7 @@ public void process(TriggerEvent event, ActionContext context) throws Exception } NamedList result = response.getResponse(); context.getProperties().compute("responses", (s, o) -> { + @SuppressWarnings({"unchecked"}) List> responses = (List>) o; if (responses == null) responses = new ArrayList<>(operations.size()); responses.add(result); diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveShardPlanAction.java 
b/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveShardPlanAction.java index 06c69a4fe8cb..d3de649cf7a1 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveShardPlanAction.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveShardPlanAction.java @@ -97,6 +97,7 @@ public void process(TriggerEvent event, ActionContext context) throws Exception if (log.isDebugEnabled()) { log.debug("-- delete inactive {} / {}", coll.getName(), s.getName()); } + @SuppressWarnings({"unchecked", "rawtypes"}) List operations = (List)context.getProperties().computeIfAbsent("operations", k -> new ArrayList<>()); operations.add(CollectionAdminRequest.deleteShard(coll.getName(), s.getName())); cleanup.computeIfAbsent(coll.getName(), c -> new ArrayList<>()).add(s.getName()); diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java index 327a07084569..da4036600d63 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java @@ -218,6 +218,7 @@ protected Map getState() { } @Override + @SuppressWarnings({"unchecked"}) protected void setState(Map state) { this.lastAboveEventMap.clear(); this.lastBelowEventMap.clear(); @@ -248,6 +249,7 @@ public void restoreState(AutoScaling.Trigger old) { } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void run() { synchronized(this) { if (isClosed) { diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/MetricTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/MetricTrigger.java index 9058a9a67e04..573ac77bd6ba 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/MetricTrigger.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/MetricTrigger.java @@ -91,6 +91,7 @@ protected Map getState() { @Override protected void setState(Map state) { lastNodeEvent.clear(); + @SuppressWarnings({"unchecked"}) Map nodeTimes = (Map) state.get("lastNodeEvent"); if (nodeTimes != null) { lastNodeEvent.putAll(nodeTimes); diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java index 6ae77bb7f3a8..42188e4587af 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java @@ -153,10 +153,12 @@ protected Map getState() { protected void setState(Map state) { this.lastLiveNodes.clear(); this.nodeNameVsTimeAdded.clear(); + @SuppressWarnings({"unchecked"}) Collection lastLiveNodes = (Collection)state.get("lastLiveNodes"); if (lastLiveNodes != null) { this.lastLiveNodes.addAll(lastLiveNodes); } + @SuppressWarnings({"unchecked"}) Map nodeNameVsTimeAdded = (Map)state.get("nodeNameVsTimeAdded"); if (nodeNameVsTimeAdded != null) { this.nodeNameVsTimeAdded.putAll(nodeNameVsTimeAdded); diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java index 0a7a2673b63c..b1c58183a393 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java @@ -143,10 +143,12 @@ protected Map getState() { protected void setState(Map state) { this.lastLiveNodes.clear(); 
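// Reviewer note (not part of the patch): the setState() methods in this and
// the other triggers restore state that has round-tripped through JSON, so
// every read is an unchecked cast; that is all the new suppressions
// acknowledge. A minimal sketch of the shape being restored, assuming the map
// came from Utils.fromJSON:
//
//   @SuppressWarnings({"unchecked"})
//   Collection<String> liveNodes = (Collection<String>) state.get("lastLiveNodes");
//   @SuppressWarnings({"unchecked"})
//   Map<String, Long> removedAt = (Map<String, Long>) state.get("nodeNameVsTimeRemoved");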
this.nodeNameVsTimeRemoved.clear(); + @SuppressWarnings({"unchecked"}) Collection lastLiveNodes = (Collection)state.get("lastLiveNodes"); if (lastLiveNodes != null) { this.lastLiveNodes.addAll(lastLiveNodes); } + @SuppressWarnings({"unchecked"}) Map nodeNameVsTimeRemoved = (Map)state.get("nodeNameVsTimeRemoved"); if (nodeNameVsTimeRemoved != null) { this.nodeNameVsTimeRemoved.putAll(nodeNameVsTimeRemoved); diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java index d84bff462bea..e080eecc8ab8 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java @@ -320,6 +320,7 @@ public synchronized void add(AutoScaling.Trigger newTrigger) throws Exception { ActionContext actionContext = new ActionContext(cloudManager, newTrigger, new HashMap<>()); for (TriggerAction action : actions) { + @SuppressWarnings({"unchecked"}) List beforeActions = (List) actionContext.getProperties().computeIfAbsent(TriggerEventProcessorStage.BEFORE_ACTION.toString(), k -> new ArrayList()); beforeActions.add(action.getName()); triggerListeners1.fireListeners(event.getSource(), event, TriggerEventProcessorStage.BEFORE_ACTION, action.getName(), actionContext); @@ -329,6 +330,7 @@ public synchronized void add(AutoScaling.Trigger newTrigger) throws Exception { triggerListeners1.fireListeners(event.getSource(), event, TriggerEventProcessorStage.FAILED, action.getName(), actionContext, e, null); throw new TriggerActionException(event.getSource(), action.getName(), "Error processing action for trigger event: " + event, e); } + @SuppressWarnings({"unchecked"}) List afterActions = (List) actionContext.getProperties().computeIfAbsent(TriggerEventProcessorStage.AFTER_ACTION.toString(), k -> new ArrayList()); afterActions.add(action.getName()); triggerListeners1.fireListeners(event.getSource(), event, TriggerEventProcessorStage.AFTER_ACTION, action.getName(), actionContext); @@ -428,6 +430,7 @@ private void waitForPendingTasks(AutoScaling.Trigger newTrigger, List state) { lastNodeEvent.clear(); lastShardEvent.clear(); lastReplicaEvent.clear(); + @SuppressWarnings({"unchecked"}) Map collTimes = (Map)state.get("lastCollectionEvent"); if (collTimes != null) { lastCollectionEvent.putAll(collTimes); } + @SuppressWarnings({"unchecked"}) Map nodeTimes = (Map)state.get("lastNodeEvent"); if (nodeTimes != null) { lastNodeEvent.putAll(nodeTimes); } + @SuppressWarnings({"unchecked"}) Map shardTimes = (Map)state.get("lastShardEvent"); if (shardTimes != null) { lastShardEvent.putAll(shardTimes); } + @SuppressWarnings({"unchecked"}) Map replicaTimes = (Map)state.get("lastReplicaEvent"); if (replicaTimes != null) { lastReplicaEvent.putAll(replicaTimes); @@ -651,6 +655,7 @@ private void calculateHotOps(List ops, /** * This method implements a primitive form of proportional controller with a limiter. 
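 * Reviewer note (not part of the patch): the proportional step below computes
 *   numReplicas = round((r - aboveRate) / replicationFactor),
 * i.e. one extra replica per replicationFactor's worth of excess rate. For
 * example, with aboveRate = 100 events/sec, replicationFactor = 2 and an
 * observed rate r = 108, it proposes round(8 / 2) = 4 new replicas; the
 * limiter then bounds that result (at least 1 replica per event, per the
 * comment in the method body).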
*/ + @SuppressWarnings({"unchecked", "rawtypes"}) private void addReplicaHints(String collection, String shard, double r, int replicationFactor, List> hints) { int numReplicas = (int)Math.round((r - aboveRate) / (double) replicationFactor); // in one event add at least 1 replica diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/SystemLogListener.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/SystemLogListener.java index 09b0865ec90c..b8414784447e 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/SystemLogListener.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/SystemLogListener.java @@ -80,6 +80,7 @@ public void configure(SolrResourceLoader loader, SolrCloudManager cloudManager, } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void onEvent(TriggerEvent event, TriggerEventProcessorStage stage, String actionName, ActionContext context, Throwable error, String message) throws Exception { try { @@ -153,6 +154,7 @@ private void addMap(String prefix, SolrInputDocument doc, Map ma }); } + @SuppressWarnings({"rawtypes"}) private void addOperations(SolrInputDocument doc, List operations) { if (operations == null || operations.isEmpty()) { return; diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerBase.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerBase.java index 535fd00c8544..d045f6ab2fcf 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerBase.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerBase.java @@ -109,6 +109,7 @@ public void configure(SolrResourceLoader loader, SolrCloudManager cloudManager, } this.enabled = Boolean.parseBoolean(String.valueOf(this.properties.getOrDefault("enabled", "true"))); this.waitForSecond = ((Number) this.properties.getOrDefault("waitFor", -1L)).intValue(); + @SuppressWarnings({"unchecked"}) List> o = (List>) properties.get("actions"); if (o != null && !o.isEmpty()) { actions = new ArrayList<>(3); @@ -243,6 +244,7 @@ public boolean equals(Object obj) { * @see #getState * @lucene.internal */ + @SuppressWarnings({"unchecked"}) public Map deepCopyState() { return Utils.getDeepCopy(getState(), 10, false, true); } @@ -273,6 +275,7 @@ public void saveState() { } @Override + @SuppressWarnings({"unchecked"}) public void restoreState() { byte[] data = null; String path = ZkStateReader.SOLR_AUTOSCALING_TRIGGER_STATE_PATH + "/" + getName(); diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEvent.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEvent.java index c61556ca3fab..91482e5083c2 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEvent.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEvent.java @@ -59,6 +59,7 @@ public Op(CollectionParams.CollectionAction action, Suggester.Hint hint, Object addHint(hint, hintValue); } + @SuppressWarnings({"unchecked"}) public void addHint(Suggester.Hint hint, Object value) { hint.validator.accept(value); if (hint.multiValued) { @@ -85,6 +86,7 @@ public void writeMap(EntryWriter ew) throws IOException { ew.put("hints", hints); } + @SuppressWarnings({"unchecked", "rawtypes"}) public static Op fromMap(Map map) { if (!map.containsKey("action")) { return null; @@ -281,6 +283,7 @@ public String toString() { return Utils.toJSONString(this); } + @SuppressWarnings({"unchecked"}) public static TriggerEvent fromMap(Map map) { String id = (String)map.get("id"); String source = (String)map.get("source"); 
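// Reviewer note (not part of the patch): the unchecked suppressions added to
// TriggerEvent.fromMap() and Op.fromMap() exist because events round-trip
// through untyped JSON maps on the trigger event queue (TriggerEventQueue,
// below). A minimal sketch of that cycle:
//
//   byte[] data = Utils.toJSON(event);               // TriggerEvent is a MapWriter
//   @SuppressWarnings({"unchecked"})
//   Map<String, Object> map = (Map<String, Object>) Utils.fromJSON(data);
//   TriggerEvent restored = TriggerEvent.fromMap(map);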
@@ -294,6 +297,7 @@ public static TriggerEvent fromMap(Map map) { return res; } + @SuppressWarnings({"unchecked"}) public static void fixOps(String type, Map properties) { List ops = (List)properties.get(type); if (ops != null && !ops.isEmpty()) { diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEventQueue.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEventQueue.java index 9f2da7a91146..ec41495ba80c 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEventQueue.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEventQueue.java @@ -72,6 +72,7 @@ public TriggerEvent peekEvent() { continue; } try { + @SuppressWarnings({"unchecked"}) Map map = (Map) Utils.fromJSON(data); return fromMap(map); } catch (Exception e) { @@ -98,6 +99,7 @@ public TriggerEvent pollEvent() { continue; } try { + @SuppressWarnings({"unchecked"}) Map map = (Map) Utils.fromJSON(data); return fromMap(map); } catch (Exception e) { diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerUtils.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerUtils.java index 71a1ce4e763a..cecd933fd2f5 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerUtils.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerUtils.java @@ -61,6 +61,7 @@ public static void checkRequiredPropertyNames(Map properties, Ma } } + @SuppressWarnings({"unchecked", "rawtypes"}) public static void checkProperty(Map properties, Map results, String name, boolean required, Class... acceptClasses) { Object value = properties.get(name); if (value == null) { diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java index 1a8fdb4ffce9..25624f4e38bd 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java @@ -181,7 +181,6 @@ public SimCloudManager(TimeSource timeSource) throws Exception { if (distribStateManager == null) { this.stateManager = new SimDistribStateManager(SimDistribStateManager.createNewRootNode()); // init common paths - stateManager.makePath(ZkStateReader.CLUSTER_STATE); stateManager.makePath(ZkStateReader.CLUSTER_PROPS); stateManager.makePath(ZkStateReader.SOLR_AUTOSCALING_CONF_PATH); stateManager.makePath(ZkStateReader.LIVE_NODES_ZKNODE); @@ -217,6 +216,7 @@ public SimCloudManager(TimeSource timeSource) throws Exception { solrClient = new MockSearchableSolrClient() { @Override + @SuppressWarnings({"rawtypes"}) public NamedList request(SolrRequest request, String collection) throws SolrServerException, IOException { if (collection != null) { if (request instanceof AbstractUpdateRequest) { @@ -626,6 +626,7 @@ public void simRestartOverseer(String killNodeId) throws Exception { * @return future to obtain results * @see #getBackgroundTaskFailureCount */ + @SuppressWarnings({"unchecked", "rawtypes"}) public Future submit(Callable callable) { return simCloudManagerPool.submit(new LoggingCallable(backgroundTaskFailureCounter, callable)); } @@ -715,6 +716,7 @@ public DistributedQueueFactory getDistributedQueueFactory() { } @Override + @SuppressWarnings({"rawtypes"}) public SolrResponse request(SolrRequest req) throws IOException { try { // NOTE: we're doing 2 odd things here: @@ -743,6 +745,8 @@ private void incrementCount(String op) { * @param req autoscaling request * @return 
results */ + + @SuppressWarnings({"unchecked", "rawtypes"}) public SolrResponse simHandleSolrRequest(SolrRequest req) throws IOException, InterruptedException { // pay the penalty for remote request, at least 5 ms timeSource.sleep(5); @@ -790,12 +794,14 @@ public SolrResponse simHandleSolrRequest(SolrRequest req) throws IOException, In if (metricsHistoryHandler != null) { metricsHistoryHandler.handleRequest(queryRequest, queryResponse); } else { + queryRequest.close(); throw new UnsupportedOperationException("must add at least 1 node first"); } } else { if (metricsHandler != null) { metricsHandler.handleRequest(queryRequest, queryResponse); } else { + queryRequest.close(); throw new UnsupportedOperationException("must add at least 1 node first"); } } @@ -866,6 +872,7 @@ public SolrResponse simHandleSolrRequest(SolrRequest req) throws IOException, In if (log.isTraceEnabled()) { log.trace("Invoking Collection Action :{} with params {}", action.toLower(), params.toQueryString()); } + @SuppressWarnings({"rawtypes"}) NamedList results = new NamedList(); rsp.setResponse(results); incrementCount(action.name()); diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java index df14c768e1cc..6943f2c99aae 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java @@ -48,7 +48,9 @@ import com.google.common.util.concurrent.AtomicDouble; import org.apache.commons.math3.stat.descriptive.SummaryStatistics; import org.apache.solr.client.solrj.cloud.DistribStateManager; +import org.apache.solr.client.solrj.cloud.autoscaling.AlreadyExistsException; import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig; +import org.apache.solr.client.solrj.cloud.autoscaling.BadVersionException; import org.apache.solr.client.solrj.cloud.autoscaling.Policy; import org.apache.solr.client.solrj.cloud.autoscaling.PolicyHelper; import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo; @@ -98,6 +100,7 @@ import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.update.SolrIndexSplitter; import org.apache.zookeeper.CreateMode; +import org.apache.zookeeper.KeeperException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -119,8 +122,8 @@ *
  • using autoscaling policy for replica placements
  • *
• maintaining an up-to-date list of /live_nodes and nodeAdded / nodeLost markers
  • *
  • running a simulated leader election on collection changes (with throttling), when needed
  • - *
  • maintaining an up-to-date /clusterstate.json (single file format), which also tracks replica states, - * leader election changes, replica property changes, etc. Note: this file is only written, + *
• maintaining up-to-date per-collection /state.json files, which also track replica states, + * leader election changes, replica property changes, etc. Note: these files are only written, + * but never read by the framework!
  • *
  • maintaining an up-to-date /clusterprops.json. Note: this file is only written, but never read by the * framework!
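The hunk that follows replaces the single AtomicReference cache of all collection states with one CachedCollectionRef per collection, so mutators invalidate only the collection they touched and readers rebuild lazily on the next access. A self-contained sketch of that cache shape, with a Supplier standing in for the real getColl() rebuild logic; note the sketch marks the cached field volatile so the unlocked fast path is safe, whereas the patch itself relies on the surrounding locking discipline:

import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Supplier;

// Lazily rebuilt, invalidatable cache: readers return the cached value when
// present; the first reader after an invalidate() rebuilds it under a lock.
class CachedRef<T> {
  private final Supplier<T> builder; // hypothetical stand-in for getColl()'s rebuild
  private final ReentrantLock lock = new ReentrantLock();
  private volatile T value;

  CachedRef(Supplier<T> builder) {
    this.builder = builder;
  }

  T get() {
    T v = value;              // fast path: no locking while cached
    if (v != null) {
      return v;
    }
    lock.lock();              // slow path: one thread rebuilds, others wait
    try {
      if (value == null) {
        value = builder.get();
      }
      return value;
    } finally {
      lock.unlock();
    }
  }

  void invalidate() {         // called by mutators after changing source data
    lock.lock();
    try {
      value = null;
    } finally {
      lock.unlock();
    }
  }
}

Keeping one such ref per collection in a ConcurrentHashMap, as the patch does, turns a cluster-wide cache rebuild into a per-collection one.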
  • @@ -153,12 +156,132 @@ public class SimClusterStateProvider implements ClusterStateProvider { private final Map> opDelays = new ConcurrentHashMap<>(); - private volatile int clusterStateVersion = 0; private volatile String overseerLeader = null; private volatile Map lastSavedProperties = null; - private final AtomicReference> collectionsStatesRef = new AtomicReference<>(); + private class CachedCollectionRef { + private final String name; + private int zkVersion; + private DocCollection coll; + ReentrantLock lock = new ReentrantLock(); + + CachedCollectionRef(String name, int zkVersion) { + this.name = name; + this.zkVersion = zkVersion; + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + public DocCollection getColl() throws InterruptedException, IOException { + DocCollection dc = coll; + if (dc != null) { + return dc; + } + lock.lock(); + try { + if (coll != null) { + return coll; + } else { + Map>> collMap = new HashMap<>(); + nodeReplicaMap.forEach((n, replicas) -> { + synchronized (replicas) { + replicas.forEach(ri -> { + if (!ri.getCollection().equals(name)) { + return; + } + Map props; + synchronized (ri) { + props = new HashMap<>(ri.getVariables()); + } + props.put(ZkStateReader.NODE_NAME_PROP, n); + props.put(ZkStateReader.CORE_NAME_PROP, ri.getCore()); + props.put(ZkStateReader.REPLICA_TYPE, ri.getType().toString()); + props.put(ZkStateReader.STATE_PROP, ri.getState().toString()); + Replica r = new Replica(ri.getName(), props, ri.getCollection(), ri.getShard()); + collMap.computeIfAbsent(ri.getCollection(), c -> new HashMap<>()) + .computeIfAbsent(ri.getShard(), s -> new HashMap<>()) + .put(ri.getName(), r); + }); + } + }); + + // add empty slices + sliceProperties.forEach((c, perSliceProps) -> { + if (!c.equals(name)) { + return; + } + perSliceProps.forEach((slice, props) -> { + collMap.computeIfAbsent(c, co -> new ConcurrentHashMap<>()).computeIfAbsent(slice, s -> new ConcurrentHashMap<>()); + }); + }); + // add empty collections + collProperties.keySet().forEach(c -> { + if (!c.equals(name)) { + return; + } + collMap.computeIfAbsent(c, co -> new ConcurrentHashMap<>()); + }); + + Map> shards = collMap.get(name); + Map slices = new HashMap<>(); + shards.forEach((s, replicas) -> { + Map sliceProps = sliceProperties.computeIfAbsent(name, c -> new ConcurrentHashMap<>()).computeIfAbsent(s, sl -> new ConcurrentHashMap<>()); + Slice slice = new Slice(s, replicas, sliceProps, name); + slices.put(s, slice); + }); + Map collProps = collProperties.computeIfAbsent(name, c -> new ConcurrentHashMap<>()); + Map routerProp = (Map) collProps.getOrDefault(DocCollection.DOC_ROUTER, Collections.singletonMap("name", DocRouter.DEFAULT_NAME)); + DocRouter router = DocRouter.getDocRouter((String)routerProp.getOrDefault("name", DocRouter.DEFAULT_NAME)); + String path = ZkStateReader.getCollectionPath(name); + coll = new DocCollection(name, slices, collProps, router, zkVersion + 1); + try { + SimDistribStateManager stateManager = cloudManager.getSimDistribStateManager(); + byte[] data = Utils.toJSON(Collections.singletonMap(name, coll)); + if (!stateManager.hasData(path)) { + try { + stateManager.makePath(path, data, CreateMode.PERSISTENT, true); + } catch (AlreadyExistsException e) { + // try updating + stateManager.setData(path, data, zkVersion); + } + } else { + stateManager.setData(path, data, zkVersion); + } + // verify version + VersionedData vd = stateManager.getData(path); + assert vd.getVersion() == zkVersion + 1; + zkVersion++; + } catch (KeeperException | BadVersionException e) { + // 
should never happen? + throw new RuntimeException("error saving " + coll, e); + } + } + } finally { + lock.unlock(); + } + return coll; + } + + public int getZkVersion() { + lock.lock(); + try { + return zkVersion; + } finally { + lock.unlock(); + } + } + + public void invalidate() { + lock.lock(); + try { + coll = null; + } finally { + lock.unlock(); + } + } + } + + private final Map collectionsStatesRef = new ConcurrentHashMap<>(); private final Random bulkUpdateRandom = new Random(0); @@ -199,6 +322,7 @@ public void copyFrom(ClusterStateProvider other) throws Exception { * Initialize from an existing cluster state * @param initialState initial cluster state */ + @SuppressWarnings({"unchecked"}) public void simSetClusterState(ClusterState initialState) throws Exception { lock.lockInterruptibly(); try { @@ -207,6 +331,7 @@ public void simSetClusterState(ClusterState initialState) throws Exception { sliceProperties.clear(); nodeReplicaMap.clear(); liveNodes.clear(); + collectionsStatesRef.clear(); for (String nodeId : stateManager.listData(ZkStateReader.LIVE_NODES_ZKNODE)) { if (stateManager.hasData(ZkStateReader.LIVE_NODES_ZKNODE + "/" + nodeId)) { stateManager.removeData(ZkStateReader.LIVE_NODES_ZKNODE + "/" + nodeId, -1); @@ -223,6 +348,8 @@ public void simSetClusterState(ClusterState initialState) throws Exception { createEphemeralLiveNode(nodeId); } initialState.forEachCollection(dc -> { + // DocCollection will be created later + collectionsStatesRef.put(dc.getName(), new CachedCollectionRef(dc.getName(), dc.getZNodeVersion())); collProperties.computeIfAbsent(dc.getName(), name -> new ConcurrentHashMap<>()).putAll(dc.getProperties()); opDelays.computeIfAbsent(dc.getName(), Utils.NEW_HASHMAP_FUN).putAll(defaultOpDelays); dc.getSlices().forEach(s -> { @@ -248,7 +375,6 @@ public void simSetClusterState(ClusterState initialState) throws Exception { }); }); }); - collectionsStatesRef.set(null); } finally { lock.unlock(); } @@ -287,9 +413,8 @@ public String simGetRandomNode(Random random) { return nodes.get(random.nextInt(nodes.size())); } - // todo: maybe hook up DistribStateManager /clusterstate.json watchers? - private ReplicaInfo getReplicaInfo(Replica r) { + @SuppressWarnings({"unchecked"}) final List list = nodeReplicaMap.computeIfAbsent (r.getNodeName(), Utils.NEW_SYNCHRONIZED_ARRAYLIST_FUN); synchronized (list) { @@ -306,6 +431,7 @@ private ReplicaInfo getReplicaInfo(Replica r) { * Add a new node to the cluster. 
* @param nodeId unique node id */ + @SuppressWarnings({"unchecked"}) public void simAddNode(String nodeId) throws Exception { ensureNotClosed(); if (liveNodes.contains(nodeId)) { @@ -331,8 +457,8 @@ public boolean simRemoveNode(String nodeId) throws Exception { // mark every replica on that node as down boolean res = liveNodes.remove(nodeId); setReplicaStates(nodeId, Replica.State.DOWN, collections); - if (!collections.isEmpty()) { - collectionsStatesRef.set(null); + for (String collection : collections) { + collectionsStatesRef.get(collection).invalidate(); } // remove ephemeral nodes stateManager.getRoot().removeEphemeralChildren(nodeId); @@ -363,7 +489,6 @@ public void simRemoveDeadNodes() throws Exception { try { Set myNodes = new HashSet<>(nodeReplicaMap.keySet()); myNodes.removeAll(liveNodes.get()); - collectionsStatesRef.set(null); } finally { lock.unlock(); } @@ -404,6 +529,7 @@ public synchronized String simGetOverseerLeader() { // this method needs to be called under a lock private void setReplicaStates(String nodeId, Replica.State state, Set changedCollections) { + @SuppressWarnings({"unchecked"}) List replicas = nodeReplicaMap.computeIfAbsent(nodeId, Utils.NEW_SYNCHRONIZED_ARRAYLIST_FUN); synchronized (replicas) { replicas.forEach(r -> { @@ -452,7 +578,7 @@ public boolean simRestoreNode(String nodeId) throws Exception { try { setReplicaStates(nodeId, Replica.State.ACTIVE, collections); if (!collections.isEmpty()) { - collectionsStatesRef.set(null); + collections.forEach(c -> collectionsStatesRef.get(c).invalidate()); simRunLeaderElection(collections, true); return true; } else { @@ -469,6 +595,7 @@ * @param message replica details * @param results result of the operation */ + @SuppressWarnings({"unchecked", "rawtypes"}) public void simAddReplica(ZkNodeProps message, NamedList results) throws Exception { if (message.getStr(CommonAdminParams.ASYNC) != null) { results.add(CoreAdminParams.REQUESTID, message.getStr(CommonAdminParams.ASYNC)); @@ -526,6 +653,7 @@ public void simAddReplica(ZkNodeProps message, NamedList results) throws Excepti * @param replicaInfo replica info * @param runLeaderElection if true then run a leader election after adding the replica. 
*/ + @SuppressWarnings({"unchecked"}) public void simAddReplica(String nodeId, ReplicaInfo replicaInfo, boolean runLeaderElection) throws Exception { ensureNotClosed(); lock.lockInterruptibly(); @@ -604,7 +732,7 @@ public void simAddReplica(String nodeId, ReplicaInfo replicaInfo, boolean runLea cloudManager.getMetricManager().registerGauge(null, registry, () -> replicaSize, "", true, Type.CORE_IDX.metricsAttribute); // at this point nuke our cached DocCollection state - collectionsStatesRef.set(null); + collectionsStatesRef.get(replicaInfo.getCollection()).invalidate(); log.trace("-- simAddReplica {}", replicaInfo); if (runLeaderElection) { simRunLeaderElection(replicaInfo.getCollection(), replicaInfo.getShard(), true); @@ -624,6 +752,7 @@ public void simRemoveReplica(String nodeId, String collection, String coreNodeNa lock.lockInterruptibly(); try { + @SuppressWarnings({"unchecked"}) final List replicas = nodeReplicaMap.computeIfAbsent (nodeId, Utils.NEW_SYNCHRONIZED_ARRAYLIST_FUN); synchronized (replicas) { @@ -633,7 +762,7 @@ public void simRemoveReplica(String nodeId, String collection, String coreNodeNa colShardReplicaMap.computeIfAbsent(ri.getCollection(), c -> new ConcurrentHashMap<>()) .computeIfAbsent(ri.getShard(), s -> new ArrayList<>()) .remove(ri); - collectionsStatesRef.set(null); + collectionsStatesRef.get(ri.getCollection()).invalidate(); opDelay(ri.getCollection(), CollectionParams.CollectionAction.DELETEREPLICA.name()); @@ -668,26 +797,6 @@ public void simRemoveReplica(String nodeId, String collection, String coreNodeNa } } - /** - * Save clusterstate.json to {@link DistribStateManager}. - * @return saved state - */ - private ClusterState saveClusterState(ClusterState state) throws IOException { - ensureNotClosed(); - byte[] data = Utils.toJSON(state); - try { - VersionedData oldData = stateManager.getData(ZkStateReader.CLUSTER_STATE); - int version = oldData != null ? oldData.getVersion() : 0; - assert clusterStateVersion == version : "local clusterStateVersion out of sync"; - stateManager.setData(ZkStateReader.CLUSTER_STATE, data, version); - log.debug("** saved cluster state version {}", version); - clusterStateVersion++; - } catch (Exception e) { - throw new IOException(e); - } - return state; - } - /** * Delay an operation by a configured amount. * @param collection collection name @@ -725,7 +834,7 @@ private void simRunLeaderElection(Collection collections, boolean saveCl if (saveClusterState) { lock.lockInterruptibly(); try { - collectionsStatesRef.set(null); + collections.forEach(c -> collectionsStatesRef.get(c).invalidate()); } finally { lock.unlock(); } @@ -865,13 +974,13 @@ private void simRunLeaderElection(final String collection, final String slice, } if (log.isDebugEnabled()) { log.debug("-- elected new leader for {} / {} (currentVersion={}): {}", collection, - s.getName(), clusterStateVersion, ri); + s.getName(), col.getZNodeVersion(), ri); } stateChanged.set(true); } } finally { if (stateChanged.get() || saveState) { - collectionsStatesRef.set(null); + collectionsStatesRef.get(collection).invalidate(); } lock.unlock(); } @@ -882,6 +991,8 @@ private void simRunLeaderElection(final String collection, final String slice, * @param props collection details * @param results results of the operation. 
*/ + + @SuppressWarnings({"unchecked", "rawtypes"}) public void simCreateCollection(ZkNodeProps props, NamedList results) throws Exception { ensureNotClosed(); if (props.getStr(CommonAdminParams.ASYNC) != null) { @@ -889,7 +1000,7 @@ public void simCreateCollection(ZkNodeProps props, NamedList results) throws Exc } boolean waitForFinalState = props.getBool(CommonAdminParams.WAIT_FOR_FINAL_STATE, false); final String collectionName = props.getStr(NAME); - log.debug("-- simCreateCollection {}, currentVersion={}", collectionName, clusterStateVersion); + log.debug("-- simCreateCollection {}", collectionName); String router = props.getStr("router.name", DocRouter.DEFAULT_NAME); String policy = props.getStr(Policy.POLICY); @@ -903,12 +1014,6 @@ public void simCreateCollection(ZkNodeProps props, NamedList results) throws Exc CreateCollectionCmd.checkReplicaTypes(props); // always force getting fresh state - lock.lockInterruptibly(); - try { - collectionsStatesRef.set(null); - } finally { - lock.unlock(); - } final ClusterState clusterState = getClusterState(); String withCollection = props.getStr(CollectionAdminParams.WITH_COLLECTION); @@ -962,8 +1067,7 @@ public void simCreateCollection(ZkNodeProps props, NamedList results) throws Exc CollectionAdminParams.COLOCATED_WITH, collectionName); cmd = new CollectionMutator(cloudManager).modifyCollection(clusterState,message); } - // force recreation of collection states - collectionsStatesRef.set(null); + collectionsStatesRef.put(collectionName, new CachedCollectionRef(collectionName, 0)); } finally { lock.unlock(); @@ -1043,7 +1147,7 @@ public void simCreateCollection(ZkNodeProps props, NamedList results) throws Exc // force recreation of collection states lock.lockInterruptibly(); try { - collectionsStatesRef.set(null); + collectionsStatesRef.get(collectionName).invalidate(); } finally { lock.unlock(); } @@ -1057,7 +1161,7 @@ public void simCreateCollection(ZkNodeProps props, NamedList results) throws Exc } } results.add("success", ""); - log.debug("-- finished createCollection {}, currentVersion={}", collectionName, clusterStateVersion); + log.debug("-- finished createCollection {}", collectionName); } /** @@ -1066,6 +1170,7 @@ public void simCreateCollection(ZkNodeProps props, NamedList results) throws Exc * @param async async id * @param results results of the operation */ + @SuppressWarnings({"unchecked", "rawtypes"}) public void simDeleteCollection(String collection, String async, NamedList results) throws Exception { ensureNotClosed(); if (async != null) { @@ -1106,7 +1211,8 @@ public void simDeleteCollection(String collection, String async, NamedList resul } } }); - collectionsStatesRef.set(null); + cloudManager.getDistribStateManager().removeRecursively(ZkStateReader.getCollectionPath(collection), true, true); + collectionsStatesRef.remove(collection); results.add("success", ""); } catch (Exception e) { log.warn("Exception", e); @@ -1121,7 +1227,13 @@ public void simDeleteCollection(String collection, String async, NamedList resul public void simDeleteAllCollections() throws Exception { lock.lockInterruptibly(); try { - collectionsStatesRef.set(null); + collectionsStatesRef.keySet().forEach(name -> { + try { + cloudManager.getDistribStateManager().removeRecursively(ZkStateReader.getCollectionPath(name), true, true); + } catch (Exception e) { + log.error("Unable to delete collection state.json"); + } + }); collProperties.clear(); sliceProperties.clear(); @@ -1153,6 +1265,7 @@ public void simDeleteAllCollections() throws Exception { * @param 
message operation details * @param results operation results. */ + @SuppressWarnings({"unchecked", "rawtypes"}) public void simMoveReplica(ZkNodeProps message, NamedList results) throws Exception { ensureNotClosed(); if (message.getStr(CommonAdminParams.ASYNC) != null) { @@ -1219,6 +1332,7 @@ public void simMoveReplica(ZkNodeProps message, NamedList results) throws Except * @param message operation details * @param results operation results */ + @SuppressWarnings({"unchecked", "rawtypes"}) public void simCreateShard(ZkNodeProps message, NamedList results) throws Exception { ensureNotClosed(); if (message.getStr(CommonAdminParams.ASYNC) != null) { @@ -1285,6 +1399,7 @@ public void simCreateShard(ZkNodeProps message, NamedList results) throws Except * @param message operation details * @param results operation results. */ + @SuppressWarnings({"unchecked", "rawtypes"}) public void simSplitShard(ZkNodeProps message, NamedList results) throws Exception { ensureNotClosed(); if (message.getStr(CommonAdminParams.ASYNC) != null) { @@ -1468,7 +1583,7 @@ public void simSplitShard(ZkNodeProps message, NamedList results) throws Excepti } // invalidate cached state - collectionsStatesRef.set(null); + collectionsStatesRef.get(collectionName).invalidate(); } finally { SplitShardCmd.unlockForSplit(cloudManager, collectionName, sliceName.get()); lock.unlock(); @@ -1482,6 +1597,7 @@ public void simSplitShard(ZkNodeProps message, NamedList results) throws Excepti * @param message operation details * @param results operation results */ + @SuppressWarnings({"unchecked", "rawtypes"}) public void simDeleteShard(ZkNodeProps message, NamedList results) throws Exception { ensureNotClosed(); if (message.getStr(CommonAdminParams.ASYNC) != null) { @@ -1516,7 +1632,7 @@ public void simDeleteShard(ZkNodeProps message, NamedList results) throws Except } } }); - collectionsStatesRef.set(null); + collectionsStatesRef.get(collectionName).invalidate(); results.add("success", ""); } catch (Exception e) { results.add("failure", e.toString()); @@ -1525,6 +1641,7 @@ public void simDeleteShard(ZkNodeProps message, NamedList results) throws Except } } + @SuppressWarnings({"rawtypes"}) public void createSystemCollection() throws IOException { try { @@ -1939,6 +2056,7 @@ private static String createRegistryName(String collection, String shard, Replic * Saves cluster properties to clusterprops.json. 
* @return current properties */ + @SuppressWarnings({"unchecked"}) private synchronized Map saveClusterProperties() throws Exception { if (lastSavedProperties != null && lastSavedProperties.equals(clusterProperties)) { return lastSavedProperties; @@ -2004,7 +2122,7 @@ public void simSetCollectionProperties(String coll, Map properti props.clear(); props.putAll(properties); } - collectionsStatesRef.set(null); + collectionsStatesRef.get(coll).invalidate(); } finally { lock.unlock(); } @@ -2025,7 +2143,7 @@ public void simSetCollectionProperty(String coll, String key, String value) thro } else { props.put(key, value); } - collectionsStatesRef.set(null); + collectionsStatesRef.get(coll).invalidate(); } finally { lock.unlock(); } @@ -2046,7 +2164,7 @@ public void simSetSliceProperties(String coll, String slice, Map if (properties != null) { sliceProps.putAll(properties); } - collectionsStatesRef.set(null); + collectionsStatesRef.get(coll).invalidate(); } finally { lock.unlock(); } @@ -2175,6 +2293,7 @@ public void simSetShardValue(String collection, String shard, String key, Object } } + @SuppressWarnings({"unchecked"}) public void simSetReplicaValues(String node, Map>> source, boolean overwrite) { List infos = nodeReplicaMap.get(node); if (infos == null) { @@ -2206,6 +2325,7 @@ public void simSetReplicaValues(String node, Map simGetReplicaInfos(String node) { + @SuppressWarnings({"unchecked"}) final List replicas = nodeReplicaMap.computeIfAbsent (node, Utils.NEW_SYNCHRONIZED_ARRAYLIST_FUN); // make a defensive copy to avoid ConcurrentModificationException @@ -2247,7 +2367,6 @@ public Map> simGetCollectionStats() throws IOExcepti lock.lockInterruptibly(); try { final Map> stats = new TreeMap<>(); - collectionsStatesRef.set(null); ClusterState state = getClusterState(); state.forEachCollection(coll -> { Map perColl = new LinkedHashMap<>(); @@ -2286,7 +2405,9 @@ public Map> simGetCollectionStats() throws IOExcepti } continue; } - AtomicLong buffered = (AtomicLong)sliceProperties.get(coll.getName()).get(s.getName()).get(BUFFERED_UPDATES); + AtomicLong buffered = (AtomicLong)sliceProperties + .getOrDefault(coll.getName(), Collections.emptyMap()) + .getOrDefault(s.getName(), Collections.emptyMap()).get(BUFFERED_UPDATES); if (buffered != null) { bufferedDocs += buffered.get(); } @@ -2389,7 +2510,7 @@ public ClusterState getClusterState() throws IOException { lock.lockInterruptibly(); try { Map states = getCollectionStates(); - ClusterState state = new ClusterState(clusterStateVersion, liveNodes.get(), states); + ClusterState state = new ClusterState(liveNodes.get(), states); return state; } finally { lock.unlock(); @@ -2399,65 +2520,18 @@ public ClusterState getClusterState() throws IOException { } } - // this method uses a simple cache in collectionsStatesRef. 
Operations that modify - // cluster state should always reset this cache so that the changes become visible private Map getCollectionStates() throws IOException, InterruptedException { lock.lockInterruptibly(); try { - Map collectionStates = collectionsStatesRef.get(); - if (collectionStates != null) { - return collectionStates; - } - collectionsStatesRef.set(null); - log.debug("** creating new collection states, currentVersion={}", clusterStateVersion); - Map>> collMap = new HashMap<>(); - nodeReplicaMap.forEach((n, replicas) -> { - synchronized (replicas) { - replicas.forEach(ri -> { - Map props; - synchronized (ri) { - props = new HashMap<>(ri.getVariables()); - } - props.put(ZkStateReader.NODE_NAME_PROP, n); - props.put(ZkStateReader.CORE_NAME_PROP, ri.getCore()); - props.put(ZkStateReader.REPLICA_TYPE, ri.getType().toString()); - props.put(ZkStateReader.STATE_PROP, ri.getState().toString()); - Replica r = new Replica(ri.getName(), props, ri.getCollection(), ri.getShard()); - collMap.computeIfAbsent(ri.getCollection(), c -> new HashMap<>()) - .computeIfAbsent(ri.getShard(), s -> new HashMap<>()) - .put(ri.getName(), r); - }); - } - }); - - // add empty slices - sliceProperties.forEach((c, perSliceProps) -> { - perSliceProps.forEach((slice, props) -> { - collMap.computeIfAbsent(c, co -> new ConcurrentHashMap<>()).computeIfAbsent(slice, s -> new ConcurrentHashMap<>()); - }); - }); - // add empty collections - collProperties.keySet().forEach(c -> { - collMap.computeIfAbsent(c, co -> new ConcurrentHashMap<>()); - }); - - Map res = new HashMap<>(); - collMap.forEach((coll, shards) -> { - Map slices = new HashMap<>(); - shards.forEach((s, replicas) -> { - Map sliceProps = sliceProperties.computeIfAbsent(coll, c -> new ConcurrentHashMap<>()).computeIfAbsent(s, sl -> new ConcurrentHashMap<>()); - Slice slice = new Slice(s, replicas, sliceProps, coll); - slices.put(s, slice); - }); - Map collProps = collProperties.computeIfAbsent(coll, c -> new ConcurrentHashMap<>()); - Map routerProp = (Map) collProps.getOrDefault(DocCollection.DOC_ROUTER, Collections.singletonMap("name", DocRouter.DEFAULT_NAME)); - DocRouter router = DocRouter.getDocRouter((String)routerProp.getOrDefault("name", DocRouter.DEFAULT_NAME)); - DocCollection dc = new DocCollection(coll, slices, collProps, router, clusterStateVersion, ZkStateReader.CLUSTER_STATE); - res.put(coll, dc); + Map collectionStates = new HashMap<>(); + collectionsStatesRef.forEach((name, cached) -> { + try { + collectionStates.put(name, cached.getColl()); + } catch (Exception e) { + throw new RuntimeException("error building collection " + name + " state", e); + } }); - saveClusterState(new ClusterState(clusterStateVersion, liveNodes.get(), res)); - collectionsStatesRef.set(res); - return res; + return collectionStates; } finally { lock.unlock(); } diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java index bc845e457682..ea9fa550f594 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java @@ -619,6 +619,7 @@ public List multi(Iterable ops) throws BadVersionException, NoSuch } @Override + @SuppressWarnings({"unchecked"}) public AutoScalingConfig getAutoScalingConfig(Watcher watcher) throws InterruptedException, IOException { Map map = new HashMap<>(); int version = 0; diff --git 
a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistributedQueueFactory.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistributedQueueFactory.java index 318f63ecfc55..fb17881c00f0 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistributedQueueFactory.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistributedQueueFactory.java @@ -184,6 +184,7 @@ public byte[] take() throws Exception { } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void offer(byte[] data) throws Exception { Timer.Context time = stats.time(dir + "_offer"); updateLock.lockInterruptibly(); diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java index 2a8103cdfbe0..17b6d28efcf2 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java @@ -152,6 +152,7 @@ public void simSetNodeValue(String node, String key, Object value) throws Interr * @param key property name * @param value property value. */ + @SuppressWarnings({"unchecked"}) public void simAddNodeValue(String node, String key, Object value) throws InterruptedException { lock.lockInterruptibly(); try { @@ -341,7 +342,9 @@ public Map>> getReplicaInfo(String node, C Map>> res = new HashMap<>(); // TODO: probably needs special treatment for "metrics:solr.core..." tags for (ReplicaInfo r : replicas) { + @SuppressWarnings({"unchecked"}) Map> perCollection = res.computeIfAbsent(r.getCollection(), Utils.NEW_HASHMAP_FUN); + @SuppressWarnings({"unchecked"}) List perShard = perCollection.computeIfAbsent(r.getShard(), Utils.NEW_ARRAYLIST_FUN); // XXX filter out some properties? perShard.add(r); diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimScenario.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimScenario.java index cb7a8bf79254..6adb812ae17b 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimScenario.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimScenario.java @@ -130,6 +130,7 @@ public void init(SolrParams params) { * {@link #execute(SimScenario)}. 
* @param scenario current scenario */ + @SuppressWarnings({"unchecked"}) public void prepareCurrentParams(SimScenario scenario) { Properties props = new Properties(); scenario.context.forEach((k, v) -> { @@ -405,8 +406,9 @@ public void execute(SimScenario scenario) throws Exception { throw new IOException(SimAction.SAVE_SNAPSHOT + " must specify 'path'"); } boolean redact = Boolean.parseBoolean(params.get("redact", "false")); - SnapshotCloudManager snapshotCloudManager = new SnapshotCloudManager(scenario.cluster, null); - snapshotCloudManager.saveSnapshot(new File(path), true, redact); + try (SnapshotCloudManager snapshotCloudManager = new SnapshotCloudManager(scenario.cluster, null)) { + snapshotCloudManager.saveSnapshot(new File(path), true, redact); + } } } @@ -415,6 +417,7 @@ public void execute(SimScenario scenario) throws Exception { */ public static class LoadAutoscaling extends SimOp { @Override + @SuppressWarnings({"unchecked"}) public void execute(SimScenario scenario) throws Exception { Map map; boolean addDefaults = Boolean.parseBoolean(params.get("withDefaultTriggers", "true")); @@ -539,9 +542,11 @@ public void execute(SimScenario scenario) throws Exception { public static class ApplySuggestions extends SimOp { @Override public void execute(SimScenario scenario) throws Exception { + @SuppressWarnings({"unchecked"}) List suggestions = (List) scenario.context.getOrDefault(SUGGESTIONS_CTX_PROP, Collections.emptyList()); int unresolvedCount = 0; for (Suggester.SuggestionInfo suggestion : suggestions) { + @SuppressWarnings({"rawtypes"}) SolrRequest operation = suggestion.getOperation(); if (operation == null) { unresolvedCount++; @@ -595,6 +600,7 @@ public void execute(SimScenario scenario) throws Exception { req.setContentWriter(new RequestWriter.StringPayloadContentWriter(streamBody, "application/json")); } SolrResponse rsp = scenario.cluster.request(req); + @SuppressWarnings({"unchecked"}) List responses = (List) scenario.context.computeIfAbsent(RESPONSES_CTX_PROP, Utils.NEW_ARRAYLIST_FUN); responses.add(rsp); } @@ -704,6 +710,7 @@ public void wait(int waitSec) throws Exception { /** * Set a temporary listener to wait for a specific trigger event processing. */ + @SuppressWarnings({"unchecked"}) public static class SetEventListener extends SimOp { @Override public void execute(SimScenario scenario) throws Exception { @@ -738,10 +745,10 @@ public void execute(SimScenario scenario) throws Exception { } } final AutoScalingConfig.TriggerListenerConfig listenerConfig = new AutoScalingConfig.TriggerListenerConfig(name, cfgMap); - TriggerListener listener = new SimWaitListener(scenario.cluster.getTimeSource(), listenerConfig); if (scenario.context.containsKey("_sim_waitListener_" + trigger)) { throw new IOException("currently only one listener can be set per trigger. 
Trigger name: " + trigger); } + TriggerListener listener = new SimWaitListener(scenario.cluster.getTimeSource(), listenerConfig); scenario.context.put("_sim_waitListener_" + trigger, listener); scenario.cluster.getOverseerTriggerThread().getScheduledTriggers().addAdditionalListener(listener); } @@ -763,6 +770,7 @@ public void execute(SimScenario scenario) throws Exception { listener.wait(waitSec); scenario.context.remove(TRIGGER_EVENT_PREFIX + trigger); if (listener.getEvent() != null) { + @SuppressWarnings({"unchecked"}) Map ev = listener.getEvent().toMap(new LinkedHashMap<>()); scenario.context.put(TRIGGER_EVENT_PREFIX + trigger, ev); } @@ -940,6 +948,7 @@ public void execute(SimScenario scenario) throws Exception { */ public static class Dump extends SimOp { @Override + @SuppressWarnings({"unchecked"}) public void execute(SimScenario scenario) throws Exception { boolean redact = Boolean.parseBoolean(params.get("redact", "false")); boolean withData = Boolean.parseBoolean(params.get("withData", "false")); @@ -977,6 +986,7 @@ public void execute(SimScenario scenario) throws Exception { RedactionUtils.RedactionContext ctx = SimUtils.getRedactionContext(snapshotCloudManager.getClusterStateProvider().getClusterState()); data = RedactionUtils.redactNames(ctx.getRedactions(), data); } + snapshotCloudManager.close(); scenario.console.println(data); } } @@ -988,6 +998,7 @@ public void execute(SimScenario scenario) throws Exception { * @throws Exception on syntax errors */ public static SimScenario load(String data) throws Exception { + @SuppressWarnings("resource") SimScenario scenario = new SimScenario(); String[] lines = data.split("\\r?\\n"); for (int i = 0; i < lines.length; i++) { @@ -1109,7 +1120,7 @@ public void run() throws Exception { } @Override - public void close() throws Exception { + public void close() throws IOException { if (cluster != null) { cluster.close(); cluster = null; diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimUtils.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimUtils.java index 6e0420089e5c..03c1f5b47526 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimUtils.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimUtils.java @@ -349,6 +349,7 @@ public static Map calculateStats(SolrCloudManager cloudManager, * @param req request * @return request payload and parameters converted to V1 params */ + @SuppressWarnings({"unchecked"}) public static ModifiableSolrParams v2AdminRequestToV1Params(V2Request req) { Map reqMap = new HashMap<>(); req.toMap(reqMap); diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotCloudManager.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotCloudManager.java index 8ea286f4303f..9dc0b4a85c8e 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotCloudManager.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotCloudManager.java @@ -90,6 +90,7 @@ public SnapshotCloudManager(SolrCloudManager other, AutoScalingConfig config) th SimUtils.checkConsistency(this, config); } + @SuppressWarnings({"unchecked"}) public SnapshotCloudManager(Map snapshot) throws Exception { Objects.requireNonNull(snapshot); init( @@ -120,6 +121,7 @@ public void saveSnapshot(File targetDir, boolean withAutoscaling, boolean redact } } + @SuppressWarnings({"unchecked"}) public static SnapshotCloudManager readSnapshot(File sourceDir) throws Exception { if (!sourceDir.exists()) { throw new 
Exception("Source path doesn't exist: " + sourceDir); @@ -241,6 +243,7 @@ public TimeSource getTimeSource() { } @Override + @SuppressWarnings({"rawtypes"}) public SolrResponse request(SolrRequest req) throws IOException { throw new UnsupportedOperationException("request"); } diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotClusterStateProvider.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotClusterStateProvider.java index 3655fb382020..af385c166074 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotClusterStateProvider.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotClusterStateProvider.java @@ -21,6 +21,7 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -29,7 +30,6 @@ import org.apache.solr.client.solrj.impl.ClusterStateProvider; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; -import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.util.Utils; import org.noggit.CharArr; import org.noggit.JSONWriter; @@ -46,17 +46,35 @@ public class SnapshotClusterStateProvider implements ClusterStateProvider { public SnapshotClusterStateProvider(ClusterStateProvider other) throws Exception { liveNodes = Set.copyOf(other.getLiveNodes()); ClusterState otherState = other.getClusterState(); - clusterState = new ClusterState(otherState.getZNodeVersion(), liveNodes, otherState.getCollectionsMap()); + clusterState = new ClusterState(liveNodes, otherState.getCollectionsMap()); clusterProperties = new HashMap<>(other.getClusterProperties()); } + @SuppressWarnings({"unchecked"}) public SnapshotClusterStateProvider(Map snapshot) { Objects.requireNonNull(snapshot); liveNodes = Set.copyOf((Collection)snapshot.getOrDefault("liveNodes", Collections.emptySet())); clusterProperties = (Map)snapshot.getOrDefault("clusterProperties", Collections.emptyMap()); Map stateMap = new HashMap<>((Map)snapshot.getOrDefault("clusterState", Collections.emptyMap())); - Number version = (Number)stateMap.remove("version"); - clusterState = ClusterState.load(version != null ? 
version.intValue() : null, stateMap, liveNodes, ZkStateReader.CLUSTER_STATE); + Map collectionStates = new HashMap<>(); + // back-compat with format = 1 + Integer stateVersion = Integer.valueOf(String.valueOf(stateMap.getOrDefault("version", 0))); + stateMap.remove("version"); + stateMap.forEach((name, state) -> { + Map mutableState = (Map)state; + Map collMap = (Map) mutableState.get(name); + if (collMap == null) { + // snapshot in format 1 + collMap = mutableState; + mutableState = Collections.singletonMap(name, state); + } + int version = Integer.parseInt(String.valueOf(collMap.getOrDefault("zNodeVersion", stateVersion))); + collMap.remove("zNodeVersion"); + byte[] data = Utils.toJSON(mutableState); + ClusterState collState = ClusterState.createFromJson(version, data, Collections.emptySet()); + collectionStates.put(name, collState.getCollection(name)); + }); + clusterState = new ClusterState(liveNodes, collectionStates); } public Map getSnapshot() { @@ -67,14 +85,18 @@ public Map getSnapshot() { } Map stateMap = new HashMap<>(); snapshot.put("clusterState", stateMap); - stateMap.put("version", clusterState.getZNodeVersion()); clusterState.forEachCollection(coll -> { CharArr out = new CharArr(); JSONWriter writer = new JSONWriter(out, 2); coll.write(writer); String json = out.toString(); try { - stateMap.put(coll.getName(), Utils.fromJSON(json.getBytes("UTF-8"))); + @SuppressWarnings({"unchecked"}) + Map collMap = new LinkedHashMap<>((Map)Utils.fromJSON(json.getBytes("UTF-8"))); + collMap.put("zNodeVersion", coll.getZNodeVersion()); + // format compatible with the real /state.json, which uses a mini-ClusterState + // consisting of a single collection + stateMap.put(coll.getName(), Collections.singletonMap(coll.getName(), collMap)); } catch (UnsupportedEncodingException e) { throw new RuntimeException("should not happen!", e); } diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotDistribStateManager.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotDistribStateManager.java index fc6bd2d927d5..eb3a29fb399e 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotDistribStateManager.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotDistribStateManager.java @@ -89,6 +89,7 @@ public SnapshotDistribStateManager(Map snapshot) { */ public SnapshotDistribStateManager(Map snapshot, AutoScalingConfig config) { snapshot.forEach((path, value) -> { + @SuppressWarnings({"unchecked"}) Map map = (Map)value; Number version = (Number)map.getOrDefault("version", 0); String owner = (String)map.get("owner"); @@ -209,6 +210,7 @@ public List multi(Iterable ops) throws BadVersionException, NoSuch } @Override + @SuppressWarnings({"unchecked"}) public AutoScalingConfig getAutoScalingConfig(Watcher watcher) throws InterruptedException, IOException { VersionedData vd = dataMap.get(ZkStateReader.SOLR_AUTOSCALING_CONF_PATH); Map map = new HashMap<>(); diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotNodeStateProvider.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotNodeStateProvider.java index 8ccf8491bf55..e8b78280881f 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotNodeStateProvider.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotNodeStateProvider.java @@ -48,6 +48,7 @@ public class SnapshotNodeStateProvider implements NodeStateProvider { * @param config optional {@link AutoScalingConfig}, which will be used to 
determine what node and * replica tags to retrieve. If this is null then the other instance's config will be used. */ + @SuppressWarnings({"unchecked"}) public SnapshotNodeStateProvider(SolrCloudManager other, AutoScalingConfig config) throws Exception { if (config == null) { config = other.getDistribStateManager().getAutoScalingConfig(); @@ -96,6 +97,7 @@ public SnapshotNodeStateProvider(SolrCloudManager other, AutoScalingConfig confi * Populate this instance from a previously generated snapshot. * @param snapshot previous snapshot created using this class. */ + @SuppressWarnings({"unchecked"}) public SnapshotNodeStateProvider(Map snapshot) { Objects.requireNonNull(snapshot); nodeValues = (Map>)snapshot.getOrDefault("nodeValues", Collections.emptyMap()); @@ -130,6 +132,7 @@ public SnapshotNodeStateProvider(Map snapshot) { * Create a snapshot of all node and replica tag values available from the original source, per the original * autoscaling configuration. Note: */ + @SuppressWarnings({"unchecked"}) public Map getSnapshot() { Map snapshot = new LinkedHashMap<>(); snapshot.put("nodeValues", nodeValues); diff --git a/solr/core/src/java/org/apache/solr/cloud/overseer/ClusterStateMutator.java b/solr/core/src/java/org/apache/solr/cloud/overseer/ClusterStateMutator.java index 397960f99946..8c69caf2ee90 100644 --- a/solr/core/src/java/org/apache/solr/cloud/overseer/ClusterStateMutator.java +++ b/solr/core/src/java/org/apache/solr/cloud/overseer/ClusterStateMutator.java @@ -52,6 +52,7 @@ public ClusterStateMutator(SolrCloudManager dataProvider) { this.stateManager = dataProvider.getDistribStateManager(); } + @SuppressWarnings({"unchecked"}) public ZkWriteCommand createCollection(ClusterState clusterState, ZkNodeProps message) { String cName = message.getStr(NAME); log.debug("building a new cName: {}", cName); @@ -108,12 +109,7 @@ public ZkWriteCommand createCollection(ClusterState clusterState, ZkNodeProps me collectionProps.put("autoCreated", "true"); } - //TODO default to 2; but need to debug why BasicDistributedZk2Test fails early on - String znode = message.getInt(DocCollection.STATE_FORMAT, 1) == 1 ? 
null - : ZkStateReader.getCollectionPath(cName); - - DocCollection newCollection = new DocCollection(cName, - slices, collectionProps, router, -1, znode); + DocCollection newCollection = new DocCollection(cName, slices, collectionProps, router, -1); return new ZkWriteCommand(cName, newCollection); } @@ -189,16 +185,5 @@ public static String getAssignedCoreNodeName(DocCollection collection, String fo } return null; } - - public ZkWriteCommand migrateStateFormat(ClusterState clusterState, ZkNodeProps message) { - final String collection = message.getStr(ZkStateReader.COLLECTION_PROP); - if (!CollectionMutator.checkKeyExistence(message, ZkStateReader.COLLECTION_PROP)) return ZkStateWriter.NO_OP; - DocCollection coll = clusterState.getCollectionOrNull(collection); - if (coll == null || coll.getStateFormat() == 2) return ZkStateWriter.NO_OP; - - return new ZkWriteCommand(coll.getName(), - new DocCollection(coll.getName(), coll.getSlicesMap(), coll.getProperties(), coll.getRouter(), 0, - ZkStateReader.getCollectionPath(collection))); - } } diff --git a/solr/core/src/java/org/apache/solr/cloud/overseer/CollectionMutator.java b/solr/core/src/java/org/apache/solr/cloud/overseer/CollectionMutator.java index 1c2be1b65186..b64ca49ac9e9 100644 --- a/solr/core/src/java/org/apache/solr/cloud/overseer/CollectionMutator.java +++ b/solr/core/src/java/org/apache/solr/cloud/overseer/CollectionMutator.java @@ -59,6 +59,7 @@ public ZkWriteCommand createShard(final ClusterState clusterState, ZkNodeProps m DocCollection collection = clusterState.getCollection(collectionName); Slice slice = collection.getSlice(shardId); if (slice == null) { + @SuppressWarnings({"unchecked"}) Map replicas = Collections.EMPTY_MAP; Map sliceProps = new HashMap<>(); String shardRange = message.getStr(ZkStateReader.SHARD_RANGE_PROP); @@ -136,7 +137,7 @@ public ZkWriteCommand modifyCollection(final ClusterState clusterState, ZkNodePr } return new ZkWriteCommand(coll.getName(), - new DocCollection(coll.getName(), coll.getSlicesMap(), m, coll.getRouter(), coll.getZNodeVersion(), coll.getZNode())); + new DocCollection(coll.getName(), coll.getSlicesMap(), m, coll.getRouter(), coll.getZNodeVersion())); } public static DocCollection updateSlice(String collectionName, DocCollection collection, Slice slice) { diff --git a/solr/core/src/java/org/apache/solr/cloud/overseer/ReplicaMutator.java b/solr/core/src/java/org/apache/solr/cloud/overseer/ReplicaMutator.java index 7891cc1808f1..769be538d49d 100644 --- a/solr/core/src/java/org/apache/solr/cloud/overseer/ReplicaMutator.java +++ b/solr/core/src/java/org/apache/solr/cloud/overseer/ReplicaMutator.java @@ -203,12 +203,26 @@ public ZkWriteCommand deleteReplicaProperty(ClusterState clusterState, ZkNodePro return new ZkWriteCommand(collectionName, newCollection); } + /** + * Handles state updates + */ public ZkWriteCommand setState(ClusterState clusterState, ZkNodeProps message) { - if (Overseer.isLegacy(cloudManager.getClusterStateProvider())) { - return updateState(clusterState, message); - } else { - return updateStateNew(clusterState, message); + String collectionName = message.getStr(ZkStateReader.COLLECTION_PROP); + if (!checkCollectionKeyExistence(message)) return ZkStateWriter.NO_OP; + String sliceName = message.getStr(ZkStateReader.SHARD_ID_PROP); + + if (collectionName == null || sliceName == null) { + log.error("Invalid collection and slice {}", message); + return ZkStateWriter.NO_OP; } + DocCollection collection = clusterState.getCollectionOrNull(collectionName); + Slice slice = 
collection != null ? collection.getSlice(sliceName) : null; + if (slice == null) { + log.error("No such slice exists {}", message); + return ZkStateWriter.NO_OP; + } + + return updateState(clusterState, message); } protected ZkWriteCommand updateState(final ClusterState prevState, ZkNodeProps message) { @@ -355,28 +369,6 @@ private ZkWriteCommand updateState(final ClusterState prevState, ZkNodeProps mes return new ZkWriteCommand(collectionName, newCollection); } - /** - * Handles non-legacy state updates - */ - protected ZkWriteCommand updateStateNew(ClusterState clusterState, final ZkNodeProps message) { - String collectionName = message.getStr(ZkStateReader.COLLECTION_PROP); - if (!checkCollectionKeyExistence(message)) return ZkStateWriter.NO_OP; - String sliceName = message.getStr(ZkStateReader.SHARD_ID_PROP); - - if (collectionName == null || sliceName == null) { - log.error("Invalid collection and slice {}", message); - return ZkStateWriter.NO_OP; - } - DocCollection collection = clusterState.getCollectionOrNull(collectionName); - Slice slice = collection != null ? collection.getSlice(sliceName) : null; - if (slice == null) { - log.error("No such slice exists {}", message); - return ZkStateWriter.NO_OP; - } - - return updateState(clusterState, message); - } - private DocCollection checkAndCompleteShardSplit(ClusterState prevState, DocCollection collection, String coreNodeName, String sliceName, Replica replica) { Slice slice = collection.getSlice(sliceName); Map sliceProps = slice.getProperties(); diff --git a/solr/core/src/java/org/apache/solr/cloud/overseer/ZkStateWriter.java b/solr/core/src/java/org/apache/solr/cloud/overseer/ZkStateWriter.java index cb89371ca14a..155fbc218e01 100644 --- a/solr/core/src/java/org/apache/solr/cloud/overseer/ZkStateWriter.java +++ b/solr/core/src/java/org/apache/solr/cloud/overseer/ZkStateWriter.java @@ -38,9 +38,8 @@ import static java.util.Collections.singletonMap; /** - * ZkStateWriter is responsible for writing updates to the cluster state stored in ZooKeeper for - * both stateFormat=1 collection (stored in shared /clusterstate.json in ZK) and stateFormat=2 collections - * each of which get their own individual state.json in ZK. + * ZkStateWriter is responsible for writing updates to the cluster state stored in ZooKeeper for collections, + * each of which gets its own individual state.json in ZK. * * Updates to the cluster state are specified using the * {@link #enqueueUpdate(ClusterState, List, ZkWriteCallback)} method. 
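Per the rewritten javadoc above, each collection now gets its own state.json; writePendingUpdates() in the hunks below persists it with an optimistic version check, updating the znode with the collection's cached version when it exists and creating it otherwise, and a concurrent writer surfaces as a BadVersionException handled one level up. A rough sketch of that create-or-update step against a generic versioned store, where StateStore and its methods are hypothetical stand-ins for the SolrZkClient calls:

// Hypothetical versioned key-value store, standing in for SolrZkClient.
interface StateStore {
  boolean exists(String path);
  int create(String path, byte[] data);                // returns version 0
  int setData(String path, byte[] data, int expected); // assume it throws an
                                                       // unchecked exception
                                                       // on version mismatch
}

class CollectionStateWriter {
  private final StateStore store;

  CollectionStateWriter(StateStore store) {
    this.store = store;
  }

  /** Writes one collection's state.json and returns the new znode version. */
  int write(String path, byte[] json, int cachedVersion) {
    if (store.exists(path)) {
      // Optimistic concurrency: fails if someone else updated the node since
      // we cached `cachedVersion`; the caller refreshes its state and retries.
      return store.setData(path, json, cachedVersion);
    } else {
      return store.create(path, json);
    }
  }
}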
The class buffers updates @@ -67,7 +66,6 @@ public class ZkStateWriter { protected Map updates = new HashMap<>(); private int numUpdates = 0; protected ClusterState clusterState = null; - protected boolean isClusterStateModified = false; protected long lastUpdatedTime = 0; /** @@ -115,14 +113,9 @@ public ClusterState enqueueUpdate(ClusterState prevState, List c for (ZkWriteCommand cmd : cmds) { if (cmd == NO_OP) continue; - if (!isClusterStateModified && clusterStateGetModifiedWith(cmd, prevState)) { - isClusterStateModified = true; - } prevState = prevState.copyWith(cmd.name, cmd.collection); - if (cmd.collection == null || cmd.collection.getStateFormat() != 1) { - updates.put(cmd.name, cmd.collection); - numUpdates++; - } + updates.put(cmd.name, cmd.collection); + numUpdates++; } clusterState = prevState; @@ -144,15 +137,6 @@ private boolean isNoOps(List cmds) { return true; } - /** - * Check whether {@value ZkStateReader#CLUSTER_STATE} (for stateFormat = 1) get changed given command - */ - private boolean clusterStateGetModifiedWith(ZkWriteCommand command, ClusterState state) { - DocCollection previousCollection = state.getCollectionOrNull(command.name); - boolean wasPreviouslyStateFormat1 = previousCollection != null && previousCollection.getStateFormat() == 1; - boolean isCurrentlyStateFormat1 = command.collection != null && command.collection.getStateFormat() == 1; - return wasPreviouslyStateFormat1 || isCurrentlyStateFormat1; - } /** * Logic to decide a flush after processing a list of ZkWriteCommand * @@ -163,7 +147,7 @@ private boolean maybeFlushAfter() { } public boolean hasPendingUpdates() { - return numUpdates != 0 || isClusterStateModified; + return numUpdates != 0; } /** @@ -192,23 +176,21 @@ public ClusterState writePendingUpdates() throws IllegalStateException, KeeperEx // let's clean up the state.json of this collection only, the rest should be clean by delete collection cmd log.debug("going to delete state.json {}", path); reader.getZkClient().clean(path); - } else if (c.getStateFormat() > 1) { + } else { byte[] data = Utils.toJSON(singletonMap(c.getName(), c)); if (reader.getZkClient().exists(path, true)) { if (log.isDebugEnabled()) { log.debug("going to update_collection {} version: {}", path, c.getZNodeVersion()); } Stat stat = reader.getZkClient().setData(path, data, c.getZNodeVersion(), true); - DocCollection newCollection = new DocCollection(name, c.getSlicesMap(), c.getProperties(), c.getRouter(), stat.getVersion(), path); + DocCollection newCollection = new DocCollection(name, c.getSlicesMap(), c.getProperties(), c.getRouter(), stat.getVersion()); clusterState = clusterState.copyWith(name, newCollection); } else { log.debug("going to create_collection {}", path); reader.getZkClient().create(path, data, CreateMode.PERSISTENT, true); - DocCollection newCollection = new DocCollection(name, c.getSlicesMap(), c.getProperties(), c.getRouter(), 0, path); + DocCollection newCollection = new DocCollection(name, c.getSlicesMap(), c.getProperties(), c.getRouter(), 0); clusterState = clusterState.copyWith(name, newCollection); } - } else if (c.getStateFormat() == 1) { - isClusterStateModified = true; } } @@ -216,15 +198,6 @@ public ClusterState writePendingUpdates() throws IllegalStateException, KeeperEx numUpdates = 0; } - if (isClusterStateModified) { - assert clusterState.getZkClusterStateVersion() >= 0; - byte[] data = Utils.toJSON(clusterState); - Stat stat = reader.getZkClient().setData(ZkStateReader.CLUSTER_STATE, data, clusterState.getZkClusterStateVersion(), true); - 
Map collections = clusterState.getCollectionsMap(); - // use the reader's live nodes because our cluster state's live nodes may be stale - clusterState = new ClusterState(stat.getVersion(), reader.getClusterState().getLiveNodes(), collections); - isClusterStateModified = false; - } lastUpdatedTime = System.nanoTime(); success = true; } catch (KeeperException.BadVersionException bve) { diff --git a/solr/core/src/java/org/apache/solr/cloud/rule/ReplicaAssigner.java b/solr/core/src/java/org/apache/solr/cloud/rule/ReplicaAssigner.java index 9e47dea72781..6f68d1cecd9e 100644 --- a/solr/core/src/java/org/apache/solr/cloud/rule/ReplicaAssigner.java +++ b/solr/core/src/java/org/apache/solr/cloud/rule/ReplicaAssigner.java @@ -70,9 +70,10 @@ public class ReplicaAssigner { * @param shardVsNodes The current state of the system. can be an empty map if no nodes * are created in this collection till now */ + @SuppressWarnings({"unchecked"}) public ReplicaAssigner(List rules, Map shardVsReplicaCount, - List snitches, + @SuppressWarnings({"rawtypes"})List snitches, Map> shardVsNodes, List participatingLiveNodes, SolrCloudManager cloudManager, ClusterState clusterState) { @@ -185,6 +186,7 @@ private Map tryAllPermutations(List shardNames, } + @SuppressWarnings({"unchecked"}) private Map tryAPermutationOfRules(int[] rulePermutation, List replicaPositions, boolean fuzzyPhase) { Map> nodeVsTagsCopy = getDeepCopy(nodeVsTags, 2); Map result = new LinkedHashMap<>(); @@ -332,10 +334,12 @@ public Set getTagNames() { /** * This method uses the snitches and get the tags for all the nodes */ - private Map> getTagsForNodes(final SolrCloudManager cloudManager, List snitchConf) { + @SuppressWarnings({"unchecked"}) + private Map> getTagsForNodes(final SolrCloudManager cloudManager, @SuppressWarnings({"rawtypes"})List snitchConf) { + @SuppressWarnings({"rawtypes"}) Map snitches = getSnitchInfos(cloudManager, snitchConf); - for (Class c : Snitch.WELL_KNOWN_SNITCHES) { + for (@SuppressWarnings({"rawtypes"})Class c : Snitch.WELL_KNOWN_SNITCHES) { if (snitches.containsKey(c)) continue;// it is already specified explicitly , ignore try { snitches.put(c, new SnitchInfoImpl(Collections.EMPTY_MAP, (Snitch) c.getConstructor().newInstance(), cloudManager)); @@ -409,11 +413,12 @@ protected SnitchContext getSnitchCtx(String node, SnitchInfoImpl info, SolrCloud return new ServerSnitchContext(info, node, snitchSession, cloudManager); } - public static void verifySnitchConf(SolrCloudManager cloudManager, List snitchConf) { + public static void verifySnitchConf(SolrCloudManager cloudManager, @SuppressWarnings({"rawtypes"})List snitchConf) { getSnitchInfos(cloudManager, snitchConf); } + @SuppressWarnings({"unchecked", "rawtypes"}) static Map getSnitchInfos(SolrCloudManager cloudManager, List snitchConf) { if (snitchConf == null) snitchConf = Collections.emptyList(); Map snitches = new LinkedHashMap<>(); diff --git a/solr/core/src/java/org/apache/solr/cloud/rule/Rule.java b/solr/core/src/java/org/apache/solr/cloud/rule/Rule.java index e54f5a0fb6a5..e9397e59cbd5 100644 --- a/solr/core/src/java/org/apache/solr/cloud/rule/Rule.java +++ b/solr/core/src/java/org/apache/solr/cloud/rule/Rule.java @@ -46,8 +46,9 @@ public class Rule { Condition replica; Condition tag; - public Rule(Map m) { + public Rule(@SuppressWarnings({"rawtypes"})Map m) { for (Object o : m.entrySet()) { + @SuppressWarnings({"rawtypes"}) Map.Entry e = (Map.Entry) o; Condition condition = new Condition(String.valueOf(e.getKey()), String.valueOf(e.getValue())); if 
(condition.name.equals(SHARD_ID_PROP)) shard = condition; @@ -69,7 +70,7 @@ public Rule(Map m) { } - static Object parseObj(Object o, Class typ) { + static Object parseObj(Object o, @SuppressWarnings({"rawtypes"})Class typ) { if (o == null) return o; if (typ == String.class) return String.valueOf(o); if (typ == Integer.class) { @@ -79,6 +80,7 @@ static Object parseObj(Object o, Class typ) { return o; } + @SuppressWarnings({"rawtypes"}) public static Map parseRule(String s) { Map result = new LinkedHashMap<>(); s = s.trim(); @@ -98,7 +100,9 @@ public static Map parseRule(String s) { @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public String toString() { + @SuppressWarnings({"rawtypes"}) Map map = new LinkedHashMap(); if (shard != SHARD_DEFAULT) map.put(shard.name, shard.operand.toStr(shard.val)); if (replica != REPLICA_DEFAULT) map.put(replica.name, replica.operand.toStr(replica.val)); @@ -359,6 +363,11 @@ public boolean equals(Object obj) { return false; } + @Override + public int hashCode() { + throw new UnsupportedOperationException("TODO unimplemented"); + } + @Override public String toString() { return name + ":" + operand.toStr(val) + (fuzzy ? "~" : ""); diff --git a/solr/core/src/java/org/apache/solr/cloud/rule/ServerSnitchContext.java b/solr/core/src/java/org/apache/solr/cloud/rule/ServerSnitchContext.java index 36560113ae4a..93083425eff9 100644 --- a/solr/core/src/java/org/apache/solr/cloud/rule/ServerSnitchContext.java +++ b/solr/core/src/java/org/apache/solr/cloud/rule/ServerSnitchContext.java @@ -41,6 +41,7 @@ public ServerSnitchContext(SnitchInfo perSnitch, } + @SuppressWarnings({"rawtypes"}) public Map getZkJson(String path) throws KeeperException, InterruptedException { try { return Utils.getJson(cloudManager.getDistribStateManager(), path) ; diff --git a/solr/core/src/java/org/apache/solr/core/AbstractSolrEventListener.java b/solr/core/src/java/org/apache/solr/core/AbstractSolrEventListener.java index 83b2a93787f3..9d2787496014 100644 --- a/solr/core/src/java/org/apache/solr/core/AbstractSolrEventListener.java +++ b/solr/core/src/java/org/apache/solr/core/AbstractSolrEventListener.java @@ -29,11 +29,13 @@ public class AbstractSolrEventListener implements SolrEventListener { public AbstractSolrEventListener(SolrCore core) { this.core = core; } + @SuppressWarnings({"rawtypes"}) private NamedList args; + @SuppressWarnings({"rawtypes"}) public NamedList getArgs() { return args; } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { this.args = args.clone(); } @@ -67,6 +69,7 @@ public String toString() { * @param currentSearcher If null, add FIRST_SEARCHER, otherwise NEW_SEARCHER * @param nlst The named list to add the EVENT value to */ + @SuppressWarnings({"unchecked", "rawtypes"}) protected NamedList addEventParms(SolrIndexSearcher currentSearcher, NamedList nlst) { NamedList result = new NamedList(); result.addAll(nlst); diff --git a/solr/core/src/java/org/apache/solr/core/BlobRepository.java b/solr/core/src/java/org/apache/solr/core/BlobRepository.java index a7d524f3c16b..4e0d86485480 100644 --- a/solr/core/src/java/org/apache/solr/core/BlobRepository.java +++ b/solr/core/src/java/org/apache/solr/core/BlobRepository.java @@ -79,9 +79,11 @@ public class BlobRepository { } private final CoreContainer coreContainer; + @SuppressWarnings({"rawtypes"}) private Map blobs = createMap(); // for unit tests to override + @SuppressWarnings({"rawtypes"}) ConcurrentHashMap createMap() { return new 
ConcurrentHashMap<>(); } @@ -118,8 +120,9 @@ BlobContentRef getBlobIncRef(String key, Decoder decoder) { return getBlobIncRef(key.concat(decoder.getName()), () -> addBlob(key, decoder)); } + @SuppressWarnings({"unchecked", "rawtypes"}) BlobContentRef getBlobIncRef(String key, Decoder decoder, String url, String sha512) { - StringBuffer keyBuilder = new StringBuffer(key); + StringBuilder keyBuilder = new StringBuilder(key); if (decoder != null) keyBuilder.append(decoder.getName()); keyBuilder.append("/").append(sha512); @@ -127,6 +130,7 @@ BlobContentRef getBlobIncRef(String key, Decoder decoder, String url, String sha } // do the actual work returning the appropriate type... + @SuppressWarnings({"unchecked"}) private BlobContentRef getBlobIncRef(String key, Callable> blobCreator) { BlobContent aBlob; if (this.coreContainer.isZooKeeperAware()) { @@ -273,7 +277,7 @@ private Replica getSystemCollReplica() { * * @param ref The reference that is already there. Doing multiple calls with same ref will not matter */ - public void decrementBlobRefCount(BlobContentRef ref) { + public void decrementBlobRefCount(@SuppressWarnings({"rawtypes"})BlobContentRef ref) { if (ref == null) return; synchronized (ref.blob.references) { if (!ref.blob.references.remove(ref)) { @@ -285,6 +289,7 @@ public void decrementBlobRefCount(BlobContentRef ref) { } } + @SuppressWarnings({"unchecked", "rawtypes"}) public static class BlobContent { public final String key; private final T content; // holds byte buffer or cached object, holding both is a waste of memory diff --git a/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java index 34113253943c..0b4e193b8d71 100644 --- a/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java +++ b/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java @@ -397,7 +397,7 @@ public void incRef(Directory directory) { } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings("rawtypes") NamedList args) { maxWriteMBPerSecFlush = (Double) args.get("maxWriteMBPerSecFlush"); maxWriteMBPerSecMerge = (Double) args.get("maxWriteMBPerSecMerge"); maxWriteMBPerSecRead = (Double) args.get("maxWriteMBPerSecRead"); @@ -405,7 +405,7 @@ public void init(NamedList args) { // override global config if (args.get(SolrXmlConfig.SOLR_DATA_HOME) != null) { - dataHomePath = Paths.get((String) args.get(SolrXmlConfig.SOLR_DATA_HOME)); + dataHomePath = Paths.get((String) args.get(SolrXmlConfig.SOLR_DATA_HOME)).toAbsolutePath().normalize(); } if (dataHomePath != null) { log.info("{} = {}", SolrXmlConfig.SOLR_DATA_HOME, dataHomePath); diff --git a/solr/core/src/java/org/apache/solr/core/CodecFactory.java b/solr/core/src/java/org/apache/solr/core/CodecFactory.java index 36c67eba1452..7ded16990f74 100644 --- a/solr/core/src/java/org/apache/solr/core/CodecFactory.java +++ b/solr/core/src/java/org/apache/solr/core/CodecFactory.java @@ -25,7 +25,7 @@ */ public abstract class CodecFactory implements NamedListInitializedPlugin { @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { } public abstract Codec getCodec(); diff --git a/solr/core/src/java/org/apache/solr/core/ConfigOverlay.java b/solr/core/src/java/org/apache/solr/core/ConfigOverlay.java index 967db856ccfe..4726ae298ef7 100644 --- a/solr/core/src/java/org/apache/solr/core/ConfigOverlay.java +++ b/solr/core/src/java/org/apache/solr/core/ConfigOverlay.java @@ -41,6 
+41,7 @@ public class ConfigOverlay implements MapSerializable { private Map props; private Map userProps; + @SuppressWarnings({"unchecked"}) public ConfigOverlay(Map jsonObj, int znodeVersion) { if (jsonObj == null) jsonObj = Collections.EMPTY_MAP; this.znodeVersion = znodeVersion; @@ -61,7 +62,9 @@ public Object getXPathProperty(String xpath, boolean onlyPrimitive) { return Utils.getObjectByPath(props, onlyPrimitive, hierarchy); } + @SuppressWarnings({"unchecked"}) public ConfigOverlay setUserProperty(String key, Object val) { + @SuppressWarnings({"rawtypes"}) Map copy = new LinkedHashMap(userProps); copy.put(key, val); Map jsonObj = new LinkedHashMap<>(this.data); @@ -71,6 +74,7 @@ public ConfigOverlay setUserProperty(String key, Object val) { public ConfigOverlay unsetUserProperty(String key) { if (!userProps.containsKey(key)) return this; + @SuppressWarnings({"unchecked", "rawtypes"}) Map copy = new LinkedHashMap(userProps); copy.remove(key); Map jsonObj = new LinkedHashMap<>(this.data); @@ -78,6 +82,7 @@ public ConfigOverlay unsetUserProperty(String key) { return new ConfigOverlay(jsonObj, znodeVersion); } + @SuppressWarnings({"unchecked", "rawtypes"}) public ConfigOverlay setProperty(String name, Object val) { List hierarchy = checkEditable(name, false, true); Map deepCopy = (Map) Utils.fromJSON(Utils.toJSON(props)); @@ -114,6 +119,7 @@ private List checkEditable(String propName, boolean isXPath, boolean fai } + @SuppressWarnings({"rawtypes"}) public ConfigOverlay unsetProperty(String name) { List hierarchy = checkEditable(name, false, true); Map deepCopy = (Map) Utils.fromJSON(Utils.toJSON(props)); @@ -164,6 +170,7 @@ public String toString() { //The path maps to the xml xpath and value of 1 means it is a tag with a string value and value // of 0 means it is an attribute with string value + @SuppressWarnings({"rawtypes"}) private static Map editable_prop_map = (Map) Utils.fromJSONResource("EditableSolrConfigAttributes.json"); public static boolean isEditableProp(String path, boolean isXpath, List hierarchy) { @@ -171,6 +178,7 @@ public static boolean isEditableProp(String path, boolean isXpath, List } + @SuppressWarnings({"rawtypes"}) public static Class checkEditable(String path, boolean isXpath, List hierarchy) { List parts = StrUtils.splitSmart(path, isXpath ? 
'/' : '.'); Object obj = editable_prop_map; @@ -195,8 +203,10 @@ public static Class checkEditable(String path, boolean isXpath, List hie return null; } + @SuppressWarnings({"rawtypes"}) static Class[] types = new Class[]{String.class, Boolean.class, Integer.class, Float.class}; + @SuppressWarnings({"rawtypes"}) private static Class checkType(Object o, boolean isXpath, boolean isAttr) { if (o instanceof Long) { Long aLong = (Long) o; @@ -209,6 +219,7 @@ private static Class checkType(Object o, boolean isXpath, boolean isAttr) { } } + @SuppressWarnings({"unchecked"}) public Map getEditableSubProperties(String xpath) { Object o = Utils.getObjectByPath(props, false, StrUtils.splitSmart(xpath, '/')); if (o instanceof Map) { @@ -229,6 +240,7 @@ public Map toMap(Map map) { return map; } + @SuppressWarnings({"unchecked", "rawtypes"}) public Map getNamedPlugins(String typ) { Map reqHandlers = (Map) data.get(typ); if (reqHandlers == null) return Collections.EMPTY_MAP; @@ -236,6 +248,7 @@ public Map getNamedPlugins(String typ) { } + @SuppressWarnings({"unchecked", "rawtypes"}) public ConfigOverlay addNamedPlugin(Map info, String typ) { Map dataCopy = Utils.getDeepCopy(data, 4); Map existing = (Map) dataCopy.get(typ); @@ -244,6 +257,7 @@ public ConfigOverlay addNamedPlugin(Map info, String typ) { return new ConfigOverlay(dataCopy, this.znodeVersion); } + @SuppressWarnings({"unchecked", "rawtypes"}) public ConfigOverlay deleteNamedPlugin(String name, String typ) { Map dataCopy = Utils.getDeepCopy(data, 4); Map reqHandler = (Map) dataCopy.get(typ); diff --git a/solr/core/src/java/org/apache/solr/core/ConfigSet.java b/solr/core/src/java/org/apache/solr/core/ConfigSet.java index 671f81a9052d..d6cb31de4609 100644 --- a/solr/core/src/java/org/apache/solr/core/ConfigSet.java +++ b/solr/core/src/java/org/apache/solr/core/ConfigSet.java @@ -32,10 +32,12 @@ public class ConfigSet { private final IndexSchema indexSchema; + @SuppressWarnings({"rawtypes"}) private final NamedList properties; private final boolean trusted; + @SuppressWarnings({"rawtypes"}) public ConfigSet(String name, SolrConfig solrConfig, IndexSchema indexSchema, NamedList properties, boolean trusted) { this.name = name; @@ -57,6 +59,7 @@ public IndexSchema getIndexSchema() { return indexSchema; } + @SuppressWarnings({"rawtypes"}) public NamedList getProperties() { return properties; } diff --git a/solr/core/src/java/org/apache/solr/core/ConfigSetProperties.java b/solr/core/src/java/org/apache/solr/core/ConfigSetProperties.java index feaef8fac25b..a8ca1ec4973f 100644 --- a/solr/core/src/java/org/apache/solr/core/ConfigSetProperties.java +++ b/solr/core/src/java/org/apache/solr/core/ConfigSetProperties.java @@ -52,6 +52,7 @@ public class ConfigSetProperties { * @param name the name of the config set properties file * @return the properties in a NamedList */ + @SuppressWarnings({"rawtypes"}) public static NamedList readFromResourceLoader(SolrResourceLoader loader, String name) { InputStreamReader reader; try { @@ -72,6 +73,7 @@ public static NamedList readFromResourceLoader(SolrResourceLoader loader, String } } + @SuppressWarnings({"unchecked", "rawtypes"}) public static NamedList readFromInputStream(InputStreamReader reader) { try { Object object = fromJSON(reader); diff --git a/solr/core/src/java/org/apache/solr/core/ConfigSetService.java b/solr/core/src/java/org/apache/solr/core/ConfigSetService.java index 7b5ca794ca3d..84f94d5272be 100644 --- a/solr/core/src/java/org/apache/solr/core/ConfigSetService.java +++ 
b/solr/core/src/java/org/apache/solr/core/ConfigSetService.java @@ -61,6 +61,7 @@ public static ConfigSetService createConfigSetService(NodeConfig nodeConfig, Sol * @param dcore the core's CoreDescriptor * @return a ConfigSet */ + @SuppressWarnings({"rawtypes"}) public final ConfigSet loadConfigSet(CoreDescriptor dcore) { SolrResourceLoader coreLoader = createCoreResourceLoader(dcore); @@ -158,6 +159,7 @@ protected IndexSchema createIndexSchema(CoreDescriptor cd, SolrConfig solrConfig * @param loader the core's resource loader * @return the ConfigSet properties */ + @SuppressWarnings({"rawtypes"}) protected NamedList loadConfigSetProperties(CoreDescriptor cd, SolrResourceLoader loader) { return ConfigSetProperties.readFromResourceLoader(loader, cd.getConfigSetPropertiesName()); } @@ -166,6 +168,7 @@ protected NamedList loadConfigSetProperties(CoreDescriptor cd, SolrResourceLoade * Return the ConfigSet flags or null if none. */ // TODO should fold into configSetProps -- SOLR-14059 + @SuppressWarnings({"rawtypes"}) protected NamedList loadConfigSetFlags(CoreDescriptor cd, SolrResourceLoader loader) { return null; } diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java index 30c3f83a10be..71ef939cb5bb 100644 --- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java +++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java @@ -42,6 +42,7 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; +import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import org.apache.commons.lang3.StringUtils; @@ -61,9 +62,9 @@ import org.apache.solr.client.solrj.impl.SolrHttpClientContextBuilder; import org.apache.solr.client.solrj.impl.SolrHttpClientContextBuilder.AuthSchemeRegistryProvider; import org.apache.solr.client.solrj.impl.SolrHttpClientContextBuilder.CredentialsProviderProvider; +import org.apache.solr.client.solrj.io.SolrClientCache; import org.apache.solr.client.solrj.util.SolrIdentifierValidator; import org.apache.solr.cloud.CloudDescriptor; -import org.apache.solr.cloud.Overseer; import org.apache.solr.cloud.OverseerTaskQueue; import org.apache.solr.cloud.ZkController; import org.apache.solr.cloud.autoscaling.AutoScalingHandler; @@ -76,6 +77,7 @@ import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.IOUtils; +import org.apache.solr.common.util.ObjectCache; import org.apache.solr.common.util.SolrNamedThreadFactory; import org.apache.solr.common.util.Utils; import org.apache.solr.core.DirectoryFactory.DirContext; @@ -100,6 +102,7 @@ import org.apache.solr.handler.admin.ZookeeperReadAPI; import org.apache.solr.handler.admin.ZookeeperStatusHandler; import org.apache.solr.handler.component.ShardHandlerFactory; +import org.apache.solr.handler.sql.CalciteSolrDriver; import org.apache.solr.logging.LogWatcher; import org.apache.solr.logging.MDCLoggingContext; import org.apache.solr.metrics.SolrCoreMetricManager; @@ -186,6 +189,7 @@ public CoreLoadFailure(CoreDescriptor cd, Exception loadFailure) { private final OrderedExecutor replayUpdatesExecutor; + @SuppressWarnings({"rawtypes"}) protected volatile LogWatcher logging = null; private volatile CloserThread backgroundCloser = null; @@ -228,9 +232,14 @@ public CoreLoadFailure(CoreDescriptor cd, Exception loadFailure) { protected volatile AutoscalingHistoryHandler 
autoscalingHistoryHandler; + private volatile SolrClientCache solrClientCache; + + private final ObjectCache objectCache = new ObjectCache(); + private PackageStoreAPI packageStoreAPI; private PackageLoader packageLoader; + private Set allowPaths; // Bits for the state variable. public final static long LOAD_COMPLETE = 0x1L; @@ -331,8 +340,22 @@ public CoreContainer(NodeConfig config, CoresLocator locator, boolean asyncSolrC ExecutorUtil.newMDCAwareCachedThreadPool( cfg.getReplayUpdatesThreads(), new SolrNamedThreadFactory("replayUpdatesExecutor"))); + + this.allowPaths = new java.util.HashSet<>(); + this.allowPaths.add(cfg.getSolrHome()); + this.allowPaths.add(cfg.getCoreRootDirectory()); + if (cfg.getSolrDataHome() != null) { + this.allowPaths.add(cfg.getSolrDataHome()); + } + if (!cfg.getAllowPaths().isEmpty()) { + this.allowPaths.addAll(cfg.getAllowPaths()); + if (log.isInfoEnabled()) { + log.info("Allowing use of paths: {}", cfg.getAllowPaths()); + } + } } + @SuppressWarnings({"unchecked"}) private synchronized void initializeAuthorizationPlugin(Map authorizationConf) { authorizationConf = Utils.getDeepCopy(authorizationConf, 4); int newVersion = readVersion(authorizationConf); @@ -367,6 +390,7 @@ private synchronized void initializeAuthorizationPlugin(Map auth } } + @SuppressWarnings({"unchecked"}) private void initializeAuditloggerPlugin(Map auditConf) { auditConf = Utils.getDeepCopy(auditConf, 4); int newVersion = readVersion(auditConf); @@ -402,6 +426,7 @@ private void initializeAuditloggerPlugin(Map auditConf) { } + @SuppressWarnings({"unchecked", "rawtypes"}) private synchronized void initializeAuthenticationPlugin(Map authenticationConfig) { authenticationConfig = Utils.getDeepCopy(authenticationConfig, 4); int newVersion = readVersion(authenticationConfig); @@ -499,6 +524,7 @@ public Lookup getAuthSchemeRegistry() { } } + @SuppressWarnings({"rawtypes"}) private static int readVersion(Map conf) { if (conf == null) return -1; Map meta = (Map) conf.get(""); @@ -576,6 +602,15 @@ public PackageLoader getPackageLoader() { public PackageStoreAPI getPackageStoreAPI() { return packageStoreAPI; } + + public SolrClientCache getSolrClientCache() { + return solrClientCache; + } + + public ObjectCache getObjectCache() { + return objectCache; + } + //------------------------------------------------------------------- // Initialization / Cleanup //------------------------------------------------------------------- @@ -636,6 +671,11 @@ public void load() { updateShardHandler = new UpdateShardHandler(cfg.getUpdateShardHandlerConfig()); updateShardHandler.initializeMetrics(solrMetricsContext, "updateShardHandler"); + solrClientCache = new SolrClientCache(updateShardHandler.getDefaultHttpClient()); + + // initialize CalciteSolrDriver instance to use this solrClientCache + CalciteSolrDriver.INSTANCE.setSolrClientCache(solrClientCache); + solrCores.load(loader); @@ -835,6 +875,7 @@ public void load() { } // MetricsHistoryHandler supports both cloud and standalone configs + @SuppressWarnings({"unchecked"}) private void createMetricsHistoryHandler() { PluginInfo plugin = cfg.getMetricsConfig().getHistoryHandler(); Map initArgs; @@ -887,6 +928,7 @@ public void securityNodeChanged() { /** * Make sure securityConfHandler is initialized */ + @SuppressWarnings({"unchecked"}) private void reloadSecurityProperties() { SecurityConfHandler.SecurityConfig securityConfig = securityConfHandler.getSecurityConfig(false); initializeAuthorizationPlugin((Map) securityConfig.getData().get("authorization")); @@ -896,9 
+938,9 @@ private void reloadSecurityProperties() { private void warnUsersOfInsecureSettings() { if (authenticationPlugin == null || authorizationPlugin == null) { - log.warn("Not all security plugins configured! authentication={} authorization={}. Solr is only as secure as {}{}" - , "you make it. Consider configuring authentication/authorization before exposing Solr to users internal or " - , "external. See https://s.apache.org/solrsecurity for more info", + log.warn("Not all security plugins configured! authentication={} authorization={}. Solr is only as secure as " + + "you make it. Consider configuring authentication/authorization before exposing Solr to users internal or " + + "external. See https://s.apache.org/solrsecurity for more info", (authenticationPlugin != null) ? "enabled" : "disabled", (authorizationPlugin != null) ? "enabled" : "disabled"); } @@ -975,6 +1017,8 @@ public void shutdown() { // Now clear all the cores that are being operated upon. solrCores.close(); + objectCache.clear(); + // It's still possible that one of the pending dynamic load operation is waiting, so wake it up if so. // Since all the pending operations queues have been drained, there should be nothing to do. synchronized (solrCores.getModifyLock()) { @@ -1017,6 +1061,9 @@ public void shutdown() { } catch (Exception e) { log.warn("Error shutting down CoreAdminHandler. Continuing to close CoreContainer.", e); } + if (solrClientCache != null) { + solrClientCache.close(); + } } finally { try { @@ -1168,14 +1215,15 @@ public SolrCore create(String coreName, Path instancePath, Map p throw new SolrException(ErrorCode.SERVER_ERROR, "Core with name '" + coreName + "' already exists."); } + // Validate paths are relative to known locations to avoid path traversal + assertPathAllowed(cd.getInstanceDir()); + assertPathAllowed(Paths.get(cd.getDataDir())); + boolean preExisitingZkEntry = false; try { if (getZkController() != null) { - if (!Overseer.isLegacy(getZkController().getZkStateReader())) { - if (cd.getCloudDescriptor().getCoreNodeName() == null) { - throw new SolrException(ErrorCode.SERVER_ERROR, "non legacy mode coreNodeName missing " + parameters.toString()); - - } + if (cd.getCloudDescriptor().getCoreNodeName() == null) { + throw new SolrException(ErrorCode.SERVER_ERROR, "coreNodeName missing " + parameters.toString()); } preExisitingZkEntry = getZkController().checkIfCoreNodeNameAlreadyExists(cd); } @@ -1230,6 +1278,29 @@ public SolrCore create(String coreName, Path instancePath, Map p } } + /** + * Checks that the given path is relative to SOLR_HOME, SOLR_DATA_HOME, coreRootDirectory or one of the paths + * specified in solr.xml's allowPaths element. Delegates to {@link SolrPaths#assertPathAllowed(Path, Set)} + * @param pathToAssert path to check + * @throws SolrException if path is outside allowed paths + */ + public void assertPathAllowed(Path pathToAssert) throws SolrException { + SolrPaths.assertPathAllowed(pathToAssert, allowPaths); + } + + /** + *
Return the file system paths that should be allowed for various API requests. + * This list is compiled at startup from SOLR_HOME, SOLR_DATA_HOME and the + * allowPaths configuration of solr.xml. + * These paths are used by the {@link #assertPathAllowed(Path)} method call. + * + * NOTE:
    This method is currently only in use in tests in order to + * modify the mutable Set directly. Please treat this as a private method. + */ + @VisibleForTesting + public Set getAllowPaths() { + return allowPaths; + } + /** * Creates a new core based on a CoreDescriptor. * @@ -1473,7 +1544,6 @@ public Map getCoreInitFailures() { return ImmutableMap.copyOf(coreInitFailures); } - // ---------------- Core name related methods --------------- private CoreDescriptor reloadCoreDescriptor(CoreDescriptor oldDesc) { @@ -1581,7 +1651,7 @@ public void reload(String name) { } catch (SolrCoreState.CoreIsClosedException e) { throw e; } catch (Exception e) { - coreInitFailures.put(cd.getName(), new CoreLoadFailure(cd, (Exception) e)); + coreInitFailures.put(cd.getName(), new CoreLoadFailure(cd, e)); throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to reload core [" + cd.getName() + "]", e); } finally { if (!success && newCore != null && newCore.getOpenCount() > 0) { @@ -1736,6 +1806,7 @@ public CoreDescriptor getCoreDescriptor(String coreName) { return solrCores.getCoreDescriptor(coreName); } + /** Where cores are created (absolute). */ public Path getCoreRootDirectory() { return cfg.getCoreRootDirectory(); } @@ -1815,6 +1886,7 @@ public void waitForLoadingCore(String name, long timeoutMs) { // ---------------- CoreContainer request handlers -------------- + @SuppressWarnings({"rawtypes"}) protected T createHandler(String path, String handlerClass, Class clazz) { T handler = loader.newInstance(handlerClass, clazz, null, new Class[]{CoreContainer.class}, new Object[]{this}); if (handler instanceof SolrRequestHandler) { @@ -1861,6 +1933,7 @@ public String getManagementPath() { return cfg.getManagementPath(); } + @SuppressWarnings({"rawtypes"}) public LogWatcher getLogging() { return logging; } @@ -1892,6 +1965,7 @@ public CoreDescriptor getUnloadedCoreDescriptor(String cname) { return solrCores.getUnloadedCoreDescriptor(cname); } + /** The primary path of a Solr server's config, cores, and misc things. Absolute. */ //TODO return Path public String getSolrHome() { return solrHome.toString(); @@ -1953,43 +2027,6 @@ public TransientSolrCoreCache getTransientCache() { return solrCores.getTransientCacheHandler(); } - - /** - * @param cd CoreDescriptor, presumably a deficient one - * @param prop The property that needs to be repaired. - * @return true if we were able to successfuly perisist the repaired coreDescriptor, false otherwise. - *
    - * See SOLR-11503, This can be removed when there's no chance we'll need to upgrade a - * Solr installation created with legacyCloud=true from 6.6.1 through 7.1 - */ - public boolean repairCoreProperty(CoreDescriptor cd, String prop) { - // So far, coreNodeName is the only property that we need to repair, this may get more complex as other properties - // are added. - - if (CoreDescriptor.CORE_NODE_NAME.equals(prop) == false) { - throw new SolrException(ErrorCode.SERVER_ERROR, - String.format(Locale.ROOT, "The only supported property for repair is currently [%s]", - CoreDescriptor.CORE_NODE_NAME)); - } - - // Try to read the coreNodeName from the cluster state. - - String coreName = cd.getName(); - DocCollection coll = getZkController().getZkStateReader().getClusterState().getCollection(cd.getCollectionName()); - for (Replica rep : coll.getReplicas()) { - if (coreName.equals(rep.getCoreName())) { - log.warn("Core properties file for node {} found with no coreNodeName, attempting to repair with value {}. See SOLR-11503. {}" - , "This message should only appear if upgrading from collections created Solr 6.6.1 through 7.1." - , rep.getCoreName(), rep.getName()); - cd.getCloudDescriptor().setCoreNodeName(rep.getName()); - coresLocator.persist(this, cd); - return true; - } - } - log.error("Could not repair coreNodeName in core.properties file for core {}", coreName); - return false; - } - /** * @param solrCore the core against which we check if there has been a tragic exception * @return whether this Solr core has tragic exception diff --git a/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java b/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java index 10729295bcec..d622734391d8 100644 --- a/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java +++ b/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java @@ -182,7 +182,7 @@ public CoreDescriptor(String coreName, CoreDescriptor other) { */ public CoreDescriptor(String name, Path instanceDir, Map coreProps, Properties containerProperties, ZkController zkController) { - this.instanceDir = instanceDir; + this.instanceDir = instanceDir.toAbsolutePath(); originalCoreProperties.setProperty(CORE_NAME, name); @@ -290,9 +290,7 @@ public boolean usingDefaultDataDir() { return defaultProperties.get(CORE_DATADIR).equals(coreProperties.getProperty(CORE_DATADIR)); } - /** - * @return the core instance directory - */ + /** The core instance directory (absolute). */ public Path getInstanceDir() { return instanceDir; } diff --git a/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java b/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java index b32a85e88ced..2d2712f1a18c 100644 --- a/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java +++ b/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java @@ -62,7 +62,7 @@ public CorePropertiesLocator(Path coreDiscoveryRoot) { @Override public void create(CoreContainer cc, CoreDescriptor... coreDescriptors) { for (CoreDescriptor cd : coreDescriptors) { - Path propertiesFile = this.rootDirectory.resolve(cd.getInstanceDir()).resolve(PROPERTIES_FILENAME); + Path propertiesFile = cd.getInstanceDir().resolve(PROPERTIES_FILENAME); if (Files.exists(propertiesFile)) throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Could not create a new core in " + cd.getInstanceDir() @@ -78,7 +78,7 @@ public void create(CoreContainer cc, CoreDescriptor... coreDescriptors) { @Override public void persist(CoreContainer cc, CoreDescriptor... 
coreDescriptors) { for (CoreDescriptor cd : coreDescriptors) { - Path propFile = this.rootDirectory.resolve(cd.getInstanceDir()).resolve(PROPERTIES_FILENAME); + Path propFile = cd.getInstanceDir().resolve(PROPERTIES_FILENAME); writePropertiesFile(cd, propFile); } } @@ -105,7 +105,7 @@ public void delete(CoreContainer cc, CoreDescriptor... coreDescriptors) { } for (CoreDescriptor cd : coreDescriptors) { if (cd == null) continue; - Path propfile = this.rootDirectory.resolve(cd.getInstanceDir()).resolve(PROPERTIES_FILENAME); + Path propfile = cd.getInstanceDir().resolve(PROPERTIES_FILENAME); try { Files.deleteIfExists(propfile); } catch (IOException e) { diff --git a/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java index b1f34086776d..57366921c054 100644 --- a/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java +++ b/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java @@ -23,9 +23,8 @@ import java.io.IOException; import java.lang.invoke.MethodHandles; import java.nio.file.NoSuchFileException; -import java.util.Arrays; import java.nio.file.Path; -import java.nio.file.Paths; +import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -55,7 +54,7 @@ public abstract class DirectoryFactory implements NamedListInitializedPlugin, protected static final String INDEX_W_TIMESTAMP_REGEX = "index\\.[0-9]{17}"; // see SnapShooter.DATE_FMT - // May be set by sub classes as data root, in which case getDataHome will use it as base + // May be set by sub classes as data root, in which case getDataHome will use it as base. Absolute. protected Path dataHomePath; // hint about what the directory contains - default is index directory @@ -331,16 +330,16 @@ public boolean searchersReserveCommitPoints() { * @return a String with absolute path to data direcotry */ public String getDataHome(CoreDescriptor cd) throws IOException { - String dataDir; + Path dataDir; if (dataHomePath != null) { - String instanceDirLastPath = cd.getInstanceDir().getName(cd.getInstanceDir().getNameCount()-1).toString(); - dataDir = Paths.get(coreContainer.getSolrHome()).resolve(dataHomePath) - .resolve(instanceDirLastPath).resolve(cd.getDataDir()).toAbsolutePath().toString(); + Path instanceDirLastPath = cd.getInstanceDir().getName(cd.getInstanceDir().getNameCount()-1); + dataDir = dataHomePath.resolve(instanceDirLastPath).resolve(cd.getDataDir()); } else { // by default, we go off the instance directory - dataDir = cd.getInstanceDir().resolve(cd.getDataDir()).toAbsolutePath().toString(); + dataDir = cd.getInstanceDir().resolve(cd.getDataDir()); } - return dataDir; + assert dataDir.isAbsolute(); + return dataDir.toString(); } public void cleanupOldIndexDirectories(final String dataDirPath, final String currentIndexDirPath, boolean afterCoreReload) { @@ -398,7 +397,7 @@ protected boolean deleteOldIndexDirectory(String oldDirPath) throws IOException public void initCoreContainer(CoreContainer cc) { this.coreContainer = cc; if (cc != null && cc.getConfig() != null) { - this.dataHomePath = cc.getConfig().getSolrDataHome(); + this.dataHomePath = cc.getConfig().getSolrDataHome(); // absolute } } diff --git a/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java index e73acd6250af..8782371e7e73 100644 --- a/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java +++ b/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java @@ -155,7 
+155,7 @@ private final static class LocalityHolder { } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings("rawtypes") NamedList args) { super.init(args); params = args.toSolrParams(); this.hdfsDataDir = getConfig(HDFS_HOME, null); diff --git a/solr/core/src/java/org/apache/solr/core/IndexDeletionPolicyWrapper.java b/solr/core/src/java/org/apache/solr/core/IndexDeletionPolicyWrapper.java index 201c6364c40b..9de8698af508 100644 --- a/solr/core/src/java/org/apache/solr/core/IndexDeletionPolicyWrapper.java +++ b/solr/core/src/java/org/apache/solr/core/IndexDeletionPolicyWrapper.java @@ -323,7 +323,7 @@ public String getSegmentsFileName() { } @Override - public Collection getFileNames() throws IOException { + public Collection getFileNames() throws IOException { return delegate.getFileNames(); } @@ -379,7 +379,7 @@ public boolean isDeleted() { } @Override - public Map getUserData() throws IOException { + public Map getUserData() throws IOException { return delegate.getUserData(); } } diff --git a/solr/core/src/java/org/apache/solr/core/IndexReaderFactory.java b/solr/core/src/java/org/apache/solr/core/IndexReaderFactory.java index 03e73b7c8516..bf30d838c809 100644 --- a/solr/core/src/java/org/apache/solr/core/IndexReaderFactory.java +++ b/solr/core/src/java/org/apache/solr/core/IndexReaderFactory.java @@ -35,7 +35,7 @@ public abstract class IndexReaderFactory implements NamedListInitializedPlugin { * */ @Override - public void init(NamedList args) { + public void init(@SuppressWarnings("rawtypes") NamedList args) { Object v = args.get("setTermIndexDivisor"); if (v != null) { throw new IllegalArgumentException("Illegal parameter 'setTermIndexDivisor'"); diff --git a/solr/core/src/java/org/apache/solr/core/InitParams.java b/solr/core/src/java/org/apache/solr/core/InitParams.java index 8e7b874ecd6b..ad31cf6fd8f4 100644 --- a/solr/core/src/java/org/apache/solr/core/InitParams.java +++ b/solr/core/src/java/org/apache/solr/core/InitParams.java @@ -38,8 +38,13 @@ public class InitParams { public static final String TYPE = "initParams"; public final String name; public final Set paths; - public final NamedList defaults, invariants, appends; - private PluginInfo pluginInfo; + @SuppressWarnings({"rawtypes"}) + public final NamedList defaults; + @SuppressWarnings({"rawtypes"}) + public final NamedList invariants; + @SuppressWarnings({"rawtypes"}) + public final NamedList appends; + final private PluginInfo pluginInfo; private final Set KNOWN_KEYS = ImmutableSet.of(DEFAULTS, INVARIANTS, APPENDS); public InitParams(PluginInfo p) { @@ -51,6 +56,7 @@ public InitParams(PluginInfo p) { paths = Set.copyOf(StrUtils.splitSmart(pathStr, ',')); } this.paths = paths; + @SuppressWarnings({"rawtypes"}) NamedList nl = (NamedList) p.initArgs.get(DEFAULTS); defaults = nl == null ? 
null : nl.getImmutableCopy(); nl = (NamedList) p.initArgs.get(INVARIANTS); @@ -88,6 +94,7 @@ private static boolean matchPath(String path, String name) { } + @SuppressWarnings({"unchecked", "rawtypes"}) public void apply(PluginInfo info) { if (!info.isFromSolrConfig()) { //if this is a component implicitly defined in code it should be overridden by initPrams @@ -110,6 +117,7 @@ public void apply(PluginInfo info) { } } + @SuppressWarnings({"unchecked", "rawtypes"}) private static void merge(NamedList first, NamedList second, NamedList sink, String name, boolean appends) { if (first == null && second == null) return; if (first == null) first = new NamedList(); diff --git a/solr/core/src/java/org/apache/solr/core/MMapDirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/MMapDirectoryFactory.java index 0c1875b259fd..69def25ccdb4 100644 --- a/solr/core/src/java/org/apache/solr/core/MMapDirectoryFactory.java +++ b/solr/core/src/java/org/apache/solr/core/MMapDirectoryFactory.java @@ -47,6 +47,7 @@ public class MMapDirectoryFactory extends StandardDirectoryFactory { private int maxChunk; @Override + @SuppressWarnings({"rawtypes"}) public void init(NamedList args) { super.init(args); SolrParams params = args.toSolrParams(); diff --git a/solr/core/src/java/org/apache/solr/core/MemClassLoader.java b/solr/core/src/java/org/apache/solr/core/MemClassLoader.java index cf6bb4dd9713..03e4de295df3 100644 --- a/solr/core/src/java/org/apache/solr/core/MemClassLoader.java +++ b/solr/core/src/java/org/apache/solr/core/MemClassLoader.java @@ -44,6 +44,7 @@ public class MemClassLoader extends ClassLoader implements AutoCloseable, Resour private boolean allJarsLoaded = false; private final SolrResourceLoader parentLoader; private List libs = new ArrayList<>(); + @SuppressWarnings("rawtypes") private Map classCache = new HashMap<>(); private List errors = new ArrayList<>(); @@ -97,6 +98,7 @@ protected Class findClass(String name) throws ClassNotFoundException { } } + @SuppressWarnings({"rawtypes"}) private synchronized Class loadFromRuntimeLibs(String name) throws ClassNotFoundException { Class result = classCache.get(name); if(result != null) @@ -149,11 +151,12 @@ private ByteBuffer getByteBuffer(String name, AtomicReference jarName) t } @Override - public void close() throws Exception { + public void close() { for (PluginBag.RuntimeLib lib : libs) { try { lib.close(); } catch (Exception e) { + log.error("Error closing lib {}", lib.getName(), e); } } } diff --git a/solr/core/src/java/org/apache/solr/core/NRTCachingDirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/NRTCachingDirectoryFactory.java index 789ffdbe00dc..4e1fad9637ed 100644 --- a/solr/core/src/java/org/apache/solr/core/NRTCachingDirectoryFactory.java +++ b/solr/core/src/java/org/apache/solr/core/NRTCachingDirectoryFactory.java @@ -36,6 +36,7 @@ public class NRTCachingDirectoryFactory extends StandardDirectoryFactory { private double maxCachedMB = DEFAULT_MAX_CACHED_MB; @Override + @SuppressWarnings({"rawtypes"}) public void init(NamedList args) { super.init(args); SolrParams params = args.toSolrParams(); diff --git a/solr/core/src/java/org/apache/solr/core/NodeConfig.java b/solr/core/src/java/org/apache/solr/core/NodeConfig.java index 01aaf3512ab3..ef8fddbe4be7 100644 --- a/solr/core/src/java/org/apache/solr/core/NodeConfig.java +++ b/solr/core/src/java/org/apache/solr/core/NodeConfig.java @@ -18,6 +18,7 @@ import java.nio.file.Path; import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import 
java.util.Properties; import java.util.Set; @@ -29,6 +30,7 @@ public class NodeConfig { + // all Path fields here are absolute and normalized. private final String nodeName; @@ -40,6 +42,8 @@ public class NodeConfig { private final Path configSetBaseDirectory; + private final Set allowPaths; + private final String sharedLibDirectory; private final PluginInfo shardHandlerFactoryConfig; @@ -94,7 +98,8 @@ private NodeConfig(String nodeName, Path coreRootDirectory, Path solrDataHome, I Path solrHome, SolrResourceLoader loader, Properties solrProperties, PluginInfo[] backupRepositoryPlugins, MetricsConfig metricsConfig, PluginInfo transientCacheConfig, PluginInfo tracerConfig, - boolean fromZookeeper) { + boolean fromZookeeper, Set allowPaths) { + // all Path params here are absolute and normalized. this.nodeName = nodeName; this.coreRootDirectory = coreRootDirectory; this.solrDataHome = solrDataHome; @@ -123,6 +128,7 @@ private NodeConfig(String nodeName, Path coreRootDirectory, Path solrDataHome, I this.transientCacheConfig = transientCacheConfig; this.tracerConfig = tracerConfig; this.fromZookeeper = fromZookeeper; + this.allowPaths = allowPaths; if (this.cloudConfig != null && this.getCoreLoadThreadCount(false) < 2) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, @@ -134,10 +140,12 @@ public String getNodeName() { return nodeName; } + /** Absolute. */ public Path getCoreRootDirectory() { return coreRootDirectory; } + /** Absolute. */ public Path getSolrDataHome() { return solrDataHome; } @@ -208,6 +216,7 @@ public String getManagementPath() { return managementPath; } + /** Absolute. */ public Path getConfigSetBaseDirectory() { return configSetBaseDirectory; } @@ -258,8 +267,14 @@ public boolean isFromZookeeper() { return fromZookeeper; } - public static class NodeConfigBuilder { + /** + * Extra file paths that will be allowed for core creation, in addition to + * SOLR_HOME, SOLR_DATA_HOME and coreRootDir + */ + public Set getAllowPaths() { return allowPaths; } + public static class NodeConfigBuilder { + // all Path fields here are absolute and normalized. 
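The builder resolves every configured location against solrHome and normalizes it before storing it, which is what lets the allow-list check added in CoreContainer rely on simple prefix containment of absolute, normalized paths. A minimal sketch of that containment idea, under assumed names (AllowPathSketch and isAllowed are illustrative; the real check lives in SolrPaths.assertPathAllowed):

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Set;

public class AllowPathSketch {
  // Hypothetical stand-in for SolrPaths.assertPathAllowed: a candidate path is
  // accepted only when it lies under one of the allow-listed roots.
  static boolean isAllowed(Path candidate, Set<Path> allowRoots) {
    Path normalized = candidate.toAbsolutePath().normalize();
    return allowRoots.stream().anyMatch(normalized::startsWith);
  }

  public static void main(String[] args) {
    Path solrHome = Paths.get("/var/solr").toAbsolutePath().normalize();
    // resolve() returns its argument unchanged when that argument is already
    // absolute, so relative and absolute config values both end up absolute here.
    Path coreRoot = solrHome.resolve("cores").normalize();
    Set<Path> allow = Set.of(solrHome, coreRoot);

    System.out.println(isAllowed(Paths.get("/var/solr/cores/core1/data"), allow)); // true
    // normalize() folds the ".." segments first, so traversal out of the
    // allowed tree is caught: /var/solr/cores/../../etc -> /etc
    System.out.println(isAllowed(Paths.get("/var/solr/cores/../../etc"), allow)); // false
  }
}

The toAbsolutePath().normalize() step mirrors what this patch applies to SOLR_DATA_HOME in CachingDirectoryFactory and to instanceDir in CoreDescriptor.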
private SolrResourceLoader loader; private Path coreRootDirectory; private Path solrDataHome; @@ -288,6 +303,7 @@ public static class NodeConfigBuilder { private PluginInfo transientCacheConfig; private PluginInfo tracerConfig; private boolean fromZookeeper = false; + private Set allowPaths = Collections.emptySet(); private final Path solrHome; private final String nodeName; @@ -314,26 +330,23 @@ public static class NodeConfigBuilder { public NodeConfigBuilder(String nodeName, Path solrHome) { this.nodeName = nodeName; - this.solrHome = solrHome; + this.solrHome = solrHome.toAbsolutePath(); this.coreRootDirectory = solrHome; // always init from sysprop because config element may be missing - String dataHomeProperty = System.getProperty(SolrXmlConfig.SOLR_DATA_HOME); - if (dataHomeProperty != null && !dataHomeProperty.isEmpty()) { - solrDataHome = solrHome.resolve(dataHomeProperty); - } - this.configSetBaseDirectory = solrHome.resolve("configsets"); + setSolrDataHome(System.getProperty(SolrXmlConfig.SOLR_DATA_HOME)); + setConfigSetBaseDirectory("configsets"); this.metricsConfig = new MetricsConfig.MetricsConfigBuilder().build(); } public NodeConfigBuilder setCoreRootDirectory(String coreRootDirectory) { - this.coreRootDirectory = solrHome.resolve(coreRootDirectory); + this.coreRootDirectory = solrHome.resolve(coreRootDirectory).normalize(); return this; } public NodeConfigBuilder setSolrDataHome(String solrDataHomeString) { // keep it null unless explicitly set to non-empty value if (solrDataHomeString != null && !solrDataHomeString.isEmpty()) { - this.solrDataHome = solrHome.resolve(solrDataHomeString); + this.solrDataHome = solrHome.resolve(solrDataHomeString).normalize(); } return this; } @@ -455,6 +468,11 @@ public NodeConfigBuilder setFromZookeeper(boolean fromZookeeper) { return this; } + public NodeConfigBuilder setAllowPaths(Set paths) { + this.allowPaths = paths; + return this; + } + public NodeConfig build() { // if some things weren't set then set them now. Simple primitives are set on the field declaration if (loader == null) { @@ -465,7 +483,7 @@ public NodeConfig build() { updateShardHandlerConfig, coreAdminHandlerClass, collectionsAdminHandlerClass, healthCheckHandlerClass, infoHandlerClass, configSetsHandlerClass, logWatcherConfig, cloudConfig, coreLoadThreads, replayUpdatesThreads, transientCacheSize, useSchemaCache, managementPath, solrHome, loader, solrProperties, - backupRepositoryPlugins, metricsConfig, transientCacheConfig, tracerConfig, fromZookeeper); + backupRepositoryPlugins, metricsConfig, transientCacheConfig, tracerConfig, fromZookeeper, allowPaths); } public NodeConfigBuilder setSolrResourceLoader(SolrResourceLoader resourceLoader) { diff --git a/solr/core/src/java/org/apache/solr/core/PluginBag.java b/solr/core/src/java/org/apache/solr/core/PluginBag.java index 2937dc569f65..2f82ccc1cf2f 100644 --- a/solr/core/src/java/org/apache/solr/core/PluginBag.java +++ b/solr/core/src/java/org/apache/solr/core/PluginBag.java @@ -69,6 +69,7 @@ public class PluginBag implements AutoCloseable { private final Map> registry; private final Map> immutableRegistry; private String def; + @SuppressWarnings({"rawtypes"}) private final Class klass; private SolrCore core; private final SolrConfig.SolrPluginInfo meta; @@ -119,6 +120,7 @@ public static void initInstance(Object inst, PluginInfo info) { /** * Check if any of the mentioned names are missing. 
If yes, return the Set of missing names */ + @SuppressWarnings({"unchecked"}) public Set checkContains(Collection names) { if (names == null || names.isEmpty()) return Collections.EMPTY_SET; HashSet result = new HashSet<>(); @@ -126,6 +128,7 @@ public Set checkContains(Collection names) { return result; } + @SuppressWarnings({"unchecked"}) public PluginHolder createPlugin(PluginInfo info) { if ("true".equals(String.valueOf(info.attributes.get("runtimeLib")))) { if (log.isDebugEnabled()) { @@ -148,7 +151,7 @@ public PluginHolder createPlugin(PluginInfo info) { PackagePluginHolder holder = new PackagePluginHolder<>(info, core, meta); return holder; } else { - T inst = core.createInstance(info.className, (Class) meta.clazz, meta.getCleanTag(), null, core.getResourceLoader(info.pkgName)); + T inst = SolrCore.createInstance(info.className, (Class) meta.clazz, meta.getCleanTag(), null, core.getResourceLoader(info.pkgName)); initInstance(inst, info); return new PluginHolder<>(info, inst); } @@ -208,6 +211,7 @@ public T put(String name, T plugin) { return old == null ? null : old.get(); } + @SuppressWarnings({"unchecked"}) public PluginHolder put(String name, PluginHolder plugin) { Boolean registerApi = null; Boolean disableHandler = null; @@ -382,14 +386,21 @@ public boolean isLoaded() { } @Override - public void close() throws Exception { + public void close() { // TODO: there may be a race here. One thread can be creating a plugin // and another thread can come along and close everything (missing the plugin // that is in the state of being created and will probably never have close() called on it). // can close() be called concurrently with other methods? if (isLoaded()) { T myInst = get(); - if (myInst != null && myInst instanceof AutoCloseable) ((AutoCloseable) myInst).close(); + // N.B. 
instanceof returns false if myInst is null + if (myInst instanceof AutoCloseable) { + try { + ((AutoCloseable) myInst).close(); + } catch (Exception e) { + log.error("Error closing {}", inst , e); + } + } } } @@ -459,10 +470,11 @@ private synchronized boolean createInst() { MemClassLoader loader = (MemClassLoader) resourceLoader; loader.loadJars(); } + @SuppressWarnings({"unchecked"}) Class clazz = (Class) pluginMeta.clazz; T localInst = null; try { - localInst = core.createInstance(pluginInfo.className, clazz, pluginMeta.getCleanTag(), null, resourceLoader); + localInst = SolrCore.createInstance(pluginInfo.className, clazz, pluginMeta.getCleanTag(), null, resourceLoader); } catch (SolrException e) { if (isRuntimeLib && !(resourceLoader instanceof MemClassLoader)) { throw new SolrException(SolrException.ErrorCode.getErrorCode(e.code()), @@ -489,8 +501,6 @@ private synchronized boolean createInst() { lazyInst = localInst; // only assign the volatile until after the plugin is completely ready to use return true; } - - } /** @@ -526,9 +536,7 @@ public void init(PluginInfo info) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, StrUtils.formatString(BlobRepository.INVALID_JAR_MSG, url, sha512, digest) ); } log.info("dynamic library verified {}, sha512: {}", url, sha512); - } - } public RuntimeLib(SolrCore core) { @@ -539,6 +547,7 @@ public String getUrl(){ return url; } + @SuppressWarnings({"unchecked"}) void loadJar() { if (jarContent != null) return; synchronized (this) { @@ -601,7 +610,7 @@ public ByteBuffer getFileContent(BlobRepository.BlobContent blobCont @Override - public void close() throws Exception { + public void close() { if (jarContent != null) coreContainer.getBlobRepository().decrementBlobRefCount(jarContent); } diff --git a/solr/core/src/java/org/apache/solr/core/PluginInfo.java b/solr/core/src/java/org/apache/solr/core/PluginInfo.java index bb290e12e813..428d72c58814 100644 --- a/solr/core/src/java/org/apache/solr/core/PluginInfo.java +++ b/solr/core/src/java/org/apache/solr/core/PluginInfo.java @@ -42,6 +42,7 @@ */ public class PluginInfo implements MapSerializable { public final String name, className, type, pkgName; + @SuppressWarnings({"rawtypes"}) public final NamedList initArgs; public final Map attributes; public final List children; @@ -49,7 +50,7 @@ public class PluginInfo implements MapSerializable { - public PluginInfo(String type, Map attrs, NamedList initArgs, List children) { + public PluginInfo(String type, Map attrs, @SuppressWarnings({"rawtypes"})NamedList initArgs, List children) { this.type = type; this.name = attrs.get(NAME); Pair parsed = parseClassName(attrs.get(CLASS_NAME)); @@ -92,6 +93,7 @@ public PluginInfo(Node node, String err, boolean requireName, boolean requireCla isFromSolrConfig = true; } + @SuppressWarnings({"unchecked", "rawtypes"}) public PluginInfo(String type, Map map) { LinkedHashMap m = new LinkedHashMap<>(map); initArgs = new NamedList(); @@ -163,6 +165,7 @@ public PluginInfo getChild(String type){ return l.isEmpty() ? 
null:l.get(0); } + @SuppressWarnings({"unchecked", "rawtypes"}) public Map toMap(Map map) { map.putAll(attributes); Map m = map; @@ -197,6 +200,7 @@ public List getChildren(String type){ for (PluginInfo child : children) if(type.equals(child.type)) result.add(child); return result; } + @SuppressWarnings({"rawtypes"}) public static final PluginInfo EMPTY_INFO = new PluginInfo("",Collections.emptyMap(), new NamedList(),Collections.emptyList()); private static final HashSet NL_TAGS = new HashSet<> diff --git a/solr/core/src/java/org/apache/solr/core/QuerySenderListener.java b/solr/core/src/java/org/apache/solr/core/QuerySenderListener.java index a0dbbb1830ef..2e8fa9ea2623 100644 --- a/solr/core/src/java/org/apache/solr/core/QuerySenderListener.java +++ b/solr/core/src/java/org/apache/solr/core/QuerySenderListener.java @@ -45,14 +45,13 @@ public QuerySenderListener(SolrCore core) { } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher currentSearcher) { final SolrIndexSearcher searcher = newSearcher; - log.info("QuerySenderListener sending requests to {}", newSearcher); + log.debug("QuerySenderListener sending requests to {}", newSearcher); List allLists = (List)getArgs().get("queries"); if (allLists == null) return; - boolean createNewReqInfo = SolrRequestInfo.getRequestInfo() == null; for (NamedList nlst : allLists) { - SolrQueryRequest req = null; try { // bind the request to a particular searcher (the newSearcher) NamedList params = addEventParms(currentSearcher, nlst); @@ -60,42 +59,41 @@ public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher current if (params.get(DISTRIB) == null) { params.add(DISTRIB, false); } - req = new LocalSolrQueryRequest(getCore(),params) { + SolrQueryRequest req = new LocalSolrQueryRequest(getCore(),params) { @Override public SolrIndexSearcher getSearcher() { return searcher; } @Override public void close() { } }; - SolrQueryResponse rsp = new SolrQueryResponse(); - if (createNewReqInfo) { - // SolrRequerstInfo for this thread could have been transferred from the parent - // thread. - SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp)); - } - getCore().execute(getCore().getRequestHandler(req.getParams().get(CommonParams.QT)), req, rsp); + SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp)); + try { + getCore().execute(getCore().getRequestHandler(req.getParams().get(CommonParams.QT)), req, rsp); - // Retrieve the Document instances (not just the ids) to warm - // the OS disk cache, and any Solr document cache. Only the top - // level values in the NamedList are checked for DocLists. - NamedList values = rsp.getValues(); - for (int i=0; i paramsets = new LinkedHashMap<>(); private final int znodeVersion; + @SuppressWarnings({"rawtypes"}) public RequestParams(Map data, int znodeVersion) { if (data == null) data = Collections.EMPTY_MAP; this.data = data; @@ -67,6 +69,7 @@ public RequestParams(Map data, int znodeVersion) { this.znodeVersion = znodeVersion; } + @SuppressWarnings({"rawtypes"}) public static ParamSet createParamSet(Map map, Long version) { Map copy = getDeepCopy(map, 3); Map meta = (Map) copy.remove(""); @@ -82,27 +85,27 @@ public static ParamSet createParamSet(Map map, Long version) { * This converts Lists to arrays of strings. 
Because Solr expects * params to be String[] */ - private static Map getMapCopy(Map value) { - Map copy = new LinkedHashMap<>(); - for (Object o1 : value.entrySet()) { - Map.Entry entry = (Map.Entry) o1; + private static Map getMapCopy(Map value) { + Map copy = new LinkedHashMap<>(); + for (Map.Entry entry : value.entrySet()) { if ("".equals(entry.getKey())) { - copy.put(entry.getKey(), entry.getValue()); - continue; - } - if (entry.getValue() != null) { - if (entry.getValue() instanceof List) { - List l = (List) entry.getValue(); - String[] sarr = new String[l.size()]; - for (int i = 0; i < l.size(); i++) { - if (l.get(i) != null) sarr[i] = String.valueOf(l.get(i)); - } - copy.put(entry.getKey(), sarr); + // Why is this a special case? + if (entry.getValue() instanceof String[]) { + copy.put("", (String[]) entry.getValue()); } else { - copy.put(entry.getKey(), String.valueOf(entry.getValue())); + throw new IllegalArgumentException(); + } + } else if (entry.getValue() == null) { + copy.put(entry.getKey(), null); + } else if (entry.getValue() instanceof List) { + List l = (List) entry.getValue(); + String[] sarr = new String[l.size()]; + for (int i = 0; i < l.size(); i++) { + if (l.get(i) != null) sarr[i] = String.valueOf(l.get(i)); } + copy.put(entry.getKey(), sarr); } else { - copy.put(entry.getKey(), entry.getValue()); + copy.put(entry.getKey(), new String[] { entry.getValue().toString() }); } } return copy; @@ -122,10 +125,12 @@ public int getZnodeVersion() { } @Override + @SuppressWarnings({"unchecked"}) public Map toMap(Map map) { return getMapWithVersion(data, znodeVersion); } + @SuppressWarnings({"unchecked", "rawtypes"}) public static Map getMapWithVersion(Map data, int znodeVersion) { Map result = new LinkedHashMap(); result.put(ConfigOverlay.ZNODEVER, znodeVersion); @@ -133,6 +138,7 @@ public static Map getMapWithVersion(Map data, in return result; } + @SuppressWarnings({"unchecked", "rawtypes"}) public RequestParams setParams(String name, ParamSet paramSet) { Map deepCopy = getDeepCopy(data, 3); Map p = (Map) deepCopy.get(NAME); @@ -182,6 +188,7 @@ private static Object[] getMapAndVersion(SolrResourceLoader loader, String name) log.info("conf resource {} loaded . 
version : {} ", name, version); } try { + @SuppressWarnings({"rawtypes"}) Map m = (Map) fromJSON (in); return new Object[]{m, version}; } catch (Exception e) { @@ -205,11 +212,15 @@ public byte[] toByteArray() { public static final String APPENDS = "_appends_"; public static final String INVARIANTS = "_invariants_"; + @SuppressWarnings({"unchecked"}) public static class ParamSet implements MapSerializable { + @SuppressWarnings({"rawtypes"}) private final Map defaults, appends, invariants; Map paramsMap; + @SuppressWarnings({"rawtypes"}) public final Map meta; + @SuppressWarnings({"rawtypes"}) ParamSet(Map defaults, Map invariants, Map appends, Map meta) { this.defaults = defaults; this.invariants = invariants; @@ -227,6 +238,7 @@ public Long getVersion() { } @Override + @SuppressWarnings({"unchecked"}) public Map toMap(Map result) { result.putAll(defaults); if (appends != null) result.put(APPENDS, appends); @@ -236,7 +248,8 @@ public Map toMap(Map result) { } - public ParamSet update(Map map) { + @SuppressWarnings({"rawtypes"}) + public ParamSet update(@SuppressWarnings({"rawtypes"})Map map) { ParamSet p = createParamSet(map, null); return new ParamSet( mergeMaps(getDeepCopy(defaults, 2), p.defaults), @@ -246,6 +259,7 @@ public ParamSet update(Map map) { ); } + @SuppressWarnings({"unchecked", "rawtypes"}) private static Map mergeMaps(Map m1, Map m2) { if (m1 == null && m2 == null) return null; if (m1 == null) return m2; @@ -263,15 +277,16 @@ public VersionedParams getParams(String type) { /**get the raw map */ + @SuppressWarnings({"unchecked"}) public Map get() { return defaults; } } - public static class VersionedParams extends MapSolrParams { + public static class VersionedParams extends MultiMapSolrParams { final ParamSet paramSet; - public VersionedParams(Map map, ParamSet paramSet) { + public VersionedParams(Map map, ParamSet paramSet) { super(getMapCopy(map)); this.paramSet = paramSet; } diff --git a/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java b/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java index fa34edd3c645..6fc3629ad1ec 100644 --- a/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java +++ b/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java @@ -24,7 +24,7 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode; -import org.apache.lucene.codecs.lucene84.Lucene84Codec; +import org.apache.lucene.codecs.lucene86.Lucene86Codec; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.util.NamedList; @@ -73,6 +73,7 @@ public void inform(SolrCore core) { } @Override + @SuppressWarnings({"rawtypes"}) public void init(NamedList args) { super.init(args); assert codec == null; @@ -91,7 +92,7 @@ public void init(NamedList args) { compressionMode = SOLR_DEFAULT_COMPRESSION_MODE; log.debug("Using default compressionMode: {}", compressionMode); } - codec = new Lucene84Codec(compressionMode) { + codec = new Lucene86Codec(compressionMode) { @Override public PostingsFormat getPostingsFormatForField(String field) { final SchemaField schemaField = core.getLatestSchema().getFieldOrNull(field); diff --git a/solr/core/src/java/org/apache/solr/core/SimpleTextCodecFactory.java b/solr/core/src/java/org/apache/solr/core/SimpleTextCodecFactory.java index de0124fce82f..9f3929aa7f59 100644 --- a/solr/core/src/java/org/apache/solr/core/SimpleTextCodecFactory.java 
+++ b/solr/core/src/java/org/apache/solr/core/SimpleTextCodecFactory.java @@ -25,6 +25,7 @@ public class SimpleTextCodecFactory extends CodecFactory { private Codec codec; @Override + @SuppressWarnings({"rawtypes"}) public void init(NamedList args) { super.init(args); assert codec == null; diff --git a/solr/core/src/java/org/apache/solr/core/SolrConfig.java b/solr/core/src/java/org/apache/solr/core/SolrConfig.java index 64407c0b1750..a38e800743c5 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrConfig.java +++ b/solr/core/src/java/org/apache/solr/core/SolrConfig.java @@ -315,10 +315,10 @@ public static final Version parseLuceneVersionString(final String matchVersion) } if (version == Version.LATEST && !versionWarningAlreadyLogged.getAndSet(true)) { - log.warn("You should not use LATEST as luceneMatchVersion property: {}{}{}" - , "if you use this setting, and then Solr upgrades to a newer release of Lucene, " - , "sizable changes may happen. If precise back compatibility is important " - , "then you should instead explicitly specify an actual Lucene version."); + log.warn("You should not use LATEST as luceneMatchVersion property: " + + "if you use this setting, and then Solr upgrades to a newer release of Lucene, " + + "sizable changes may happen. If precise back compatibility is important " + + "then you should instead explicitly specify an actual Lucene version."); } return version; @@ -368,11 +368,13 @@ public static final Version parseLuceneVersionString(final String matchVersion) public static class SolrPluginInfo { + @SuppressWarnings({"rawtypes"}) public final Class clazz; public final String tag; public final Set options; + @SuppressWarnings({"unchecked", "rawtypes"}) private SolrPluginInfo(Class clz, String tag, PluginOpts... opts) { this.clazz = clz; this.tag = tag; @@ -389,6 +391,7 @@ public String getTagCleanLower() { } } + @SuppressWarnings({"unchecked", "rawtypes"}) public static ConfigOverlay getConfigOverlay(SolrResourceLoader loader) { InputStream in = null; InputStreamReader isr = null; @@ -672,6 +675,7 @@ public UpdateHandlerInfo(String className, int autoCommmitMaxDocs, int autoCommm @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public Map toMap(Map map) { LinkedHashMap result = new LinkedHashMap(); result.put("indexWriter", makeMap("closeWaitsForMerges", indexWriterCloseWaitsForMerges)); @@ -706,6 +710,7 @@ public String getDataDir() { * SearchComponent, QueryConverter, SolrEventListener, DirectoryFactory, * IndexDeletionPolicy, IndexReaderFactory, {@link TransformerFactory} */ + @SuppressWarnings({"unchecked", "rawtypes"}) public List getPluginInfos(String type) { List result = pluginStore.get(type); SolrPluginInfo info = classVsSolrPluginInfo.get(type); @@ -755,7 +760,7 @@ private void initLibs(SolrResourceLoader loader, boolean isConfigsetTrusted) { try { urls.addAll(SolrResourceLoader.getURLs(libPath)); } catch (IOException e) { - log.warn("Couldn't add files from {} to classpath: {}", libPath, e.getMessage()); + log.warn("Couldn't add files from {} to classpath: {}", libPath, e); } } @@ -781,14 +786,14 @@ private void initLibs(SolrResourceLoader loader, boolean isConfigsetTrusted) { else urls.addAll(SolrResourceLoader.getFilteredURLs(dir, regex)); } catch (IOException e) { - log.warn("Couldn't add files from {} filtered by {} to classpath: {}", dir, regex, e.getMessage()); + log.warn("Couldn't add files from {} filtered by {} to classpath: {}", dir, regex, e); } } else if (null != path) { final Path dir = instancePath.resolve(path); try { 
urls.add(dir.toUri().toURL()); } catch (MalformedURLException e) { - log.warn("Couldn't add file {} to classpath: {}", dir, e.getMessage()); + log.warn("Couldn't add file {} to classpath: {}", dir, e); } } else { throw new RuntimeException("lib: missing mandatory attributes: 'dir' or 'path'"); @@ -856,6 +861,7 @@ public String get(String path, String def) { } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public Map toMap(Map result) { if (getZnodeVersion() > -1) result.put(ZNODEVER, getZnodeVersion()); result.put(IndexSchema.LUCENE_MATCH_VERSION_PARAM, luceneMatchVersion); @@ -914,6 +920,7 @@ public Map toMap(Map result) { return result; } + @SuppressWarnings({"unchecked", "rawtypes"}) private void addCacheConfig(Map queryMap, CacheConfig... cache) { if (cache == null) return; for (CacheConfig config : cache) if (config != null) queryMap.put(config.getNodeName(), config); diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java index ca7df79ad81e..9b635e494020 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrCore.java +++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java @@ -196,6 +196,7 @@ public final class SolrCore implements SolrInfoBean, Closeable { private final SolrConfig solrConfig; private final SolrResourceLoader resourceLoader; private volatile IndexSchema schema; + @SuppressWarnings({"rawtypes"}) private final NamedList configSetProperties; private final String dataDir; private final String ulogDir; @@ -326,7 +327,7 @@ public IndexSchema getLatestSchema() { return schema; } - /** The core's instance directory. */ + /** The core's instance directory (absolute). */ public Path getInstancePath() { return getCoreDescriptor().getInstanceDir(); } @@ -354,6 +355,7 @@ public void setLatestSchema(IndexSchema replacementSchema) { this.schema = replacementSchema; } + @SuppressWarnings({"rawtypes"}) public NamedList getConfigSetProperties() { return configSetProperties; } @@ -1356,7 +1358,7 @@ private static void writeNewIndexProps(Directory dir, String tmpFileName, String private String initUpdateLogDir(CoreDescriptor coreDescriptor) { String updateLogDir = coreDescriptor.getUlogDir(); if (updateLogDir == null) { - updateLogDir = coreDescriptor.getInstanceDir().resolve(dataDir).normalize().toAbsolutePath().toString(); + updateLogDir = coreDescriptor.getInstanceDir().resolve(dataDir).toString(); } return updateLogDir; } @@ -2018,7 +2020,7 @@ public RefCounted getRealtimeSearcher() { } - public RefCounted getSearcher(boolean forceNew, boolean returnSearcher, final Future[] waitSearcher) { + public RefCounted getSearcher(boolean forceNew, boolean returnSearcher, @SuppressWarnings({"rawtypes"})final Future[] waitSearcher) { return getSearcher(forceNew, returnSearcher, waitSearcher, false); } @@ -2214,7 +2216,7 @@ public RefCounted openNewSearcher(boolean updateHandlerReopen * @param waitSearcher if non-null, will be filled in with a {@link Future} that will return after the new searcher is registered. * @param updateHandlerReopens if true, the UpdateHandler will be used when reopening a {@link SolrIndexSearcher}. */ - public RefCounted getSearcher(boolean forceNew, boolean returnSearcher, final Future[] waitSearcher, boolean updateHandlerReopens) { + public RefCounted getSearcher(boolean forceNew, boolean returnSearcher, @SuppressWarnings({"rawtypes"})final Future[] waitSearcher, boolean updateHandlerReopens) { // it may take some time to open an index.... 
we may need to make // sure that two threads aren't trying to open one at the same time // if it isn't necessary. @@ -2321,6 +2323,7 @@ public RefCounted getSearcher(boolean forceNew, boolean retur final SolrIndexSearcher currSearcher = currSearcherHolder == null ? null : currSearcherHolder.get(); + @SuppressWarnings({"rawtypes"}) Future future = null; // if the underlying searcher has not changed, no warming is needed @@ -2526,7 +2529,10 @@ private void registerSearcher(RefCounted newSearcherHolder) { ***/ newSearcher.register(); // register subitems (caches) - log.info("{}Registered new searcher {}", logid, newSearcher); + + if (log.isInfoEnabled()) { + log.info("{} Registered new searcher autowarm time: {} ms", logid, newSearcher.getWarmupTime()); + } } catch (Exception e) { // an exception in register() shouldn't be fatal. @@ -2800,6 +2806,7 @@ public QParserPlugin getQueryPlugin(String parserName) { private final PluginBag transformerFactories = new PluginBag<>(TransformerFactory.class, this); + @SuppressWarnings({"unchecked"}) Map createInstances(Map> map) { Map result = new LinkedHashMap<>(map.size(), 1); for (Map.Entry> e : map.entrySet()) { @@ -2848,7 +2855,7 @@ public T initPlugins(List pluginInfos, Map registry, return def; } - public void initDefaultPlugin(Object plugin, Class type) { + public void initDefaultPlugin(Object plugin, @SuppressWarnings({"rawtypes"})Class type) { if (plugin instanceof SolrMetricProducer) { coreMetricManager.registerMetricProducer(type.getSimpleName() + ".default", (SolrMetricProducer) plugin); } @@ -2994,7 +3001,7 @@ public void postClose(SolrCore core) { public static void deleteUnloadedCore(CoreDescriptor cd, boolean deleteDataDir, boolean deleteInstanceDir) { if (deleteDataDir) { - File dataDir = new File(cd.getInstanceDir().resolve(cd.getDataDir()).toAbsolutePath().toString()); + File dataDir = cd.getInstanceDir().resolve(cd.getDataDir()).toFile(); try { FileUtils.deleteDirectory(dataDir); } catch (IOException e) { @@ -3161,8 +3168,10 @@ public void cleanupOldIndexDirectories(boolean reload) { } } + @SuppressWarnings({"rawtypes"}) private static final Map implicitPluginsInfo = (Map) Utils.fromJSONResource("ImplicitPlugins.json"); + @SuppressWarnings({"unchecked", "rawtypes"}) public List getImplicitHandlers() { List implicits = new ArrayList<>(); Map requestHandlers = (Map) implicitPluginsInfo.get(SolrRequestHandler.TYPE); @@ -3187,12 +3196,14 @@ public List getImplicitHandlers() { * @param decoder a decoder with which to convert the blob into a Java Object representation (first time only) * @return a reference to the blob that has already cached the decoded version. */ + @SuppressWarnings({"rawtypes"}) public BlobRepository.BlobContentRef loadDecodeAndCacheBlob(String key, BlobRepository.Decoder decoder) { // make sure component authors don't give us oddball keys with no version... 
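    // (illustrative values, not in the patch: a key such as "mylib/2" satisfies the /N suffix rule checked below, while "mylib" with no version suffix is rejected)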
if (!BlobRepository.BLOB_KEY_PATTERN_CHECKER.matcher(key).matches()) { throw new IllegalArgumentException("invalid key format, must end in /N where N is the version number"); } // define the blob + @SuppressWarnings({"rawtypes"}) BlobRepository.BlobContentRef blobRef = coreContainer.getBlobRepository().getBlobIncRef(key, decoder); addCloseHook(new CloseHook() { @Override diff --git a/solr/core/src/java/org/apache/solr/core/SolrDeletionPolicy.java b/solr/core/src/java/org/apache/solr/core/SolrDeletionPolicy.java index 71c69a3409f7..6c4c9eca47ca 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrDeletionPolicy.java +++ b/solr/core/src/java/org/apache/solr/core/SolrDeletionPolicy.java @@ -48,7 +48,7 @@ public class SolrDeletionPolicy extends IndexDeletionPolicy implements NamedList private int maxOptimizedCommitsToKeep = 0; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings("rawtypes") NamedList args) { String keepOptimizedOnlyString = (String) args.get("keepOptimizedOnly"); String maxCommitsToKeepString = (String) args.get("maxCommitsToKeep"); String maxOptimizedCommitsToKeepString = (String) args.get("maxOptimizedCommitsToKeep"); diff --git a/solr/core/src/java/org/apache/solr/core/SolrPaths.java b/solr/core/src/java/org/apache/solr/core/SolrPaths.java index 344a67a41def..c72bff78a918 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrPaths.java +++ b/solr/core/src/java/org/apache/solr/core/SolrPaths.java @@ -28,6 +28,8 @@ import java.util.Set; import java.util.concurrent.ConcurrentSkipListSet; +import org.apache.commons.exec.OS; +import org.apache.solr.common.SolrException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -128,4 +130,37 @@ private static void logOnceInfo(String key, String msg) { log.info(msg); } } + + /** + * Checks that the given path is relative to one of the allowPaths supplied. Typically this will be + * called from {@link CoreContainer#assertPathAllowed(Path)} and allowPaths pre-filled with the node's + * SOLR_HOME, SOLR_DATA_HOME and coreRootDirectory folders, as well as any paths specified in + * solr.xml's allowPaths element. The following paths will always fail validation: + *
+ * <ul>
+ * <li>Relative paths starting with ..</li>
+ * <li>Windows UNC paths (such as \\host\share\path)</li>
+ * <li>Paths which are not relative to any of allowPaths</li>
+ * </ul>
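+ * For illustration (hypothetical values): with allowPaths containing only /var/solr, an absolute
+ * path such as /var/solr/data passes, /other/dir throws SolrException, and relative paths not
+ * starting with .. are always accepted.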
    + * @param pathToAssert path to check + * @param allowPaths list of paths that should be allowed prefixes for pathToAssert + * @throws SolrException if path is outside allowed paths + */ + public static void assertPathAllowed(Path pathToAssert, Set allowPaths) throws SolrException { + if (OS.isFamilyWindows() && pathToAssert.toString().startsWith("\\\\")) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Path " + pathToAssert + " disallowed. UNC paths not supported. Please use drive letter instead."); + } + // Conversion Path -> String -> Path is to be able to compare against org.apache.lucene.mockfile.FilterPath instances + final Path path = Path.of(pathToAssert.toString()).normalize(); + if (path.startsWith("..")) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Path " + pathToAssert + " disallowed due to path traversal.."); + } + if (!path.isAbsolute()) return; // All relative paths are accepted + if (allowPaths.contains(Paths.get("_ALL_"))) return; // Catch-all path "*"/"_ALL_" will allow all other paths + if (allowPaths.stream().noneMatch(p -> path.startsWith(Path.of(p.toString())))) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Path " + path + " must be relative to SOLR_HOME, SOLR_DATA_HOME coreRootDirectory. Set system property 'solr.allowPaths' to add other allowed paths."); + } + } } diff --git a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java index afe07cd01e5c..f4fcecde8455 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java +++ b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java @@ -16,12 +16,8 @@ */ package org.apache.solr.core; -import java.io.Closeable; -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; +import com.google.common.annotations.VisibleForTesting; +import java.io.*; import java.lang.invoke.MethodHandles; import java.lang.reflect.Constructor; import java.net.MalformedURLException; @@ -34,24 +30,13 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.PathMatcher; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; - -import com.google.common.annotations.VisibleForTesting; import org.apache.lucene.analysis.WordlistLoader; -import org.apache.lucene.analysis.util.CharFilterFactory; -import org.apache.lucene.analysis.util.ResourceLoader; -import org.apache.lucene.analysis.util.ResourceLoaderAware; -import org.apache.lucene.analysis.util.TokenFilterFactory; -import org.apache.lucene.analysis.util.TokenizerFactory; +import org.apache.lucene.analysis.util.*; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.PostingsFormat; @@ -520,13 +505,14 @@ public Class findClass(String cname, Class expectedType, Str } } - static final String empty[] = new String[0]; + static final String[] empty = new String[0]; @Override public T newInstance(String name, Class expectedType) { return newInstance(name, expectedType, empty); } + @SuppressWarnings({"rawtypes"}) private static final Class[] NO_CLASSES = new Class[0]; private static final Object[] 
NO_OBJECTS = new Object[0]; @@ -534,11 +520,12 @@ public T newInstance(String cname, Class expectedType, String... subpacka return newInstance(cname, expectedType, subpackages, NO_CLASSES, NO_OBJECTS); } + @SuppressWarnings({"rawtypes"}) public T newInstance(String cName, Class expectedType, String[] subPackages, Class[] params, Object[] args) { Class clazz = findClass(cName, expectedType, subPackages); if (clazz == null) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, - "Can not find class: " + cName + " in " + classLoader); + "Can not find class: " + cName + " in " + classLoader); } T obj = null; @@ -566,25 +553,50 @@ public T newInstance(String cName, Class expectedType, String[] subPackag } catch (Exception e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, - "Error instantiating class: '" + clazz.getName() + "'", e); + "Error instantiating class: '" + clazz.getName() + "'", e); } - if (!live) { - if (obj instanceof SolrCoreAware) { - assertAwareCompatibility(SolrCoreAware.class, obj); - waitingForCore.add((SolrCoreAware) obj); + addToCoreAware(obj); + addToResourceLoaderAware(obj); + addToInfoBeans(obj); + return obj; + } + + public void addToInfoBeans(T obj) { + if(!live) { + if (obj instanceof SolrInfoBean) { + //TODO: Assert here? + infoMBeans.add((SolrInfoBean) obj); } + } + } + + public boolean addToResourceLoaderAware(T obj) { + if (!live) { if (obj instanceof ResourceLoaderAware) { assertAwareCompatibility(ResourceLoaderAware.class, obj); waitingForResources.add((ResourceLoaderAware) obj); } - if (obj instanceof SolrInfoBean) { - //TODO: Assert here? - infoMBeans.add((SolrInfoBean) obj); - } + return true; + } else { + return false; } + } - return obj; + /** the inform() callback should be invoked on the listener. 
+ * If this is 'live', the callback is not called so currently this returns 'false' + * + */ + public boolean addToCoreAware(T obj) { + if (!live) { + if (obj instanceof SolrCoreAware) { + assertAwareCompatibility(SolrCoreAware.class, obj); + waitingForCore.add((SolrCoreAware) obj); + } + return true; + } else { + return false; + } } @@ -677,12 +689,13 @@ public Path getInstancePath() { /** * Keep a list of classes that are allowed to implement each 'Aware' interface */ + @SuppressWarnings({"rawtypes"}) private static final Map awareCompatibility; static { awareCompatibility = new HashMap<>(); awareCompatibility.put( - SolrCoreAware.class, new Class[]{ + SolrCoreAware.class, new Class[]{ // DO NOT ADD THINGS TO THIS LIST -- ESPECIALLY THINGS THAT CAN BE CREATED DYNAMICALLY // VIA RUNTIME APIS -- UNTILL CAREFULLY CONSIDERING THE ISSUES MENTIONED IN SOLR-8311 CodecFactory.class, @@ -698,7 +711,7 @@ public Path getInstancePath() { ); awareCompatibility.put( - ResourceLoaderAware.class, new Class[]{ + ResourceLoaderAware.class, new Class[]{ // DO NOT ADD THINGS TO THIS LIST -- ESPECIALLY THINGS THAT CAN BE CREATED DYNAMICALLY // VIA RUNTIME APIS -- UNTILL CAREFULLY CONSIDERING THE ISSUES MENTIONED IN SOLR-8311 CharFilterFactory.class, @@ -713,7 +726,8 @@ public Path getInstancePath() { /** * Utility function to throw an exception if the class is invalid */ - static void assertAwareCompatibility(Class aware, Object obj) { + @SuppressWarnings({"rawtypes"}) + public static void assertAwareCompatibility(Class aware, Object obj) { Class[] valid = awareCompatibility.get(aware); if (valid == null) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, diff --git a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java b/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java index a379ae6a5e5e..1c96d2bb9271 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java +++ b/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java @@ -26,13 +26,17 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.Paths; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; +import java.util.stream.Collectors; import com.google.common.base.Strings; import org.apache.commons.io.IOUtils; @@ -277,6 +281,9 @@ private static NodeConfig fillSolrSection(NodeConfig.NodeConfigBuilder builder, case "sharedLib": builder.setSharedLibDirectory(value); break; + case "allowPaths": + builder.setAllowPaths(stringToPaths(value)); + break; case "configSetBaseDir": builder.setConfigSetBaseDirectory(value); break; @@ -300,6 +307,15 @@ private static NodeConfig fillSolrSection(NodeConfig.NodeConfigBuilder builder, return builder.build(); } + private static Set stringToPaths(String commaSeparatedString) { + if (Strings.isNullOrEmpty(commaSeparatedString)) { + return Collections.emptySet(); + } + // Parse list of paths. The special value '*' is mapped to _ALL_ to mean all paths + return Arrays.stream(commaSeparatedString.split(",\\s?")) + .map(p -> Paths.get("*".equals(p) ? 
"_ALL_" : p)).collect(Collectors.toSet()); + } + private static UpdateShardHandlerConfig loadUpdateConfig(NamedList nl, boolean alwaysDefine) { if (nl == null && !alwaysDefine) diff --git a/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheDefault.java b/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheDefault.java index 5de4643e5fb6..f579c775f8d5 100644 --- a/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheDefault.java +++ b/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheDefault.java @@ -54,6 +54,7 @@ public TransientSolrCoreCacheDefault(final CoreContainer container) { // deprecate this for 7.0? this.cacheSize = cfg.getTransientCacheSize(); } else { + @SuppressWarnings({"rawtypes"}) NamedList args = cfg.getTransientCachePluginInfo().initArgs; Object obj = args.get("transientCacheSize"); if (obj != null) { @@ -69,6 +70,7 @@ private void doInit() { // Still handle just having transientCacheSize defined in the body of solr.xml not in a transient handler clause. this.cacheSize = cfg.getTransientCacheSize(); } else { + @SuppressWarnings({"rawtypes"}) NamedList args = cfg.getTransientCachePluginInfo().initArgs; Object obj = args.get("transientCacheSize"); if (obj != null) { @@ -105,6 +107,8 @@ protected boolean removeEldestEntry(Map.Entry eldest) { @Override public Collection prepareForShutdown() { // Return a copy of the values + + @SuppressWarnings({"unchecked", "rawtypes"}) List ret = new ArrayList(transientCores.values()); transientCores.clear(); return ret; @@ -129,7 +133,7 @@ public Set getLoadedCoreNames() { } // Remove a core from the internal structures, presumably it - // being closed. If the core is re-opened, it will be readded by CoreContainer. + // being closed. If the core is re-opened, it will be re-added by CoreContainer. @Override public SolrCore removeCore(String name) { return transientCores.remove(name); diff --git a/solr/core/src/java/org/apache/solr/core/ZkContainer.java b/solr/core/src/java/org/apache/solr/core/ZkContainer.java index 3df1c53cc75c..2ca62f8b37de 100644 --- a/solr/core/src/java/org/apache/solr/core/ZkContainer.java +++ b/solr/core/src/java/org/apache/solr/core/ZkContainer.java @@ -47,7 +47,16 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +/** + * Used by {@link CoreContainer} to hold ZooKeeper / SolrCloud info, especially {@link ZkController}. + * Mainly it does some ZK initialization, and ensures a loading core registers in ZK. + * Even when in standalone mode, perhaps surprisingly, an instance of this class exists. + * If {@link #getZkController()} returns null then we're in standalone mode. + */ public class ZkContainer { + // NOTE DWS: It's debatable if this in-between class is needed instead of folding it all into ZkController. + // ZKC is huge though. 
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); protected ZkController zkController; @@ -180,6 +189,10 @@ private String stripChroot(String zkRun) { public static volatile Predicate testing_beforeRegisterInZk; public void registerInZk(final SolrCore core, boolean background, boolean skipRecovery) { + if (zkController == null) { + return; + } + CoreDescriptor cd = core.getCoreDescriptor(); // save this here - the core may not have it later Runnable r = () -> { MDCLoggingContext.setCore(core); @@ -223,7 +236,6 @@ public void registerInZk(final SolrCore core, boolean background, boolean skipRe } else { r.run(); } - } public ZkController getZkController() { diff --git a/solr/core/src/java/org/apache/solr/core/backup/BackupManager.java b/solr/core/src/java/org/apache/solr/core/backup/BackupManager.java index b15bbfee838f..deae3604c798 100644 --- a/solr/core/src/java/org/apache/solr/core/backup/BackupManager.java +++ b/solr/core/src/java/org/apache/solr/core/backup/BackupManager.java @@ -139,7 +139,7 @@ public DocCollection readCollectionState(URI backupLoc, String backupId, String try (IndexInput is = repository.openInput(zkStateDir, COLLECTION_PROPS_FILE, IOContext.DEFAULT)) { byte[] arr = new byte[(int) is.length()]; // probably ok since the json file should be small. is.readBytes(arr, 0, (int) is.length()); - ClusterState c_state = ClusterState.load(-1, arr, Collections.emptySet()); + ClusterState c_state = ClusterState.createFromJson(-1, arr, Collections.emptySet()); return c_state.getCollection(collectionName); } } diff --git a/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java b/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java index 6c0b04c3f722..1e9da27451ea 100644 --- a/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java +++ b/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java @@ -49,6 +49,7 @@ public class HdfsBackupRepository implements BackupRepository { private Configuration hdfsConfig = null; private FileSystem fileSystem = null; private Path baseHdfsPath = null; + @SuppressWarnings("rawtypes") private NamedList config = null; protected int copyBufferSize = HdfsDirectory.DEFAULT_BUFFER_SIZE; diff --git a/solr/core/src/java/org/apache/solr/core/backup/repository/LocalFileSystemRepository.java b/solr/core/src/java/org/apache/solr/core/backup/repository/LocalFileSystemRepository.java index 2379b0dd7bed..612a61fd4bff 100644 --- a/solr/core/src/java/org/apache/solr/core/backup/repository/LocalFileSystemRepository.java +++ b/solr/core/src/java/org/apache/solr/core/backup/repository/LocalFileSystemRepository.java @@ -46,10 +46,11 @@ * interface e.g. NFS). 
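 * (Illustration, assumed configuration: a repository of this class is typically declared in the
 * <backup> section of solr.xml, e.g.
 * <repository name="local" class="org.apache.solr.core.backup.repository.LocalFileSystemRepository"/>.)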
*/ public class LocalFileSystemRepository implements BackupRepository { + @SuppressWarnings("rawtypes") private NamedList config = null; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings("rawtypes") NamedList args) { this.config = args; } diff --git a/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotManager.java b/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotManager.java index f40bea805589..3a9fa0ef1b8c 100644 --- a/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotManager.java +++ b/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotManager.java @@ -169,6 +169,7 @@ public static Optional getCollectionLevelSnapshot(So throws InterruptedException, KeeperException { String zkPath = getSnapshotMetaDataZkPath(collectionName, Optional.of(commitName)); try { + @SuppressWarnings({"unchecked"}) Map data = (Map)Utils.fromJSON(zkClient.getData(zkPath, null, null, true)); return Optional.of(new CollectionSnapshotMetaData(data)); } catch (KeeperException ex) { @@ -281,13 +282,14 @@ public void onCommit(List commits) * @param dir The index directory storing the snapshot. * @throws IOException in case of I/O errors. */ + + @SuppressWarnings({"try", "unused"}) private static void deleteSnapshotIndexFiles(SolrCore core, Directory dir, IndexDeletionPolicy delPolicy) throws IOException { IndexWriterConfig conf = core.getSolrConfig().indexConfig.toIndexWriterConfig(core); conf.setOpenMode(OpenMode.APPEND); conf.setMergePolicy(NoMergePolicy.INSTANCE);//Don't want to merge any commits here! conf.setIndexDeletionPolicy(delPolicy); conf.setCodec(core.getCodec()); - try (SolrIndexWriter iw = new SolrIndexWriter("SolrSnapshotCleaner", dir, conf)) { // Do nothing. The only purpose of opening index writer is to invoke the Lucene IndexDeletionPolicy#onInit // method so that we can cleanup the files associated with specified index commit. 
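A note on the SolrSnapshotManager hunk above: the added @SuppressWarnings({"try", "unused"}) covers a try-with-resources block opened purely for its side effect; opening and closing the IndexWriter is what triggers IndexDeletionPolicy#onInit, and the configured policy deletes the snapshot's files. A minimal sketch of the idiom, with assumed names (not from the patch):

    import java.io.IOException;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.store.Directory;

    class SnapshotCleanerSketch {
      @SuppressWarnings({"try", "unused"})
      static void invokeDeletionPolicy(Directory dir, IndexWriterConfig conf) throws IOException {
        // Opening and closing the writer is the whole point: IndexWriter calls
        // IndexDeletionPolicy#onInit during open, so no statements are needed in the body.
        try (IndexWriter writer = new IndexWriter(dir, conf)) {
          // intentionally empty
        }
      }
    }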
diff --git a/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotsTool.java b/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotsTool.java index 31323aa94511..4d4c3b829aff 100644 --- a/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotsTool.java +++ b/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotsTool.java @@ -426,6 +426,7 @@ private static boolean isReplicaAvailable (Slice s, String coreName) { return false; } + @SuppressWarnings({"unchecked", "rawtypes"}) private Collection listCollectionSnapshots(String collectionName) throws SolrServerException, IOException { CollectionAdminRequest.ListSnapshots listSnapshots = new CollectionAdminRequest.ListSnapshots(collectionName); diff --git a/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java b/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java index 88cb0e21021c..f6f793b9b1b4 100644 --- a/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java +++ b/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java @@ -178,6 +178,7 @@ private boolean fetchFileFromNodeAndPersist(String fromNode) { String baseUrl = url.replace("/solr", "/api"); ByteBuffer metadata = null; + @SuppressWarnings({"rawtypes"}) Map m = null; try { metadata = Utils.executeGET(coreContainer.getUpdateShardHandler().getDefaultHttpClient(), @@ -448,7 +449,7 @@ public void syncToAllNodes(String path) throws IOException { } @Override - public List list(String path, Predicate predicate) { + public List list(String path, Predicate predicate) { File file = getRealpath(path).toFile(); List fileDetails = new ArrayList<>(); FileType type = getType(path, false); @@ -472,6 +473,7 @@ public List list(String path, Predicate predicate) { @Override public void refresh(String path) { try { + @SuppressWarnings({"rawtypes"}) List l = null; try { l = coreContainer.getZkController().getZkClient().getChildren(ZK_PACKAGESTORE+ path, null, true); @@ -479,6 +481,7 @@ public void refresh(String path) { // does not matter } if (l != null && !l.isEmpty()) { + @SuppressWarnings({"rawtypes"}) List myFiles = list(path, s -> true); for (Object f : l) { if (!myFiles.contains(f)) { @@ -546,6 +549,7 @@ public static void _persistToFile(Path solrHome, String path, ByteBuffer data, B if (!parent.exists()) { parent.mkdirs(); } + @SuppressWarnings({"rawtypes"}) Map m = (Map) Utils.fromJSON(meta.array(), meta.arrayOffset(), meta.limit()); if (m == null || m.isEmpty()) { throw new SolrException(SERVER_ERROR, "invalid metadata , discarding : " + path); diff --git a/solr/core/src/java/org/apache/solr/filestore/PackageStoreAPI.java b/solr/core/src/java/org/apache/solr/filestore/PackageStoreAPI.java index 5156d50fb954..523ae7ce41f3 100644 --- a/solr/core/src/java/org/apache/solr/filestore/PackageStoreAPI.java +++ b/solr/core/src/java/org/apache/solr/filestore/PackageStoreAPI.java @@ -89,7 +89,7 @@ public PackageStore getPackageStore() { */ public ArrayList shuffledNodes() { Set liveNodes = coreContainer.getZkController().getZkStateReader().getClusterState().getLiveNodes(); - ArrayList l = new ArrayList(liveNodes); + ArrayList l = new ArrayList<>(liveNodes); l.remove(coreContainer.getZkController().getNodeName()); Collections.shuffle(l, BlobRepository.RANDOM); return l; @@ -279,6 +279,7 @@ public void read(SolrQueryRequest req, SolrQueryResponse rsp) { int idx = path.lastIndexOf('/'); String fileName = path.substring(idx + 1); String parentPath = path.substring(0, path.lastIndexOf('/')); + 
@SuppressWarnings({"rawtypes"}) List l = packageStore.list(parentPath, s -> s.equals(fileName)); rsp.add("files", Collections.singletonMap(path, l.isEmpty() ? null : l.get(0))); return; @@ -312,7 +313,8 @@ public static class MetaData implements MapWriter { List signatures; Map otherAttribs; - public MetaData(Map m) { + @SuppressWarnings({"unchecked"}) + public MetaData(@SuppressWarnings({"rawtypes"})Map m) { m = Utils.getDeepCopy(m, 3); this.sha512 = (String) m.remove(SHA512); this.signatures = (List) m.remove("sig"); diff --git a/solr/core/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java b/solr/core/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java index b7c59942452a..8dbbdc16d4ff 100644 --- a/solr/core/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java +++ b/solr/core/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java @@ -82,6 +82,7 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw * * @throws Exception When analysis fails. */ + @SuppressWarnings({"rawtypes"}) protected abstract NamedList doAnalysis(SolrQueryRequest req) throws Exception; /** @@ -99,6 +100,7 @@ protected NamedList analyzeValue(String value, AnalysisContext if (!TokenizerChain.class.isInstance(analyzer)) { try (TokenStream tokenStream = analyzer.tokenStream(context.getFieldName(), value)) { + @SuppressWarnings({"rawtypes"}) NamedList> namedList = new NamedList<>(); namedList.add(tokenStream.getClass().getName(), convertTokensToNamedLists(analyzeTokenStream(tokenStream), context)); return namedList; @@ -231,6 +233,7 @@ private List analyzeTokenStream(TokenStream tokenStream) { * * @return List of NamedLists containing the relevant information taken from the tokens */ + @SuppressWarnings({"rawtypes"}) private List convertTokensToNamedLists(final List tokenList, AnalysisContext context) { final List tokensNamedLists = new ArrayList<>(); final FieldType fieldType = context.getFieldType(); diff --git a/solr/core/src/java/org/apache/solr/handler/AnalyzeEvaluator.java b/solr/core/src/java/org/apache/solr/handler/AnalyzeEvaluator.java index 4112e04d21b4..a92e3eb7d20d 100644 --- a/solr/core/src/java/org/apache/solr/handler/AnalyzeEvaluator.java +++ b/solr/core/src/java/org/apache/solr/handler/AnalyzeEvaluator.java @@ -82,7 +82,7 @@ public Object evaluate(Tuple tuple) throws IOException { value = obj.toString(); } - List tokens = new ArrayList(); + List tokens = new ArrayList<>(); try(TokenStream tokenStream = analyzer.tokenStream(analyzerField, value)) { CharTermAttribute termAtt = tokenStream.getAttribute(CharTermAttribute.class); diff --git a/solr/core/src/java/org/apache/solr/handler/BlobHandler.java b/solr/core/src/java/org/apache/solr/handler/BlobHandler.java index 11495b14aac7..829aa2815a01 100644 --- a/solr/core/src/java/org/apache/solr/handler/BlobHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/BlobHandler.java @@ -80,6 +80,7 @@ public class BlobHandler extends RequestHandlerBase implements PluginInfoInitial private long maxSize = DEFAULT_MAX_SIZE; @Override + @SuppressWarnings({"unchecked"}) public void handleRequestBody(final SolrQueryRequest req, SolrQueryResponse rsp) throws Exception { String httpMethod = req.getHttpMethod(); String path = (String) req.getContext().get("path"); @@ -275,6 +276,7 @@ public String getDescription() { public void init(PluginInfo info) { super.init(info.initArgs); if (info.initArgs != null) { + @SuppressWarnings({"rawtypes"}) NamedList invariants = (NamedList) 
info.initArgs.get(PluginInfo.INVARIANTS); if (invariants != null) { Object o = invariants.get("maxSize"); diff --git a/solr/core/src/java/org/apache/solr/handler/CatStream.java b/solr/core/src/java/org/apache/solr/handler/CatStream.java index d1ebf5a8dfa2..806c94a381d7 100644 --- a/solr/core/src/java/org/apache/solr/handler/CatStream.java +++ b/solr/core/src/java/org/apache/solr/handler/CatStream.java @@ -24,7 +24,6 @@ import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; -import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -126,14 +125,14 @@ public void close() throws IOException {} public Tuple read() throws IOException { if (maxLines >= 0 && linesReturned >= maxLines) { closeCurrentFileIfSet(); - return createEofTuple(); + return Tuple.EOF(); } else if (currentFileHasMoreLinesToRead()) { return fetchNextLineFromCurrentFile(); } else if (advanceToNextFileWithData()) { return fetchNextLineFromCurrentFile(); } else { // No more data closeCurrentFileIfSet(); - return createEofTuple(); + return Tuple.EOF(); } } @@ -197,19 +196,14 @@ private boolean advanceToNextFileWithData() throws IOException { return false; } + @SuppressWarnings({"unchecked"}) private Tuple fetchNextLineFromCurrentFile() { linesReturned++; - HashMap m = new HashMap(); - m.put("file", currentFilePath.displayPath); - m.put("line", currentFileLines.next()); - return new Tuple(m); - } - - private Tuple createEofTuple() { - HashMap m = new HashMap(); - m.put("EOF", true); - return new Tuple(m); + return new Tuple( + "file", currentFilePath.displayPath, + "line", currentFileLines.next() + ); } private boolean currentFileHasMoreLinesToRead() { diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrReplicator.java b/solr/core/src/java/org/apache/solr/handler/CdcrReplicator.java index e63dcb858618..1f41cc3b9e0f 100644 --- a/solr/core/src/java/org/apache/solr/handler/CdcrReplicator.java +++ b/solr/core/src/java/org/apache/solr/handler/CdcrReplicator.java @@ -150,6 +150,7 @@ private void sendRequest(UpdateRequest req) throws IOException, SolrServerExcept * or received via solr client */ private boolean isTargetCluster(Object o) { + @SuppressWarnings({"rawtypes"}) List entry = (List) o; int operationAndFlags = (Integer) entry.get(0); int oper = operationAndFlags & UpdateLog.OPERATION_MASK; @@ -171,6 +172,7 @@ private boolean isTargetCluster(Object o) { } private boolean isDelete(Object o) { + @SuppressWarnings({"rawtypes"}) List entry = (List) o; int operationAndFlags = (Integer) entry.get(0); int oper = operationAndFlags & UpdateLog.OPERATION_MASK; @@ -195,6 +197,7 @@ private void handleException(Exception e) { private UpdateRequest processUpdate(Object o, UpdateRequest req) { // should currently be a List + @SuppressWarnings({"rawtypes"}) List entry = (List) o; int operationAndFlags = (Integer) entry.get(0); diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrReplicatorManager.java b/solr/core/src/java/org/apache/solr/handler/CdcrReplicatorManager.java index d80a09a87992..01a0c4de5c02 100644 --- a/solr/core/src/java/org/apache/solr/handler/CdcrReplicatorManager.java +++ b/solr/core/src/java/org/apache/solr/handler/CdcrReplicatorManager.java @@ -146,6 +146,7 @@ public synchronized void stateUpdate() { ExecutorUtil.shutdownAndAwaitTermination(bootstrapExecutor); } this.closeLogReaders(); + @SuppressWarnings({"rawtypes"}) Callable callable = core.getSolrCoreState().getCdcrBootstrapCallable(); if (callable != null) { CdcrRequestHandler.BootstrapCallable 
bootstrapCallable = (CdcrRequestHandler.BootstrapCallable) callable; @@ -201,9 +202,11 @@ private long getCheckpoint(CdcrReplicatorState state) throws IOException, SolrSe ModifiableSolrParams params = new ModifiableSolrParams(); params.set(CommonParams.ACTION, CdcrParams.CdcrAction.COLLECTIONCHECKPOINT.toString()); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath(path); + @SuppressWarnings({"rawtypes"}) NamedList response = state.getClient().request(request); return (Long) response.get(CdcrParams.CHECKPOINT); } @@ -364,6 +367,7 @@ private BootstrapStatus sendBootstrapCommand() throws InterruptedException { try (HttpSolrClient client = new HttpSolrClient.Builder(leaderCoreUrl).withHttpClient(httpClient).build()) { log.info("Attempting to bootstrap target collection: {} shard: {} leader: {}", targetCollection, shard, leaderCoreUrl); try { + @SuppressWarnings({"rawtypes"}) NamedList response = sendCdcrCommand(client, CdcrParams.CdcrAction.BOOTSTRAP, ReplicationHandler.MASTER_URL, myCoreUrl); log.debug("CDCR Bootstrap response: {}", response); String status = response.get(RESPONSE_STATUS).toString(); @@ -384,6 +388,7 @@ private BootstrapStatus getBoostrapStatus() throws InterruptedException { String leaderCoreUrl = leader.getCoreUrl(); HttpClient httpClient = state.getClient().getLbClient().getHttpClient(); try (HttpSolrClient client = new HttpSolrClient.Builder(leaderCoreUrl).withHttpClient(httpClient).build()) { + @SuppressWarnings({"rawtypes"}) NamedList response = sendCdcrCommand(client, CdcrParams.CdcrAction.BOOTSTRAP_STATUS); String status = (String) response.get(RESPONSE_STATUS); BootstrapStatus bootstrapStatus = BootstrapStatus.valueOf(status.toUpperCase(Locale.ROOT)); @@ -410,6 +415,7 @@ private BootstrapStatus getBoostrapStatus() throws InterruptedException { } } + @SuppressWarnings({"rawtypes"}) private NamedList sendCdcrCommand(SolrClient client, CdcrParams.CdcrAction action, String... 
params) throws SolrServerException, IOException { ModifiableSolrParams solrParams = new ModifiableSolrParams(); solrParams.set(CommonParams.QT, "/cdcr"); diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java index e45ab9f7a53e..e7211f46a8cf 100644 --- a/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java @@ -58,19 +58,18 @@ import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.IOUtils; import org.apache.solr.common.util.NamedList; +import org.apache.solr.common.util.SolrNamedThreadFactory; import org.apache.solr.core.CloseHook; import org.apache.solr.core.PluginBag; import org.apache.solr.core.SolrCore; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrRequestHandler; -import org.apache.solr.request.SolrRequestInfo; import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.update.CdcrUpdateLog; import org.apache.solr.update.SolrCoreState; import org.apache.solr.update.UpdateLog; import org.apache.solr.update.VersionInfo; import org.apache.solr.update.processor.DistributedUpdateProcessor; -import org.apache.solr.common.util.SolrNamedThreadFactory; import org.apache.solr.util.plugin.SolrCoreAware; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -131,7 +130,7 @@ public class CdcrRequestHandler extends RequestHandlerBase implements SolrCoreAw private CdcrBufferManager bufferManager; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init(args); if (args != null) { @@ -155,6 +154,7 @@ public void init(NamedList args) { // Configuration of the Replicas replicasConfiguration = new HashMap<>(); + @SuppressWarnings({"rawtypes"}) List replicas = args.getAll(CdcrParams.REPLICA_PARAM); for (Object replica : replicas) { if (replica != null && replica instanceof NamedList) { @@ -376,6 +376,7 @@ private void handleStatusAction(SolrQueryRequest req, SolrQueryResponse rsp) { rsp.add(CdcrParams.CdcrAction.STATUS.toLower(), this.getStatus()); } + @SuppressWarnings({"unchecked", "rawtypes"}) private NamedList getStatus() { NamedList status = new NamedList(); status.add(CdcrParams.ProcessState.getParam(), processStateManager.getState().toLower()); @@ -548,6 +549,7 @@ private void handleLastProcessedVersionAction(SolrQueryRequest req, SolrQueryRes rsp.add(CdcrParams.LAST_PROCESSED_VERSION, lastProcessedVersion); } + @SuppressWarnings({"unchecked", "rawtypes"}) private void handleQueuesAction(SolrQueryRequest req, SolrQueryResponse rsp) { NamedList hosts = new NamedList(); @@ -580,6 +582,7 @@ private void handleQueuesAction(SolrQueryRequest req, SolrQueryResponse rsp) { updateLogSynchronizer.isStarted() ? 
CdcrParams.ProcessState.STARTED.toLower() : CdcrParams.ProcessState.STOPPED.toLower()); } + @SuppressWarnings({"unchecked", "rawtypes"}) private void handleOpsAction(SolrQueryRequest req, SolrQueryResponse rsp) { NamedList hosts = new NamedList(); @@ -598,6 +601,7 @@ private void handleOpsAction(SolrQueryRequest req, SolrQueryResponse rsp) { rsp.add(CdcrParams.OPERATIONS_PER_SECOND, hosts); } + @SuppressWarnings({"unchecked", "rawtypes"}) private void handleErrorsAction(SolrQueryRequest req, SolrQueryResponse rsp) { NamedList hosts = new NamedList(); @@ -771,12 +775,6 @@ public Boolean call() throws Exception { success = replicationHandler.doFetch(solrParams, false).getSuccessful(); - // this is required because this callable can race with HttpSolrCall#destroy - // which clears the request info. - // Applying buffered updates fails without the following line because LogReplayer - // also tries to set request info and fails with AssertionError - SolrRequestInfo.clearRequestInfo(); - Future future = ulog.applyBufferedUpdates(); if (future == null) { // no replay needed @@ -862,9 +860,11 @@ public Long call() throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(); params.set(CommonParams.ACTION, CdcrParams.CdcrAction.SHARDCHECKPOINT.toString()); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath(cdcrPath); + @SuppressWarnings({"rawtypes"}) NamedList response = server.request(request); return (Long) response.get(CdcrParams.CHECKPOINT); } diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrUpdateLogSynchronizer.java b/solr/core/src/java/org/apache/solr/handler/CdcrUpdateLogSynchronizer.java index 5931496195b7..31f779dcfb9c 100644 --- a/solr/core/src/java/org/apache/solr/handler/CdcrUpdateLogSynchronizer.java +++ b/solr/core/src/java/org/apache/solr/handler/CdcrUpdateLogSynchronizer.java @@ -139,11 +139,13 @@ public void run() { ModifiableSolrParams params = new ModifiableSolrParams(); params.set(CommonParams.ACTION, CdcrParams.CdcrAction.LASTPROCESSEDVERSION.toString()); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath(path); long lastVersion; try { + @SuppressWarnings({"rawtypes"}) NamedList response = server.request(request); lastVersion = (Long) response.get(CdcrParams.LAST_PROCESSED_VERSION); if (log.isDebugEnabled()) { diff --git a/solr/core/src/java/org/apache/solr/handler/ContentStreamHandlerBase.java b/solr/core/src/java/org/apache/solr/handler/ContentStreamHandlerBase.java index 1859f042fc55..7f4feac3bc20 100644 --- a/solr/core/src/java/org/apache/solr/handler/ContentStreamHandlerBase.java +++ b/solr/core/src/java/org/apache/solr/handler/ContentStreamHandlerBase.java @@ -32,7 +32,7 @@ public abstract class ContentStreamHandlerBase extends RequestHandlerBase { @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init(args); // Caching off by default diff --git a/solr/core/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java index 205c14e9e0b5..f40a03a3514c 100644 --- a/solr/core/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java @@ -87,7 +87,7 @@ public class DocumentAnalysisRequestHandler extends AnalysisRequestHandlerBase { private XMLInputFactory inputFactory; @Override - public void init(NamedList 
args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init(args); inputFactory = XMLInputFactory.newInstance(); @@ -109,6 +109,7 @@ public void init(NamedList args) { } @Override + @SuppressWarnings({"rawtypes"}) protected NamedList doAnalysis(SolrQueryRequest req) throws Exception { DocumentAnalysisRequest analysisRequest = resolveAnalysisRequest(req); return handleAnalysisRequest(analysisRequest, req.getSchema()); @@ -194,6 +195,7 @@ NamedList handleAnalysisRequest(DocumentAnalysisRequest request, IndexSc for (SolrInputDocument document : request.getDocuments()) { + @SuppressWarnings({"rawtypes"}) NamedList theTokens = new SimpleOrderedMap<>(); result.add(document.getFieldValue(uniqueKeyField.getName()).toString(), theTokens); for (String name : document.getFieldNames()) { diff --git a/solr/core/src/java/org/apache/solr/handler/DumpRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/DumpRequestHandler.java index d7d5b71efa7b..4ce5fa57dab7 100644 --- a/solr/core/src/java/org/apache/solr/handler/DumpRequestHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/DumpRequestHandler.java @@ -37,12 +37,14 @@ public class DumpRequestHandler extends RequestHandlerBase { @Override - public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException + @SuppressWarnings({"unchecked"}) + public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException { // Show params rsp.add( "params", req.getParams().toNamedList() ); String[] parts = req.getParams().getParams("urlTemplateValues"); if (parts != null && parts.length > 0) { + @SuppressWarnings({"rawtypes"}) Map map = new LinkedHashMap<>(); rsp.getValues().add("urlTemplateValues", map); for (String part : parts) { @@ -52,6 +54,7 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw String[] returnParams = req.getParams().getParams("param"); if(returnParams !=null) { + @SuppressWarnings({"rawtypes"}) NamedList params = (NamedList) rsp.getValues().get("params"); for (String returnParam : returnParams) { String[] vals = req.getParams().getParams(returnParam); @@ -68,6 +71,7 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw } if(req.getParams().getBool("getdefaults", false)){ + @SuppressWarnings({"rawtypes"}) NamedList def = (NamedList) initArgs.get(PluginInfo.DEFAULTS); rsp.add("getdefaults", def); } @@ -116,9 +120,11 @@ public SolrRequestHandler getSubHandler(String subPath) { private List subpaths; @Override - public void init(NamedList args) { + @SuppressWarnings({"unchecked"}) + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init(args); if(args !=null) { + @SuppressWarnings({"rawtypes"}) NamedList nl = (NamedList) args.get(PluginInfo.DEFAULTS); if(nl!=null) subpaths = nl.getAll("subpath"); } diff --git a/solr/core/src/java/org/apache/solr/handler/ExportHandler.java b/solr/core/src/java/org/apache/solr/handler/ExportHandler.java index ea9239d02655..64999dc3f662 100644 --- a/solr/core/src/java/org/apache/solr/handler/ExportHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/ExportHandler.java @@ -18,20 +18,88 @@ package org.apache.solr.handler; +import java.lang.invoke.MethodHandles; import java.util.HashMap; import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import org.apache.solr.client.solrj.io.ModelCache; +import org.apache.solr.client.solrj.io.SolrClientCache; +import 
org.apache.solr.client.solrj.io.stream.StreamContext; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.MapSolrParams; import org.apache.solr.common.params.SolrParams; +import org.apache.solr.core.CoreContainer; +import org.apache.solr.core.SolrCore; import org.apache.solr.handler.component.SearchHandler; import org.apache.solr.handler.export.ExportWriter; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.response.SolrQueryResponse; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.solr.common.params.CommonParams.JSON; public class ExportHandler extends SearchHandler { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + private ModelCache modelCache = null; + @SuppressWarnings({"rawtypes"}) + private ConcurrentMap objectCache = new ConcurrentHashMap(); + private SolrDefaultStreamFactory streamFactory = new ExportHandlerStreamFactory(); + private String coreName; + private SolrClientCache solrClientCache; + private StreamContext initialStreamContext; + + public static class ExportHandlerStreamFactory extends SolrDefaultStreamFactory { + static final String[] forbiddenStreams = new String[] { + // source streams + "search", "facet", "facet2D", "update", "delete", "jdbc", "topic", + "commit", "random", "knnSearch", + // execution streams + "parallel", "executor", "daemon" + // other streams? + }; + + public ExportHandlerStreamFactory() { + super(); + for (String function : forbiddenStreams) { + this.withoutFunctionName(function); + } + this.withFunctionName("input", ExportWriter.ExportWriterStream.class); + } + } + + @Override + public void inform(SolrCore core) { + super.inform(core); + String defaultCollection; + String defaultZkhost; + CoreContainer coreContainer = core.getCoreContainer(); + this.solrClientCache = coreContainer.getSolrClientCache(); + this.coreName = core.getName(); + + if (coreContainer.isZooKeeperAware()) { + defaultCollection = core.getCoreDescriptor().getCollectionName(); + defaultZkhost = core.getCoreContainer().getZkController().getZkServerAddress(); + streamFactory.withCollectionZkHost(defaultCollection, defaultZkhost); + streamFactory.withDefaultZkHost(defaultZkhost); + modelCache = new ModelCache(250, + defaultZkhost, + solrClientCache); + } + streamFactory.withSolrResourceLoader(core.getResourceLoader()); + StreamHandler.addExpressiblePlugins(streamFactory, core); + initialStreamContext = new StreamContext(); + initialStreamContext.setStreamFactory(streamFactory); + initialStreamContext.setSolrClientCache(solrClientCache); + initialStreamContext.setModelCache(modelCache); + initialStreamContext.setObjectCache(objectCache); + initialStreamContext.put("core", this.coreName); + initialStreamContext.put("solr-core", core); + } + @Override public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception { try { @@ -44,6 +112,6 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw Map map = new HashMap<>(1); map.put(CommonParams.WT, ReplicationHandler.FILE_STREAM); req.setParams(SolrParams.wrapDefaults(new MapSolrParams(map),req.getParams())); - rsp.add(ReplicationHandler.FILE_STREAM, new ExportWriter(req, rsp, wt)); + rsp.add(ReplicationHandler.FILE_STREAM, new ExportWriter(req, rsp, wt, initialStreamContext)); } } diff --git a/solr/core/src/java/org/apache/solr/handler/FieldAnalysisRequestHandler.java 
b/solr/core/src/java/org/apache/solr/handler/FieldAnalysisRequestHandler.java index 5038b6a779b7..6917bbbebc18 100644 --- a/solr/core/src/java/org/apache/solr/handler/FieldAnalysisRequestHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/FieldAnalysisRequestHandler.java @@ -93,6 +93,7 @@ public class FieldAnalysisRequestHandler extends AnalysisRequestHandlerBase { @Override + @SuppressWarnings({"rawtypes"}) protected NamedList doAnalysis(SolrQueryRequest req) throws Exception { FieldAnalysisRequest analysisRequest = resolveAnalysisRequest(req); IndexSchema indexSchema = req.getSchema(); @@ -174,6 +175,7 @@ FieldAnalysisRequest resolveAnalysisRequest(SolrQueryRequest req) throws SolrExc * * @return The analysis breakdown as a named list. */ + @SuppressWarnings({"rawtypes"}) protected NamedList handleAnalysisRequest(FieldAnalysisRequest request, IndexSchema schema) { NamedList analysisResults = new SimpleOrderedMap<>(); @@ -210,6 +212,7 @@ protected NamedList handleAnalysisRequest(FieldAnalysisRequest reques * @return NamedList containing the tokens produced by the analyzers of the given field, separated into an index and * a query group */ // package access for testing + @SuppressWarnings({"rawtypes"}) NamedList analyzeValues(FieldAnalysisRequest analysisRequest, FieldType fieldType, String fieldName) { final String queryValue = analysisRequest.getQuery(); diff --git a/solr/core/src/java/org/apache/solr/handler/GraphHandler.java b/solr/core/src/java/org/apache/solr/handler/GraphHandler.java index bed4086f1163..5c159e70d978 100644 --- a/solr/core/src/java/org/apache/solr/handler/GraphHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/GraphHandler.java @@ -20,11 +20,11 @@ import java.io.IOException; import java.lang.invoke.MethodHandles; import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; +import org.apache.solr.client.solrj.io.SolrClientCache; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.comp.StreamComparator; import org.apache.solr.client.solrj.io.graph.Traversal; @@ -39,6 +39,7 @@ import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.params.StreamParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.core.CoreContainer; import org.apache.solr.core.PluginInfo; @@ -83,17 +84,20 @@ public class GraphHandler extends RequestHandlerBase implements SolrCoreAware, P private StreamFactory streamFactory = new DefaultStreamFactory(); private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private String coreName; + private SolrClientCache solrClientCache; @Override public PermissionNameProvider.Name getPermissionName(AuthorizationContext request) { return PermissionNameProvider.Name.READ_PERM; } + @SuppressWarnings({"unchecked"}) public void inform(SolrCore core) { String defaultCollection; String defaultZkhost; CoreContainer coreContainer = core.getCoreContainer(); this.coreName = core.getName(); + this.solrClientCache = coreContainer.getSolrClientCache(); if(coreContainer.isZooKeeperAware()) { defaultCollection = core.getCoreDescriptor().getCollectionName(); @@ -119,7 +123,8 @@ public void inform(SolrCore core) { Expressible.class); streamFactory.withFunctionName(key, clazz); } else { - StreamHandler.ExpressibleHolder holder = new 
StreamHandler.ExpressibleHolder(pluginInfo, core, SolrConfig.classVsSolrPluginInfo.get(Expressible.class)); + @SuppressWarnings("resource") + StreamHandler.ExpressibleHolder holder = new StreamHandler.ExpressibleHolder(pluginInfo, core, SolrConfig.classVsSolrPluginInfo.get(Expressible.class.getName())); streamFactory.withFunctionName(key, () -> holder.getClazz()); } @@ -128,6 +133,7 @@ public void inform(SolrCore core) { } } + @SuppressWarnings({"unchecked"}) public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception { SolrParams params = req.getParams(); params = adjustParams(params); @@ -141,17 +147,19 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw } catch (Exception e) { //Catch exceptions that occur while the stream is being created. This will include streaming expression parse rules. SolrException.log(log, e); + @SuppressWarnings({"rawtypes"}) Map requestContext = req.getContext(); requestContext.put("stream", new DummyErrorStream(e)); return; } StreamContext context = new StreamContext(); - context.setSolrClientCache(StreamHandler.clientCache); + context.setSolrClientCache(solrClientCache); context.put("core", this.coreName); Traversal traversal = new Traversal(); context.put("traversal", traversal); tupleStream.setStreamContext(context); + @SuppressWarnings({"rawtypes"}) Map requestContext = req.getContext(); requestContext.put("stream", new TimerStream(new ExceptionStream(tupleStream))); requestContext.put("traversal", traversal); @@ -199,11 +207,7 @@ public Explanation toExplanation(StreamFactory factory) throws IOException { } public Tuple read() { - String msg = e.getMessage(); - Map m = new HashMap(); - m.put("EOF", true); - m.put("EXCEPTION", msg); - return new Tuple(m); + return Tuple.EXCEPTION(e.getMessage(), true); } } @@ -250,11 +254,12 @@ public Explanation toExplanation(StreamFactory factory) throws IOException { return null; } + @SuppressWarnings({"unchecked"}) public Tuple read() throws IOException { Tuple tuple = this.tupleStream.read(); if(tuple.EOF) { long totalTime = (System.nanoTime() - begin) / 1000000; - tuple.fields.put("RESPONSE_TIME", totalTime); + tuple.put(StreamParams.RESPONSE_TIME, totalTime); } return tuple; } diff --git a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java index 146b0e9c2caa..46c009cfb780 100644 --- a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java +++ b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java @@ -230,7 +230,7 @@ private static HttpClient createHttpClient(SolrCore core, String httpBasicAuthUs return HttpClientUtil.createClient(httpClientParams, core.getCoreContainer().getUpdateShardHandler().getRecoveryOnlyConnectionManager(), true); } - public IndexFetcher(final NamedList initArgs, final ReplicationHandler handler, final SolrCore sc) { + public IndexFetcher(@SuppressWarnings({"rawtypes"})final NamedList initArgs, final ReplicationHandler handler, final SolrCore sc) { solrCore = sc; Object fetchFromLeader = initArgs.get(FETCH_FROM_LEADER); if (fetchFromLeader != null && fetchFromLeader instanceof Boolean) { @@ -272,7 +272,8 @@ public IndexFetcher(final NamedList initArgs, final ReplicationHandler handler, myHttpClient = createHttpClient(solrCore, httpBasicAuthUser, httpBasicAuthPassword, useExternalCompression); } - protected T getParameter(NamedList initArgs, String configKey, T defaultValue, StringBuilder sb) { + @SuppressWarnings({"unchecked"}) + protected T 
getParameter(@SuppressWarnings({"rawtypes"})NamedList initArgs, String configKey, T defaultValue, StringBuilder sb) { T toReturn = defaultValue; if (initArgs != null) { T temp = (T) initArgs.get(configKey); @@ -285,7 +286,7 @@ protected T getParameter(NamedList initArgs, String configKey, T defaultValu /** * Gets the latest commit version and generation from the master */ - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) NamedList getLatestVersion() throws IOException { ModifiableSolrParams params = new ModifiableSolrParams(); params.set(COMMAND, CMD_INDEX_VERSION); @@ -309,6 +310,7 @@ NamedList getLatestVersion() throws IOException { /** * Fetches the list of files in a given index commit point and updates internal list of files to download. */ + @SuppressWarnings({"unchecked"}) private void fetchFileList(long gen) throws IOException { ModifiableSolrParams params = new ModifiableSolrParams(); params.set(COMMAND, CMD_GET_FILE_LIST); @@ -324,6 +326,7 @@ private void fetchFileList(long gen) throws IOException { .withConnectionTimeout(connTimeout) .withSocketTimeout(soTimeout) .build()) { + @SuppressWarnings({"rawtypes"}) NamedList response = client.request(req); List> files = (List>) response.get(CMD_GET_FILE_LIST); @@ -410,6 +413,7 @@ IndexFetchResult fetchLatestIndex(boolean forceReplication, boolean forceCoreRel } } //get the current 'replicateable' index version in the master + @SuppressWarnings({"rawtypes"}) NamedList response; try { response = getLatestVersion(); @@ -917,6 +921,7 @@ private void openNewSearcherAndUpdateCommitPoint() throws IOException { // todo stop keeping solrCore around SolrCore core = solrCore.getCoreContainer().getCore(solrCore.getName()); try { + @SuppressWarnings({"rawtypes"}) Future[] waitSearcher = new Future[1]; searcher = core.getSearcher(true, true, waitSearcher, true); if (waitSearcher[0] != null) { @@ -1481,11 +1486,14 @@ private String getDateAsStr(Date d) { * * @return a list of configuration files which have changed on the master and need to be downloaded. */ + @SuppressWarnings({"unchecked"}) private Collection> getModifiedConfFiles(List> confFilesToDownload) { if (confFilesToDownload == null || confFilesToDownload.isEmpty()) return Collections.EMPTY_LIST; //build a map with alias/name as the key + @SuppressWarnings({"rawtypes"}) Map> nameVsFile = new HashMap<>(); + @SuppressWarnings({"rawtypes"}) NamedList names = new NamedList(); for (Map map : confFilesToDownload) { //if alias is present that is the name the file may have in the slave @@ -1562,6 +1570,7 @@ long getReplicationTimeElapsed() { return timeElapsed; } + @SuppressWarnings({"unchecked"}) List> getTlogFilesToDownload() { //make a copy first because it can be null later List> tmp = tlogFilesToDownload; @@ -1569,6 +1578,7 @@ List> getTlogFilesToDownload() { return tmp == null ? Collections.EMPTY_LIST : new ArrayList<>(tmp); } + @SuppressWarnings({"unchecked"}) List> getTlogFilesDownloaded() { //make a copy first because it can be null later List> tmp = tlogFilesDownloaded; @@ -1576,6 +1586,7 @@ List> getTlogFilesDownloaded() { return tmp == null ? Collections.EMPTY_LIST : new ArrayList<>(tmp); } + @SuppressWarnings({"unchecked"}) List> getConfFilesToDownload() { //make a copy first because it can be null later List> tmp = confFilesToDownload; @@ -1583,6 +1594,7 @@ List> getConfFilesToDownload() { return tmp == null ? 
Collections.EMPTY_LIST : new ArrayList<>(tmp); } + @SuppressWarnings({"unchecked"}) List> getConfFilesDownloaded() { //make a copy first because it can be null later List> tmp = confFilesDownloaded; @@ -1590,12 +1602,14 @@ List> getConfFilesDownloaded() { return tmp == null ? Collections.EMPTY_LIST : new ArrayList<>(tmp); } + @SuppressWarnings({"unchecked"}) List> getFilesToDownload() { //make a copy first because it can be null later List> tmp = filesToDownload; return tmp == null ? Collections.EMPTY_LIST : new ArrayList<>(tmp); } + @SuppressWarnings({"unchecked"}) List> getFilesDownloaded() { List> tmp = filesDownloaded; return tmp == null ? Collections.EMPTY_LIST : new ArrayList<>(tmp); @@ -1857,6 +1871,7 @@ private FastInputStream getStream() throws IOException { } + @SuppressWarnings({"rawtypes"}) NamedList response; InputStream is = null; @@ -1966,6 +1981,7 @@ private class LocalFsFileFetcher extends FileFetcher { } } + @SuppressWarnings({"rawtypes"}) NamedList getDetails() throws IOException, SolrServerException { ModifiableSolrParams params = new ModifiableSolrParams(); params.set(COMMAND, CMD_DETAILS); diff --git a/solr/core/src/java/org/apache/solr/handler/MoreLikeThisHandler.java b/solr/core/src/java/org/apache/solr/handler/MoreLikeThisHandler.java index d13fa7abd6ab..652024cf07cd 100644 --- a/solr/core/src/java/org/apache/solr/handler/MoreLikeThisHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/MoreLikeThisHandler.java @@ -91,7 +91,7 @@ public class MoreLikeThisHandler extends RequestHandlerBase "MoreLikeThis does not support multiple ContentStreams"; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init(args); } @@ -261,6 +261,7 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw // TODO resolve duplicated code with DebugComponent. Perhaps it should be added to doStandardDebug? if (dbg == true) { try { + @SuppressWarnings({"unchecked"}) NamedList dbgInfo = SolrPluginUtils.doStandardDebug(req, q, mlt.getRawMLTQuery(), mltDocs.docList, dbgQuery, dbgResults); if (null != dbgInfo) { if (null != filters) { diff --git a/solr/core/src/java/org/apache/solr/handler/PingRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/PingRequestHandler.java index 0cd9e1d57efb..c8cbc09085a8 100644 --- a/solr/core/src/java/org/apache/solr/handler/PingRequestHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/PingRequestHandler.java @@ -139,7 +139,7 @@ protected enum ACTIONS {STATUS, ENABLE, DISABLE, PING}; private File healthcheck = null; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init(args); Object tmp = args.get(HEALTHCHECK_FILE_PARAM); healthFileName = (null == tmp ? 
null : tmp.toString()); diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java index 94ff9e187d05..b68598c57854 100644 --- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java @@ -969,6 +969,7 @@ private NamedList getReplicationDetails(SolrQueryResponse rsp, boolean s Properties props = loadReplicationProperties(); if (showSlaveDetails) { try { + @SuppressWarnings({"rawtypes"}) NamedList nl = fetcher.getDetails(); slave.add("masterDetails", nl.get(CMD_DETAILS)); } catch (Exception e) { @@ -1091,6 +1092,7 @@ private NamedList getReplicationDetails(SolrQueryResponse rsp, boolean s if (slave.size() > 0) details.add("slave", slave); + @SuppressWarnings({"rawtypes"}) NamedList snapshotStats = snapShootDetails; if (snapshotStats != null) details.add(CMD_BACKUP, snapshotStats); @@ -1102,21 +1104,21 @@ private NamedList getReplicationDetails(SolrQueryResponse rsp, boolean s return details; } - private void addVal(NamedList nl, String key, Properties props, Class clzz) { + private void addVal(NamedList nl, String key, Properties props, @SuppressWarnings({"rawtypes"})Class clzz) { Object val = formatVal(key, props, clzz); if (val != null) { nl.add(key, val); } } - private void addVal(Map map, String key, Properties props, Class clzz) { + private void addVal(Map map, String key, Properties props, @SuppressWarnings({"rawtypes"})Class clzz) { Object val = formatVal(key, props, clzz); if (val != null) { map.put(key, val); } } - private Object formatVal(String key, Properties props, Class clzz) { + private Object formatVal(String key, Properties props, @SuppressWarnings({"rawtypes"})Class clzz) { String s = props.getProperty(key); if (s == null || s.trim().length() == 0) return null; if (clzz == Date.class) { @@ -1233,6 +1235,7 @@ public void inform(SolrCore core) { } else { numberBackupsToKeep = 0; } + @SuppressWarnings({"rawtypes"}) NamedList slave = (NamedList) initArgs.get("slave"); boolean enableSlave = isEnabled( slave ); if (enableSlave) { @@ -1240,6 +1243,7 @@ public void inform(SolrCore core) { setupPolling((String) slave.get(POLL_INTERVAL)); isSlave = true; } + @SuppressWarnings({"rawtypes"}) NamedList master = (NamedList) initArgs.get("master"); boolean enableMaster = isEnabled( master ); @@ -1269,9 +1273,11 @@ public void inform(SolrCore core) { } log.info("Replication enabled for following config files: {}", includeConfFiles); } + @SuppressWarnings({"rawtypes"}) List backup = master.getAll("backupAfter"); boolean backupOnCommit = backup.contains("commit"); boolean backupOnOptimize = !backupOnCommit && backup.contains("optimize"); + @SuppressWarnings({"rawtypes"}) List replicateAfter = master.getAll(REPLICATE_AFTER); replicateOnCommit = replicateAfter.contains("commit"); replicateOnOptimize = !replicateOnCommit && replicateAfter.contains("optimize"); @@ -1361,7 +1367,7 @@ public void inform(SolrCore core) { } // check master or slave is enabled - private boolean isEnabled( NamedList params ){ + private boolean isEnabled( @SuppressWarnings({"rawtypes"})NamedList params ){ if( params == null ) return false; Object enable = params.get( "enable" ); if( enable == null ) return true; @@ -1432,7 +1438,7 @@ public void shutdown() { private SolrEventListener getEventListener(final boolean snapshoot, final boolean getCommit) { return new SolrEventListener() { @Override - public void init(NamedList args) {/*no op*/ } + 
public void init(@SuppressWarnings({"rawtypes"})NamedList args) {/*no op*/ } /** * This refreshes the latest replicateable index commit and optionally can create Snapshots as well diff --git a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java index 01ea3005a042..1fcc183b6ce7 100644 --- a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java +++ b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java @@ -54,6 +54,7 @@ */ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfoBean, NestedRequestHandler, ApiSupport { + @SuppressWarnings({"rawtypes"}) protected NamedList initArgs = null; protected SolrParams defaults; protected SolrParams appends; @@ -127,7 +128,7 @@ public RequestHandlerBase() { * See also the example solrconfig.xml located in the Solr codebase (example/solr/conf). */ @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { initArgs = args; if (args != null) { @@ -168,7 +169,7 @@ public void initializeMetrics(SolrMetricsContext parentContext, String scope) { solrMetricsContext.gauge(() -> handlerStart, true, "handlerStart", getCategory().toString(), scope); } - public static SolrParams getSolrParamsFromNamedList(NamedList args, String key) { + public static SolrParams getSolrParamsFromNamedList(@SuppressWarnings({"rawtypes"})NamedList args, String key) { Object o = args.get(key); if (o != null && o instanceof NamedList) { return ((NamedList) o).toSolrParams(); @@ -176,6 +177,7 @@ public static SolrParams getSolrParamsFromNamedList(NamedList args, String key) return null; } + @SuppressWarnings({"rawtypes"}) public NamedList getInitArgs() { return initArgs; } @@ -199,6 +201,7 @@ public void handleRequest(SolrQueryRequest req, SolrQueryResponse rsp) { } } Timer.Context timer = requestTimes.time(); + @SuppressWarnings("resource") Timer.Context dTimer = distrib ? distribRequestTimes.time() : localRequestTimes.time(); try { if (pluginInfo != null && pluginInfo.attributes.containsKey(USEPARAM)) @@ -208,6 +211,7 @@ public void handleRequest(SolrQueryRequest req, SolrQueryResponse rsp) { rsp.setHttpCaching(httpCaching); handleRequestBody(req, rsp); // count timeouts + @SuppressWarnings({"rawtypes"}) NamedList header = rsp.getResponseHeader(); if (header != null) { if (Boolean.TRUE.equals(header.getBooleanArg( diff --git a/solr/core/src/java/org/apache/solr/handler/RestoreCore.java b/solr/core/src/java/org/apache/solr/handler/RestoreCore.java index fb75d4a6bd8f..3e12d4b101c0 100644 --- a/solr/core/src/java/org/apache/solr/handler/RestoreCore.java +++ b/solr/core/src/java/org/apache/solr/handler/RestoreCore.java @@ -153,6 +153,7 @@ private void checkInterrupted() throws InterruptedException { } private void openNewSearcher() throws Exception { + @SuppressWarnings({"rawtypes"}) Future[] waitSearcher = new Future[1]; core.getSearcher(true, false, waitSearcher, true); if (waitSearcher[0] != null) { diff --git a/solr/core/src/java/org/apache/solr/handler/SQLHandler.java b/solr/core/src/java/org/apache/solr/handler/SQLHandler.java index 6b0330add830..8bc1491f78c5 100644 --- a/solr/core/src/java/org/apache/solr/handler/SQLHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/SQLHandler.java @@ -159,7 +159,7 @@ public Tuple read() throws IOException { // Return a metadata tuple as the first tuple and then pass through to the JDBCStream. 
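// Sketch only, not part of the patch: the SQLHandler hunk below, and the later
// StreamHandler/GraphHandler hunks, replace hand-built Map tuples (e.g. {"EOF": true,
// "EXCEPTION": msg}) with Tuple's own API. Only Tuple calls that appear in this
// patch are used; the class, method, and variable names here are illustrative.
import org.apache.solr.client.solrj.io.Tuple;

class TupleApiSketch {
  Tuple metadata(Object fields, Object aliases) {
    Tuple tuple = new Tuple();            // start empty, then put() key/value pairs
    tuple.put("isMetadata", true);
    tuple.put("fields", fields);
    tuple.put("aliases", aliases);
    return tuple;
  }

  Tuple terminal(String msg, boolean failed) {
    // EOF() and EXCEPTION(message, eof) supersede the old map-based terminal tuples
    return failed ? Tuple.EXCEPTION(msg, true) : Tuple.EOF();
  }
}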
if(firstTuple) { try { - Map fields = new HashMap<>(); + Tuple tuple = new Tuple(); firstTuple = false; @@ -173,10 +173,10 @@ public Tuple read() throws IOException { } if(includeMetadata) { - fields.put("isMetadata", true); - fields.put("fields", metadataFields); - fields.put("aliases", metadataAliases); - return new Tuple(fields); + tuple.put("isMetadata", true); + tuple.put("fields", metadataFields); + tuple.put("aliases", metadataAliases); + return tuple; } } catch (SQLException e) { throw new IOException(e); diff --git a/solr/core/src/java/org/apache/solr/handler/SchemaHandler.java b/solr/core/src/java/org/apache/solr/handler/SchemaHandler.java index 58a6bd09df60..fb24b0d64344 100644 --- a/solr/core/src/java/org/apache/solr/handler/SchemaHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/SchemaHandler.java @@ -56,6 +56,7 @@ import static org.apache.solr.schema.IndexSchema.SchemaProps.Handler.FIELDS; import static org.apache.solr.schema.IndexSchema.SchemaProps.Handler.FIELD_TYPES; +@SuppressWarnings({"unchecked"}) public class SchemaHandler extends RequestHandlerBase implements SolrCoreAware, PermissionNameProvider { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private boolean isImmutableConfigSet = false; @@ -63,6 +64,7 @@ public class SchemaHandler extends RequestHandlerBase implements SolrCoreAware, private static final Map level2; static { + @SuppressWarnings({"rawtypes"}) Map s = Utils.makeMap( FIELD_TYPES.nameLower, null, FIELDS.nameLower, "fl", @@ -87,6 +89,7 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw } try { + @SuppressWarnings({"rawtypes"}) List errs = new SchemaManager(req).performOperations(); if (!errs.isEmpty()) throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST,"error processing commands", errs); @@ -136,11 +139,7 @@ private void handleGET(SolrQueryRequest req, SolrQueryResponse rsp) { break; } case "/schema/zkversion": { - int refreshIfBelowVersion = -1; - Object refreshParam = req.getParams().get("refreshIfBelowVersion"); - if (refreshParam != null) - refreshIfBelowVersion = (refreshParam instanceof Number) ? 
((Number) refreshParam).intValue() - : Integer.parseInt(refreshParam.toString()); + int refreshIfBelowVersion = req.getParams().getInt("refreshIfBelowVersion", -1); int zkVersion = -1; IndexSchema schema = req.getSchema(); if (schema instanceof ManagedIndexSchema) { @@ -168,14 +167,17 @@ private void handleGET(SolrQueryRequest req, SolrQueryResponse rsp) { if (parts.size() > 2) { req.setParams(SolrParams.wrapDefaults(new MapSolrParams(singletonMap(pathParam, parts.get(2))), req.getParams())); } + @SuppressWarnings({"rawtypes"}) Map propertyValues = req.getSchema().getNamedPropertyValues(realName, req.getParams()); Object o = propertyValues.get(fieldName); if(parts.size()> 2) { String name = parts.get(2); if (o instanceof List) { + @SuppressWarnings({"rawtypes"}) List list = (List) o; for (Object obj : list) { if (obj instanceof SimpleOrderedMap) { + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap simpleOrderedMap = (SimpleOrderedMap) obj; if(name.equals(simpleOrderedMap.get("name"))) { rsp.add(fieldName.substring(0, realName.length() - 1), simpleOrderedMap); diff --git a/solr/core/src/java/org/apache/solr/handler/SnapShooter.java b/solr/core/src/java/org/apache/solr/handler/SnapShooter.java index 07a4eb987772..c238d55fb279 100644 --- a/solr/core/src/java/org/apache/solr/handler/SnapShooter.java +++ b/solr/core/src/java/org/apache/solr/handler/SnapShooter.java @@ -148,6 +148,7 @@ public void validateCreateSnapshot() throws IOException { } } + @SuppressWarnings({"rawtypes"}) public NamedList createSnapshot() throws Exception { final IndexCommit indexCommit = getAndSaveIndexCommit(); try { @@ -208,6 +209,7 @@ private IndexCommit getAndSaveIndexCommit() throws IOException { return commit; } + @SuppressWarnings({"unchecked", "rawtypes"}) public void createSnapAsync(final int numberToKeep, Consumer<NamedList> result) throws IOException { //TODO should use Solr's ExecutorUtil new Thread(() -> { @@ -242,6 +244,7 @@ public void createSnapAsync(final int numberToKeep, Consumer<NamedList> result) * @see IndexDeletionPolicyWrapper#saveCommitPoint * @see IndexDeletionPolicyWrapper#releaseCommitPoint */ + @SuppressWarnings({"rawtypes"}) protected NamedList createSnapshot(final IndexCommit indexCommit) throws Exception { assert indexCommit != null; if (log.isInfoEnabled()) { diff --git a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java index 3b2042005474..2b7101895c46 100644 --- a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java @@ -151,6 +151,7 @@ public void inform(SolrCore core) { } public static boolean getImmutable(SolrCore core) { + @SuppressWarnings({"rawtypes"}) NamedList configSetProperties = core.getConfigSetProperties(); if (configSetProperties == null) return false; Object immutable = configSetProperties.get(IMMUTABLE_CONFIGSET_ARG); @@ -178,6 +179,7 @@ private String getDefaultPath() { return "/config"; } + @SuppressWarnings({"unchecked"}) private void handleGET() { if (parts.size() == 1) { //this is the whole config.
sent out the whole payload @@ -189,6 +191,7 @@ private void handleGET() { if (parts.size() == 3) { RequestParams params = req.getCore().getSolrConfig().getRequestParams(); RequestParams.ParamSet p = params.getParams(parts.get(2)); + @SuppressWarnings({"rawtypes"}) Map m = new LinkedHashMap<>(); m.put(ZNODEVER, params.getZnodeVersion()); if (p != null) { @@ -247,6 +250,7 @@ private void handleGET() { Map val = makeMap(parts.get(1), m.get(parts.get(1))); String componentName = req.getParams().get("componentName"); if (componentName != null) { + @SuppressWarnings({"rawtypes"}) Map map = (Map) val.get(parts.get(1)); if (map != null) { Object o = map.get(componentName); @@ -261,6 +265,7 @@ private void handleGET() { if(info == null) continue; if (info.type.equals(parts.get(1)) && info.name.equals(componentName)) { if (o instanceof Map) { + @SuppressWarnings({"rawtypes"}) Map m1 = (Map) o; m1.put("_packageinfo_", listener.getPackageVersion()); } @@ -275,11 +280,13 @@ private void handleGET() { } } + @SuppressWarnings({"unchecked"}) private Map getConfigDetails(String componentType, SolrQueryRequest req) { String componentName = componentType == null ? null : req.getParams().get("componentName"); boolean showParams = req.getParams().getBool("expandParams", false); Map map = this.req.getCore().getSolrConfig().toMap(new LinkedHashMap<>()); if (componentType != null && !SolrRequestHandler.TYPE.equals(componentType)) return map; + @SuppressWarnings({"rawtypes"}) Map reqHandlers = (Map) map.get(SolrRequestHandler.TYPE); if (reqHandlers == null) map.put(SolrRequestHandler.TYPE, reqHandlers = new LinkedHashMap<>()); List plugins = this.req.getCore().getImplicitHandlers(); @@ -292,6 +299,7 @@ private Map getConfigDetails(String componentType, SolrQueryRequ } if (!showParams) return map; for (Object o : reqHandlers.entrySet()) { + @SuppressWarnings({"rawtypes"}) Map.Entry e = (Map.Entry) o; if (componentName == null || e.getKey().equals(componentName)) { Map m = expandUseParams(req, e.getValue()); @@ -302,6 +310,7 @@ private Map getConfigDetails(String componentType, SolrQueryRequ return map; } + @SuppressWarnings({"unchecked"}) private Map expandUseParams(SolrQueryRequest req, Object plugin) { @@ -311,9 +320,11 @@ private Map expandUseParams(SolrQueryRequest req, } else if (plugin instanceof PluginInfo) { pluginInfo = ((PluginInfo) plugin).toMap(new LinkedHashMap<>()); } + @SuppressWarnings({"rawtypes"}) String useParams = (String) pluginInfo.get(USEPARAM); String useParamsInReq = req.getOriginalParams().get(USEPARAM); if (useParams != null || useParamsInReq != null) { + @SuppressWarnings({"rawtypes"}) Map m = new LinkedHashMap<>(); pluginInfo.put("_useParamsExpanded_", m); List params = new ArrayList<>(); @@ -331,6 +342,7 @@ private Map expandUseParams(SolrQueryRequest req, LocalSolrQueryRequest r = new LocalSolrQueryRequest(req.getCore(), req.getOriginalParams()); r.getContext().put(USEPARAM, useParams); + @SuppressWarnings({"rawtypes"}) NamedList nl = new PluginInfo(SolrRequestHandler.TYPE, pluginInfo).initArgs; SolrPluginUtils.setDefaults(r, getSolrParamsFromNamedList(nl, DEFAULTS), @@ -380,6 +392,7 @@ private void handlePOST() throws IOException { } + @SuppressWarnings({"unchecked"}) private void handleParams(ArrayList ops, RequestParams params) { for (CommandOperation op : ops) { switch (op.name) { @@ -390,6 +403,7 @@ private void handleParams(ArrayList ops, RequestParams params) for (Map.Entry entry : map.entrySet()) { + @SuppressWarnings({"rawtypes"}) Map val; String key = entry.getKey(); if 
(isNullOrEmpty(key)) { @@ -449,6 +463,7 @@ private void handleParams(ArrayList ops, RequestParams params) } + @SuppressWarnings({"rawtypes"}) List errs = CommandOperation.captureErrors(ops); if (!errs.isEmpty()) { throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "error processing params", errs); @@ -478,6 +493,7 @@ private void handleParams(ArrayList ops, RequestParams params) } + @SuppressWarnings({"unchecked"}) private void handleCommands(List ops, ConfigOverlay overlay) throws IOException { for (CommandOperation op : ops) { switch (op.name) { @@ -514,6 +530,7 @@ private void handleCommands(List ops, ConfigOverlay overlay) t } } } + @SuppressWarnings({"rawtypes"}) List errs = CommandOperation.captureErrors(ops); if (!errs.isEmpty()) { log.error("ERROR:{}", Utils.toJSONString(errs)); @@ -561,7 +578,9 @@ private ConfigOverlay updateNamedPlugin(SolrConfig.SolrPluginInfo info, CommandO return overlay; } try { - new PluginBag.RuntimeLib(req.getCore()).init(new PluginInfo(info.tag, op.getDataMap())); + try (PluginBag.RuntimeLib rtl = new PluginBag.RuntimeLib(req.getCore())) { + rtl.init(new PluginInfo(info.tag, op.getDataMap())); + } } catch (Exception e) { op.addError(e.getMessage()); log.error("can't load this plugin ", e); @@ -592,13 +611,15 @@ private boolean pluginExists(SolrConfig.SolrPluginInfo info, ConfigOverlay overl return overlay.getNamedPlugins(info.getCleanTag()).containsKey(name); } - private boolean verifyClass(CommandOperation op, String clz, Class expected) { + @SuppressWarnings({"unchecked"}) + private boolean verifyClass(CommandOperation op, String clz, @SuppressWarnings({"rawtypes"})Class expected) { if (clz == null) return true; if (!"true".equals(String.valueOf(op.getStr("runtimeLib", null)))) { PluginInfo info = new PluginInfo(SolrRequestHandler.TYPE, op.getDataMap()); //this is not dynamically loaded so we can verify the class right away try { if(expected == Expressible.class) { + @SuppressWarnings("resource") SolrResourceLoader resourceLoader = info.pkgName == null ? 
req.getCore().getResourceLoader() : req.getCore().getResourceLoader(info.pkgName); @@ -661,6 +682,7 @@ private ConfigOverlay applySetProp(CommandOperation op, ConfigOverlay overlay) { for (Map.Entry e : m.entrySet()) { String name = e.getKey(); Object val = e.getValue(); + @SuppressWarnings({"rawtypes"}) Class typ = ConfigOverlay.checkEditable(name, false, null); if (typ == null) { op.addError(formatString(NOT_EDITABLE, name)); @@ -871,6 +893,7 @@ public Name getPermissionName(AuthorizationContext ctx) { } } + @SuppressWarnings({"rawtypes"}) private static class PerReplicaCallable extends SolrRequest implements Callable { String coreUrl; String prop; @@ -909,6 +932,7 @@ public Boolean call() throws Exception { Thread.sleep(100); NamedList resp = solr.httpUriRequest(this).future.get(); if (resp != null) { + @SuppressWarnings({"rawtypes"}) Map m = (Map) resp.get(ZNODEVER); if (m != null) { remoteVersion = (Number) m.get(prop); diff --git a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java index bd76ae9a9250..f1b15445dc2a 100644 --- a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java @@ -53,7 +53,7 @@ import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; -import org.apache.solr.core.CloseHook; +import org.apache.solr.common.params.StreamParams; import org.apache.solr.core.CoreContainer; import org.apache.solr.core.PluginInfo; import org.apache.solr.core.SolrConfig; @@ -88,12 +88,14 @@ */ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware, PermissionNameProvider { - static SolrClientCache clientCache = new SolrClientCache(); - static ModelCache modelCache = null; - static ConcurrentMap objectCache = new ConcurrentHashMap(); + private ModelCache modelCache; + @SuppressWarnings({"rawtypes"}) + private ConcurrentMap objectCache; private SolrDefaultStreamFactory streamFactory = new SolrDefaultStreamFactory(); private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private String coreName; + private SolrClientCache solrClientCache; + @SuppressWarnings({"unchecked", "rawtypes"}) private Map daemons = Collections.synchronizedMap(new HashMap()); @Override @@ -101,48 +103,38 @@ public PermissionNameProvider.Name getPermissionName(AuthorizationContext reques return PermissionNameProvider.Name.READ_PERM; } - public static SolrClientCache getClientCache() { - return clientCache; - } - + @SuppressWarnings({"rawtypes"}) public void inform(SolrCore core) { String defaultCollection; String defaultZkhost; CoreContainer coreContainer = core.getCoreContainer(); + this.solrClientCache = coreContainer.getSolrClientCache(); this.coreName = core.getName(); - + String cacheKey = this.getClass().getName() + "_" + coreName + "_"; + this.objectCache = coreContainer.getObjectCache().computeIfAbsent(cacheKey + "objectCache", + ConcurrentHashMap.class, k-> new ConcurrentHashMap()); if (coreContainer.isZooKeeperAware()) { defaultCollection = core.getCoreDescriptor().getCollectionName(); defaultZkhost = core.getCoreContainer().getZkController().getZkServerAddress(); streamFactory.withCollectionZkHost(defaultCollection, defaultZkhost); streamFactory.withDefaultZkHost(defaultZkhost); - modelCache = new ModelCache(250, - defaultZkhost, - clientCache); + modelCache = 
coreContainer.getObjectCache().computeIfAbsent(cacheKey + "modelCache", + ModelCache.class, + k -> new ModelCache(250, defaultZkhost, solrClientCache)); } streamFactory.withSolrResourceLoader(core.getResourceLoader()); // This pulls all the overrides and additions from the config addExpressiblePlugins(streamFactory, core); - - core.addCloseHook(new CloseHook() { - @Override - public void preClose(SolrCore core) { - // To change body of implemented methods use File | Settings | File Templates. - } - - @Override - public void postClose(SolrCore core) { - clientCache.close(); - } - }); } + @SuppressWarnings({"unchecked"}) public static void addExpressiblePlugins(StreamFactory streamFactory, SolrCore core) { List pluginInfos = core.getSolrConfig().getPluginInfos(Expressible.class.getName()); for (PluginInfo pluginInfo : pluginInfos) { if (pluginInfo.pkgName != null) { - ExpressibleHolder holder = new ExpressibleHolder(pluginInfo, core, SolrConfig.classVsSolrPluginInfo.get(Expressible.class)); + @SuppressWarnings("resource") + ExpressibleHolder holder = new ExpressibleHolder(pluginInfo, core, SolrConfig.classVsSolrPluginInfo.get(Expressible.class.getName())); streamFactory.withFunctionName(pluginInfo.name, () -> holder.getClazz()); } else { @@ -152,6 +144,7 @@ public static void addExpressiblePlugins(StreamFactory streamFactory, SolrCore c } } + @SuppressWarnings({"rawtypes"}) public static class ExpressibleHolder extends PackagePluginHolder { private Class clazz; @@ -159,6 +152,7 @@ public ExpressibleHolder(PluginInfo info, SolrCore core, SolrConfig.SolrPluginIn super(info, core, pluginMeta); } + @SuppressWarnings({"rawtypes"}) public Class getClazz() { return clazz; } @@ -183,10 +177,10 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw TupleStream tupleStream; try { - StreamExpression streamExpression = StreamExpressionParser.parse(params.get("expr")); + StreamExpression streamExpression = StreamExpressionParser.parse(params.get(StreamParams.EXPR)); if (this.streamFactory.isEvaluator(streamExpression)) { - StreamExpression tupleExpression = new StreamExpression("tuple"); - tupleExpression.addParameter(new StreamExpressionNamedParameter("return-value", streamExpression)); + StreamExpression tupleExpression = new StreamExpression(StreamParams.TUPLE); + tupleExpression.addParameter(new StreamExpressionNamedParameter(StreamParams.RETURN_VALUE, streamExpression)); tupleStream = this.streamFactory.constructStream(tupleExpression); } else { tupleStream = this.streamFactory.constructStream(streamExpression); @@ -195,13 +189,14 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw // Catch exceptions that occur while the stream is being created. This will include streaming expression parse // rules. SolrException.log(log, e); - rsp.add("result-set", new DummyErrorStream(e)); + rsp.add(StreamParams.RESULT_SET, new DummyErrorStream(e)); return; } final SolrCore core = req.getCore(); // explicit check for null core (temporary?, for tests) + @SuppressWarnings("resource") ZkController zkController = core == null ? 
null : core.getCoreContainer().getZkController(); RequestReplicaListTransformerGenerator requestReplicaListTransformerGenerator; if (zkController != null) { @@ -226,7 +221,7 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw context.put("shards", getCollectionShards(params)); context.workerID = worker; context.numWorkers = numWorkers; - context.setSolrClientCache(clientCache); + context.setSolrClientCache(solrClientCache); context.setModelCache(modelCache); context.setObjectCache(objectCache); context.put("core", this.coreName); @@ -247,9 +242,9 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw daemonStream.setDaemons(daemons); daemonStream.open(); // This will start the daemonStream daemons.put(daemonStream.getId(), daemonStream); - rsp.add("result-set", new DaemonResponseStream("Daemon:" + daemonStream.getId() + " started on " + coreName)); + rsp.add(StreamParams.RESULT_SET, new DaemonResponseStream("Daemon:" + daemonStream.getId() + " started on " + coreName)); } else { - rsp.add("result-set", new TimerStream(new ExceptionStream(tupleStream))); + rsp.add(StreamParams.RESULT_SET, new TimerStream(new ExceptionStream(tupleStream))); } } @@ -262,40 +257,40 @@ private void handleAdmin(SolrQueryRequest req, SolrQueryResponse rsp, SolrParams if ("list".equals(action)) { Collection vals = daemons.values(); - rsp.add("result-set", new DaemonCollectionStream(vals)); + rsp.add(StreamParams.RESULT_SET, new DaemonCollectionStream(vals)); return; } String id = params.get(ID); DaemonStream d = daemons.get(id); if (d == null) { - rsp.add("result-set", new DaemonResponseStream("Daemon:" + id + " not found on " + coreName)); + rsp.add(StreamParams.RESULT_SET, new DaemonResponseStream("Daemon:" + id + " not found on " + coreName)); return; } switch (action) { case "stop": d.close(); - rsp.add("result-set", new DaemonResponseStream("Daemon:" + id + " stopped on " + coreName)); + rsp.add(StreamParams.RESULT_SET, new DaemonResponseStream("Daemon:" + id + " stopped on " + coreName)); break; case "start": try { d.open(); } catch (IOException e) { - rsp.add("result-set", new DaemonResponseStream("Daemon: " + id + " error: " + e.getMessage())); + rsp.add(StreamParams.RESULT_SET, new DaemonResponseStream("Daemon: " + id + " error: " + e.getMessage())); } - rsp.add("result-set", new DaemonResponseStream("Daemon:" + id + " started on " + coreName)); + rsp.add(StreamParams.RESULT_SET, new DaemonResponseStream("Daemon:" + id + " started on " + coreName)); break; case "kill": daemons.remove(id); d.close(); // we already found it in the daemons list, so we don't need to verify we removed it. 
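// Sketch only, not part of the patch: the StreamHandler.inform() hunk above moves
// handler state out of static fields (clientCache, modelCache, objectCache) and into
// container-owned caches keyed by handler class plus core name, so closing or
// reloading one core no longer shuts down a JVM-wide SolrClientCache via a CloseHook.
// Only container methods shown in this patch are used; names here are illustrative.
import java.util.concurrent.ConcurrentHashMap;
import org.apache.solr.client.solrj.io.SolrClientCache;
import org.apache.solr.core.SolrCore;

class CacheScopingSketch {
  @SuppressWarnings({"rawtypes"})
  ConcurrentHashMap perCoreObjectCache(SolrCore core) {
    SolrClientCache clientCache = core.getCoreContainer().getSolrClientCache(); // fetched, never created or closed here
    String cacheKey = getClass().getName() + "_" + core.getName() + "_";
    return core.getCoreContainer().getObjectCache().computeIfAbsent(
        cacheKey + "objectCache", ConcurrentHashMap.class, k -> new ConcurrentHashMap());
  }
}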
- rsp.add("result-set", new DaemonResponseStream("Daemon:" + id + " killed on " + coreName)); + rsp.add(StreamParams.RESULT_SET, new DaemonResponseStream("Daemon:" + id + " killed on " + coreName)); break; default: - rsp.add("result-set", new DaemonResponseStream("Daemon:" + id + " action '" + rsp.add(StreamParams.RESULT_SET, new DaemonResponseStream("Daemon:" + id + " action '" + action + "' not recognized on " + coreName)); break; } @@ -358,11 +353,7 @@ public Tuple read() { msg = t.getMessage(); t = t.getCause(); } - - Map m = new HashMap(); - m.put("EOF", true); - m.put("EXCEPTION", msg); - return new Tuple(m); + return Tuple.EXCEPTION(msg, true); } } @@ -404,9 +395,7 @@ public Tuple read() { if (it.hasNext()) { return it.next().getInfo(); } else { - Map m = new HashMap(); - m.put("EOF", true); - return new Tuple(m); + return Tuple.EOF(); } } } @@ -448,14 +437,10 @@ public Explanation toExplanation(StreamFactory factory) throws IOException { public Tuple read() { if (sendEOF) { - Map m = new HashMap(); - m.put("EOF", true); - return new Tuple(m); + return Tuple.EOF(); } else { sendEOF = true; - Map m = new HashMap(); - m.put("DaemonOp", message); - return new Tuple(m); + return new Tuple("DaemonOp", message); } } } @@ -500,11 +485,12 @@ public Explanation toExplanation(StreamFactory factory) throws IOException { .withExpression("--non-expressible--"); } + @SuppressWarnings({"unchecked"}) public Tuple read() throws IOException { Tuple tuple = this.tupleStream.read(); if (tuple.EOF) { long totalTime = (System.nanoTime() - begin) / 1000000; - tuple.fields.put("RESPONSE_TIME", totalTime); + tuple.put(StreamParams.RESPONSE_TIME, totalTime); } return tuple; } @@ -512,7 +498,7 @@ public Tuple read() throws IOException { private Map> getCollectionShards(SolrParams params) { - Map> collectionShards = new HashMap(); + Map> collectionShards = new HashMap<>(); Iterator paramsIt = params.getParameterNamesIterator(); while (paramsIt.hasNext()) { String param = paramsIt.next(); @@ -520,7 +506,8 @@ private Map> getCollectionShards(SolrParams params) { String collection = param.split("\\.")[0]; String shardString = params.get(param); String[] shards = shardString.split(","); - List shardList = new ArrayList(); + @SuppressWarnings({"rawtypes"}) + List shardList = new ArrayList<>(); for (String shard : shards) { shardList.add(shard); } diff --git a/solr/core/src/java/org/apache/solr/handler/UpdateRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/UpdateRequestHandler.java index cbe2cbaeddb6..cd64b55333b6 100644 --- a/solr/core/src/java/org/apache/solr/handler/UpdateRequestHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/UpdateRequestHandler.java @@ -113,7 +113,7 @@ private void setDefaultWT(SolrQueryRequest req, ContentStreamLoader loader) { }; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init(args); // Since backed by a non-thread safe Map, it should not be modifiable @@ -133,7 +133,7 @@ protected void setAssumeContentType(String ct) { } } private Map pathVsLoaders = new HashMap<>(); - protected Map createDefaultLoaders(NamedList args) { + protected Map createDefaultLoaders(@SuppressWarnings({"rawtypes"})NamedList args) { SolrParams p = null; if(args!=null) { p = args.toSolrParams(); diff --git a/solr/core/src/java/org/apache/solr/handler/admin/AdminHandlersProxy.java b/solr/core/src/java/org/apache/solr/handler/admin/AdminHandlersProxy.java index 71da0e5dc60b..f1f944adbfa4 100644 --- 
a/solr/core/src/java/org/apache/solr/handler/admin/AdminHandlersProxy.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/AdminHandlersProxy.java @@ -69,6 +69,7 @@ public static boolean maybeProxyToNodes(SolrQueryRequest req, SolrQueryResponse Set nodes; String pathStr = req.getPath(); + @SuppressWarnings({"unchecked"}) Map paramsMap = req.getParams().toMap(new HashMap<>()); paramsMap.remove(PARAM_NODES); SolrParams params = new MapSolrParams(paramsMap); @@ -125,6 +126,7 @@ public static Pair>, SolrClient> callRemoteNode(String log.debug("Proxying {} request to node {}", endpoint, nodeName); URL baseUrl = new URL(zkController.zkStateReader.getBaseUrlForNodeName(nodeName)); HttpSolrClient solr = new HttpSolrClient.Builder(baseUrl.toString()).build(); + @SuppressWarnings({"rawtypes"}) SolrRequest proxyReq = new GenericSolrRequest(SolrRequest.METHOD.GET, endpoint, params); HttpSolrClient.HttpUriRequestResponse proxyResp = solr.httpUriRequest(proxyReq); return new Pair<>(proxyResp.future, solr); diff --git a/solr/core/src/java/org/apache/solr/handler/admin/AutoscalingHistoryHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/AutoscalingHistoryHandler.java index d6464fcc09e9..5fb618e88d83 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/AutoscalingHistoryHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/AutoscalingHistoryHandler.java @@ -100,6 +100,7 @@ public Name getPermissionName(AuthorizationContext request) { } @Override + @SuppressWarnings({"unchecked"}) public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(req.getParams()); String collection = params.get(SYSTEM_COLLECTION_PARAM, CollectionAdminParams.SYSTEM_COLL); diff --git a/solr/core/src/java/org/apache/solr/handler/admin/BaseHandlerApiSupport.java b/solr/core/src/java/org/apache/solr/handler/admin/BaseHandlerApiSupport.java index 85033f3b1847..d813e449caf1 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/BaseHandlerApiSupport.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/BaseHandlerApiSupport.java @@ -132,6 +132,7 @@ public void call(SolrQueryRequest req, SolrQueryResponse rsp) { /** * Wrapper for SolrParams that wraps V2 params and exposes them as V1 params. */ + @SuppressWarnings({"unchecked"}) private static void wrapParams(final SolrQueryRequest req, final CommandOperation co, final ApiCommand cmd, final boolean useRequestParams) { final Map pathValues = req.getPathTemplateValues(); final Map map = co == null || !(co.getCommandData() instanceof Map) ? 
@@ -158,6 +159,7 @@ private Object getParams0(String param) { if (o == null) o = pathValues.get(param); if (o == null && useRequestParams) o = origParams.getParams(param); if (o instanceof List) { + @SuppressWarnings({"rawtypes"}) List l = (List) o; return l.toArray(new String[l.size()]); } @@ -179,7 +181,7 @@ public Iterator getParameterNamesIterator() { } @Override - public Map toMap(Map suppliedMap) { + public Map toMap(Map suppliedMap) { for(Iterator it=getParameterNamesIterator(); it.hasNext(); ) { final String param = it.next(); String key = cmd.meta().getParamSubstitute(param); @@ -197,6 +199,7 @@ public Map toMap(Map suppliedMap) { Boolean.class.isAssignableFrom(oClass)) { suppliedMap.put(param,String.valueOf(o)); } else if (List.class.isAssignableFrom(oClass) && ((List)o).get(0) instanceof String ) { + @SuppressWarnings({"unchecked"}) List l = (List) o; suppliedMap.put( param, l.toArray(new String[0])); } else { diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java b/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java index 50fa95aafd01..2265c9b75eca 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java @@ -22,7 +22,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; @@ -55,7 +54,7 @@ public ClusterStatus(ZkStateReader zkStateReader, ZkNodeProps props) { } @SuppressWarnings("unchecked") - public void getClusterStatus(NamedList results) + public void getClusterStatus(@SuppressWarnings({"rawtypes"})NamedList results) throws KeeperException, InterruptedException { // read aliases Aliases aliases = zkStateReader.getAliases(); @@ -72,6 +71,7 @@ public void getClusterStatus(NamedList results) } } + @SuppressWarnings({"rawtypes"}) Map roles = null; if (zkStateReader.getZkClient().exists(ZkStateReader.ROLES, true)) { roles = (Map) Utils.fromJSON(zkStateReader.getZkClient().getData(ZkStateReader.ROLES, null, null, true)); @@ -79,10 +79,6 @@ public void getClusterStatus(NamedList results) ClusterState clusterState = zkStateReader.getClusterState(); - // convert cluster state into a map of writable types - byte[] bytes = Utils.toJSON(clusterState); - Map stateMap = (Map) Utils.fromJSON(bytes); - String routeKey = message.getStr(ShardParams._ROUTE_); String shard = message.getStr(ZkStateReader.SHARD_ID_PROP); @@ -134,13 +130,9 @@ public void getClusterStatus(NamedList results) requestedShards.addAll(Arrays.asList(paramShards)); } - if (clusterStateCollection.getStateFormat() > 1) { - bytes = Utils.toJSON(clusterStateCollection); + byte[] bytes = Utils.toJSON(clusterStateCollection); Map docCollection = (Map) Utils.fromJSON(bytes); collectionStatus = getCollectionStatus(docCollection, name, requestedShards); - } else { - collectionStatus = getCollectionStatus((Map) stateMap.get(name), name, requestedShards); - } collectionStatus.put("znodeVersion", clusterStateCollection.getZNodeVersion()); if (collectionVsAliases.containsKey(name) && !collectionVsAliases.get(name).isEmpty()) { @@ -165,7 +157,7 @@ public void getClusterStatus(NamedList results) clusterStatus.add("collections", collectionProps); // read cluster properties - Map clusterProps = zkStateReader.getClusterProperties(); + Map clusterProps = zkStateReader.getClusterProperties(); if (clusterProps != null && !clusterProps.isEmpty()) { clusterStatus.add("properties", 
clusterProps); } @@ -231,19 +223,17 @@ private Map getCollectionStatus(Map collection, @SuppressWarnings("unchecked") protected void crossCheckReplicaStateWithLiveNodes(List liveNodes, NamedList collectionProps) { - Iterator> colls = collectionProps.iterator(); - while (colls.hasNext()) { - Map.Entry next = colls.next(); - Map collMap = (Map)next.getValue(); - Map shards = (Map)collMap.get("shards"); + for (Map.Entry next : collectionProps) { + Map collMap = (Map) next.getValue(); + Map shards = (Map) collMap.get("shards"); for (Object nextShard : shards.values()) { - Map shardMap = (Map)nextShard; - Map replicas = (Map)shardMap.get("replicas"); + Map shardMap = (Map) nextShard; + Map replicas = (Map) shardMap.get("replicas"); for (Object nextReplica : replicas.values()) { - Map replicaMap = (Map)nextReplica; + Map replicaMap = (Map) nextReplica; if (Replica.State.getState((String) replicaMap.get(ZkStateReader.STATE_PROP)) != Replica.State.DOWN) { // not down, so verify the node is live - String node_name = (String)replicaMap.get(ZkStateReader.NODE_NAME_PROP); + String node_name = (String) replicaMap.get(ZkStateReader.NODE_NAME_PROP); if (!liveNodes.contains(node_name)) { // node is not live, so this replica is actually down replicaMap.put(ZkStateReader.STATE_PROP, Replica.State.DOWN.toString()); @@ -253,6 +243,4 @@ protected void crossCheckReplicaStateWithLiveNodes(List liveNodes, Named } } } - - } diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ColStatus.java b/solr/core/src/java/org/apache/solr/handler/admin/ColStatus.java index 609b39bac104..90b7625e57bf 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/ColStatus.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/ColStatus.java @@ -27,7 +27,6 @@ import java.util.TreeMap; import java.util.TreeSet; -import org.apache.http.client.HttpClient; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.io.SolrClientCache; @@ -66,12 +65,13 @@ public class ColStatus { public static final String RAW_SIZE_SAMPLING_PERCENT_PROP = SegmentsInfoRequestHandler.RAW_SIZE_SAMPLING_PERCENT_PARAM; public static final String SEGMENTS_PROP = "segments"; - public ColStatus(HttpClient httpClient, ClusterState clusterState, ZkNodeProps props) { + public ColStatus(SolrClientCache solrClientCache, ClusterState clusterState, ZkNodeProps props) { this.props = props; - this.solrClientCache = new SolrClientCache(httpClient); + this.solrClientCache = solrClientCache; this.clusterState = clusterState; } + @SuppressWarnings({"unchecked"}) public void getColStatus(NamedList results) { Collection collections; String col = props.getStr(ZkStateReader.COLLECTION_PROP); @@ -101,7 +101,6 @@ public void getColStatus(NamedList results) { continue; } SimpleOrderedMap colMap = new SimpleOrderedMap<>(); - colMap.add("stateFormat", coll.getStateFormat()); colMap.add("znodeVersion", coll.getZNodeVersion()); Map props = new TreeMap<>(coll.getProperties()); props.remove("shards"); diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java index d63b48f10934..f443832b2dc1 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java @@ -125,7 +125,6 @@ import static org.apache.solr.common.cloud.DocCollection.DOC_ROUTER; import static 
org.apache.solr.common.cloud.DocCollection.RULE; import static org.apache.solr.common.cloud.DocCollection.SNITCH; -import static org.apache.solr.common.cloud.DocCollection.STATE_FORMAT; import static org.apache.solr.common.cloud.ZkStateReader.AUTO_ADD_REPLICAS; import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP; import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE; @@ -204,7 +203,7 @@ public PermissionNameProvider.Name getPermissionName(AuthorizationContext ctx) { } @Override - final public void init(NamedList args) { + final public void init(@SuppressWarnings({"rawtypes"})NamedList args) { } @@ -261,6 +260,7 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw rsp.setHttpCaching(false); } + @SuppressWarnings({"unchecked"}) void invokeAction(SolrQueryRequest req, SolrQueryResponse rsp, CoreContainer cores, CollectionAction action, CollectionOperation operation) throws Exception { if (!coreContainer.isZooKeeperAware()) { throw new SolrException(BAD_REQUEST, @@ -463,7 +463,6 @@ public enum CollectionOperation implements CollectionOp { CREATE_NODE_SET, CREATE_NODE_SET_SHUFFLE, SHARDS_PROP, - STATE_FORMAT, AUTO_ADD_REPLICAS, RULE, SNITCH, @@ -475,8 +474,6 @@ public enum CollectionOperation implements CollectionOp { WITH_COLLECTION, ALIAS); - props.putIfAbsent(STATE_FORMAT, "2"); - if (props.get(REPLICATION_FACTOR) != null && props.get(NRT_REPLICAS) != null) { //TODO: Remove this in 8.0 . Keep this for SolrJ client back-compat. See SOLR-11676 for more details int replicationFactor = Integer.parseInt((String) props.get(REPLICATION_FACTOR)); @@ -514,6 +511,7 @@ public enum CollectionOperation implements CollectionOp { return copyPropertiesWithPrefix(req.getParams(), props, "router."); }), + @SuppressWarnings({"unchecked"}) COLSTATUS_OP(COLSTATUS, (req, rsp, h) -> { Map props = copy(req.getParams(), null, COLLECTION_PROP, @@ -529,7 +527,7 @@ public enum CollectionOperation implements CollectionOp { if (props.containsKey(CoreAdminParams.NAME) && !props.containsKey(COLLECTION_PROP)) { props.put(COLLECTION_PROP, props.get(CoreAdminParams.NAME)); } - new ColStatus(h.coreContainer.getUpdateShardHandler().getDefaultHttpClient(), + new ColStatus(h.coreContainer.getSolrClientCache(), h.coreContainer.getZkController().getZkStateReader().getClusterState(), new ZkNodeProps(props)) .getColStatus(rsp.getValues()); return null; @@ -567,7 +565,6 @@ public enum CollectionOperation implements CollectionOp { CREATE_NODE_SET_SHUFFLE, AUTO_ADD_REPLICAS, "shards", - STATE_FORMAT, CommonParams.ROWS, CommonParams.Q, CommonParams.FL, @@ -603,6 +600,7 @@ public enum CollectionOperation implements CollectionOp { return null; }), + @SuppressWarnings({"unchecked"}) CREATEALIAS_OP(CREATEALIAS, (req, rsp, h) -> { String alias = req.getParams().get(NAME); SolrIdentifierValidator.validateAliasName(alias); @@ -617,7 +615,6 @@ public enum CollectionOperation implements CollectionOp { // we'll throw this later if we are in fact creating a routed alias. ex = e; } - @SuppressWarnings("unchecked") ModifiableSolrParams finalParams = new ModifiableSolrParams(); for (Map.Entry entry : possiblyModifiedParams.entrySet()) { if (entry.getValue().getClass().isArray() ) { @@ -705,6 +702,7 @@ public enum CollectionOperation implements CollectionOp { /** * List the aliases and associated properties. 
*/ + @SuppressWarnings({"unchecked"}) LISTALIASES_OP(LISTALIASES, (req, rsp, h) -> { ZkStateReader zkStateReader = h.coreContainer.getZkController().getZkStateReader(); // if someone calls listAliases, lets ensure we return an up to date response @@ -852,6 +850,7 @@ public enum CollectionOperation implements CollectionOp { cp.setCollectionProperty(collection, name, val); return null; }), + @SuppressWarnings({"unchecked"}) REQUESTSTATUS_OP(REQUESTSTATUS, (req, rsp, h) -> { req.getParams().required().check(REQUESTID); @@ -952,6 +951,7 @@ public Map execute(SolrQueryRequest req, SolrQueryResponse rsp, * Handle list collection request. * Do list collection request to zk host */ + @SuppressWarnings({"unchecked"}) LIST_OP(LIST, (req, rsp, h) -> { NamedList results = new NamedList<>(); Map collections = h.coreContainer.getZkController().getZkStateReader().getClusterState().getCollectionsMap(); @@ -1062,8 +1062,6 @@ public Map execute(SolrQueryRequest req, SolrQueryResponse rsp, } return m; }), - MIGRATESTATEFORMAT_OP(MIGRATESTATEFORMAT, (req, rsp, h) -> copy(req.getParams().required(), null, COLLECTION_PROP)), - BACKUP_OP(BACKUP, (req, rsp, h) -> { req.getParams().required().check(NAME, COLLECTION_PROP); @@ -1172,7 +1170,7 @@ public Map execute(SolrQueryRequest req, SolrQueryResponse rsp, } // from CREATE_OP: copy(req.getParams(), params, COLL_CONF, REPLICATION_FACTOR, NRT_REPLICAS, TLOG_REPLICAS, - PULL_REPLICAS, MAX_SHARDS_PER_NODE, STATE_FORMAT, AUTO_ADD_REPLICAS, CREATE_NODE_SET, CREATE_NODE_SET_SHUFFLE); + PULL_REPLICAS, MAX_SHARDS_PER_NODE, AUTO_ADD_REPLICAS, CREATE_NODE_SET, CREATE_NODE_SET_SHUFFLE); copyPropertiesWithPrefix(req.getParams(), params, COLL_PROP_PREFIX); return params; }), @@ -1448,9 +1446,11 @@ public static void waitForActiveCollection(String collectionName, CoreContainer } public static void verifyRuleParams(CoreContainer cc, Map m) { + @SuppressWarnings({"rawtypes"}) List l = (List) m.get(RULE); if (l != null) { for (Object o : l) { + @SuppressWarnings({"rawtypes"}) Map map = (Map) o; try { new Rule(map); @@ -1476,6 +1476,7 @@ private static Map addMapObject(Map props, Strin val.add(v.toString()); } if (val.size() > 0) { + @SuppressWarnings({"rawtypes"}) ArrayList l = new ArrayList<>(); for (String rule : val) l.add(Rule.parseRule(rule)); props.put(key, l); diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java index b90daba7b51f..566a5d8296e4 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java @@ -209,6 +209,7 @@ private void createZkNodeIfNotExistsAndSetData(SolrZkClient zkClient, } } + @SuppressWarnings({"unchecked"}) private void handleResponse(String operation, ZkNodeProps m, SolrQueryResponse rsp, long timeout) throws KeeperException, InterruptedException { long time = System.nanoTime(); @@ -219,6 +220,7 @@ private void handleResponse(String operation, ZkNodeProps m, if (event.getBytes() != null) { SolrResponse response = OverseerSolrResponseSerializer.deserialize(event.getBytes()); rsp.getValues().addAll(response.getResponse()); + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap exp = (SimpleOrderedMap) response.getResponse().get("exception"); if (exp != null) { Integer code = (Integer) exp.get("rspCode"); @@ -282,6 +284,7 @@ Map call(SolrQueryRequest req, SolrQueryResponse rsp, ConfigSets return CollectionsHandler.copy(req.getParams().required(), null, 
NAME); } }, + @SuppressWarnings({"unchecked"}) LIST_OP(LIST) { @Override Map call(SolrQueryRequest req, SolrQueryResponse rsp, ConfigSetsHandler h) throws Exception { diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java index f78e523ed2d7..0ef3ebbd999d 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java @@ -114,7 +114,7 @@ public CoreAdminHandler(final CoreContainer coreContainer) { @Override - final public void init(NamedList args) { + final public void init(@SuppressWarnings({"rawtypes"})NamedList args) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "CoreAdminHandler should not be configured in solrconf.xml\n" + "it is a special Handler configured directly by the RequestDispatcher"); diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java index f8c7e8c772ac..3036ced933bc 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java @@ -77,7 +77,7 @@ enum CoreAdminOperation implements CoreAdminOp { String coreName = params.required().get(CoreAdminParams.NAME); Map coreParams = buildCoreParams(params); CoreContainer coreContainer = it.handler.coreContainer; - Path instancePath = coreContainer.getCoreRootDirectory().resolve(coreName); + Path instancePath; // TODO: Should we nuke setting odd instance paths? They break core discovery, generally String instanceDir = it.req.getParams().get(CoreAdminParams.INSTANCE_DIR); @@ -86,6 +86,8 @@ enum CoreAdminOperation implements CoreAdminOp { if (instanceDir != null) { instanceDir = PropertiesUtil.substituteProperty(instanceDir, coreContainer.getContainerProperties()); instancePath = coreContainer.getCoreRootDirectory().resolve(instanceDir).normalize(); + } else { + instancePath = coreContainer.getCoreRootDirectory().resolve(coreName); } boolean newCollection = params.getBool(CoreAdminParams.NEW_COLLECTION, false); @@ -243,6 +245,7 @@ enum CoreAdminOperation implements CoreAdminOp { RESTORECORE_OP(RESTORECORE, new RestoreCoreOp()), CREATESNAPSHOT_OP(CREATESNAPSHOT, new CreateSnapshotOp()), DELETESNAPSHOT_OP(DELETESNAPSHOT, new DeleteSnapshotOp()), + @SuppressWarnings({"unchecked"}) LISTSNAPSHOTS_OP(LISTSNAPSHOTS, it -> { final SolrParams params = it.req.getParams(); String cname = params.required().get(CoreAdminParams.CORE); @@ -255,6 +258,7 @@ enum CoreAdminOperation implements CoreAdminOp { } SolrSnapshotMetaDataManager mgr = core.getSnapshotMetaDataManager(); + @SuppressWarnings({"rawtypes"}) NamedList result = new NamedList(); for (String name : mgr.listSnapshots()) { Optional metadata = mgr.getSnapshotMetaData(name); @@ -294,6 +298,7 @@ static Logger log() { * @return - a named list of key/value pairs from the core. 
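
A recurring pattern across these hunks is narrowing @SuppressWarnings from whole methods and classes down to the one declaration, parameter, or enum constant that actually raises the warning, so that new warnings elsewhere in the same method still surface. A minimal sketch of the idiom (class and method names here are illustrative, not Solr's):

import java.util.ArrayList;
import java.util.List;

public class NarrowSuppression {

  // Parameter-level suppression, as in init(@SuppressWarnings({"rawtypes"}) NamedList args):
  // only the raw-typed parameter is exempted; the method body is still checked.
  static int size(@SuppressWarnings({"rawtypes"}) List args) {
    return args == null ? 0 : args.size();
  }

  // Declaration-level suppression: the unchecked copy is confined to a single statement.
  static List<Object> toObjectList(@SuppressWarnings({"rawtypes"}) List src) {
    @SuppressWarnings({"unchecked"})
    List<Object> out = new ArrayList<Object>(src);
    return out;
  }

  public static void main(String[] args) {
    System.out.println(size(List.of(1, 2, 3)));          // 3
    System.out.println(toObjectList(List.of("a", "b"))); // [a, b]
  }
}
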
* @throws IOException - LukeRequestHandler can throw an I/O exception */ + @SuppressWarnings({"unchecked", "rawtypes"}) static NamedList getCoreStatus(CoreContainer cores, String cname, boolean isIndexInfoNeeded) throws IOException { NamedList info = new SimpleOrderedMap<>(); diff --git a/solr/core/src/java/org/apache/solr/handler/admin/HealthCheckHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/HealthCheckHandler.java index 8dc0f7908ffc..ddf22ef1b678 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/HealthCheckHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/HealthCheckHandler.java @@ -78,7 +78,7 @@ public HealthCheckHandler(final CoreContainer coreContainer) { } @Override - final public void init(NamedList args) { + final public void init(@SuppressWarnings({"rawtypes"})NamedList args) { } public CoreContainer getCoreContainer() { diff --git a/solr/core/src/java/org/apache/solr/handler/admin/IndexSizeEstimator.java b/solr/core/src/java/org/apache/solr/handler/admin/IndexSizeEstimator.java index 970b3136e24e..13fefa377099 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/IndexSizeEstimator.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/IndexSizeEstimator.java @@ -194,6 +194,7 @@ public void setSamplingPercent(float percent) throws IllegalArgumentException { this.samplingPercent = percent; } + @SuppressWarnings({"unchecked"}) public Estimate estimate() throws Exception { Map details = new LinkedHashMap<>(); Map summary = new LinkedHashMap<>(); @@ -244,6 +245,7 @@ public Estimate estimate() throws Exception { return new Estimate(fieldsBySize, typesBySize, withSummary ? newSummary : null, withDetails ? details : null); } + @SuppressWarnings({"unchecked"}) private void convert(Map result) { for (Map.Entry entry : result.entrySet()) { Object value = entry.getValue(); @@ -266,6 +268,7 @@ private void convert(Map result) { } } + @SuppressWarnings({"unchecked"}) private void estimateSummary(Map details, Map summary) { log.info("- preparing summary..."); details.forEach((type, perType) -> { diff --git a/solr/core/src/java/org/apache/solr/handler/admin/InfoHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/InfoHandler.java index b0c6d61314a6..98c320e59ceb 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/InfoHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/InfoHandler.java @@ -56,7 +56,7 @@ public InfoHandler(final CoreContainer coreContainer) { @Override - final public void init(NamedList args) { + final public void init(@SuppressWarnings({"rawtypes"})NamedList args) { } diff --git a/solr/core/src/java/org/apache/solr/handler/admin/LoggingHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/LoggingHandler.java index 5cf9de0507ac..ec27b1ceb309 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/LoggingHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/LoggingHandler.java @@ -47,6 +47,7 @@ public class LoggingHandler extends RequestHandlerBase implements SolrCoreAware { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + @SuppressWarnings({"rawtypes"}) private LogWatcher watcher; public LoggingHandler(CoreContainer cc) { @@ -139,6 +140,7 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw else { rsp.add("levels", watcher.getAllLevels()); + @SuppressWarnings({"unchecked"}) List loggers = new ArrayList<>(watcher.getAllLoggers()); Collections.sort(loggers); diff --git 
a/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java index c165a3352f22..5bb122f49ce2 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java @@ -17,8 +17,8 @@ package org.apache.solr.handler.admin; import java.io.IOException; -import java.nio.file.NoSuchFileException; import java.lang.invoke.MethodHandles; +import java.nio.file.NoSuchFileException; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; @@ -39,6 +39,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; @@ -47,7 +48,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.MultiTerms; import org.apache.lucene.index.PostingsEnum; -import org.apache.lucene.index.SegmentReader; import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; @@ -55,6 +55,7 @@ import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; +import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CharsRefBuilder; @@ -622,8 +623,8 @@ private static long getSegmentsFileLength(IndexCommit commit) { try { return commit.getDirectory().fileLength(commit.getSegmentsFileName()); } catch (NoSuchFileException okException) { - log.debug("Unable to determine the (optional) fileSize for the current IndexReader's segments file because it is {}{}" - , "no longer in the Directory, this can happen if there are new commits since the Reader was opened" + log.debug("Unable to determine the (optional) fileSize for the current IndexReader's segments file because it is " + + "no longer in the Directory, this can happen if there are new commits since the Reader was opened" , okException); } catch (IOException strangeException) { log.warn("Ignoring IOException while attempting to determine the (optional) fileSize stat for the current IndexReader's segments file", @@ -634,17 +635,19 @@ private static long getSegmentsFileLength(IndexCommit commit) { /** Returns the sum of RAM bytes used by each segment */ private static long getIndexHeapUsed(DirectoryReader reader) { - long indexHeapRamBytesUsed = 0; - for(LeafReaderContext leafReaderContext : reader.leaves()) { - LeafReader leafReader = leafReaderContext.reader(); - if (leafReader instanceof SegmentReader) { - indexHeapRamBytesUsed += ((SegmentReader) leafReader).ramBytesUsed(); - } else { - // Not supported for any reader that is not a SegmentReader - return -1; - } - } - return indexHeapRamBytesUsed; + return reader.leaves().stream() + .map(LeafReaderContext::reader) + .map(FilterLeafReader::unwrap) + .map(leafReader -> { + if (leafReader instanceof Accountable) { + return ((Accountable) leafReader).ramBytesUsed(); + } else { + return -1L; // unsupported + } + }) + .mapToLong(Long::longValue) + .reduce(0, (left, right) -> left == -1 || right == -1 ? -1 : left + right); + // if any leaves are unsupported (-1), we ultimately return -1. 
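
The rewritten getIndexHeapUsed() above folds the old early return into the reduction itself: -1 acts as a sentinel that poisons the sum. A self-contained sketch of just that reduction, with the Lucene reader plumbing omitted:

import java.util.List;

public class SentinelSum {
  // Same shape as the new getIndexHeapUsed() reduction: sum per-leaf sizes,
  // but once any leaf reports -1 ("unsupported"), the total collapses to -1.
  static long sum(List<Long> perLeafBytes) {
    return perLeafBytes.stream()
        .mapToLong(Long::longValue)
        .reduce(0, (left, right) -> left == -1 || right == -1 ? -1 : left + right);
  }

  public static void main(String[] args) {
    System.out.println(sum(List.of(10L, 20L, 30L))); // 60
    System.out.println(sum(List.of(10L, -1L, 30L))); // -1
  }
}

Because the identity is 0, an index whose very first leaf is unsupported still ends at -1: the first combine sees the sentinel and every later combine preserves it.
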
} // Get terribly detailed information about a particular field. This is a very expensive call, use it with caution @@ -747,6 +750,7 @@ public NamedList toNamedList() /** * Private internal class that counts up frequent terms */ + @SuppressWarnings("rawtypes") private static class TopTermQueue extends PriorityQueue { static class TermInfo { diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java index b98ec02c407e..c017f152b8ab 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java @@ -93,7 +93,7 @@ public MetricsCollectorHandler(final CoreContainer coreContainer) { } @Override - public void init(NamedList initArgs) { + public void init(@SuppressWarnings({"rawtypes"})NamedList initArgs) { super.init(initArgs); if (initArgs != null) { params = initArgs.toSolrParams(); diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java index f3b20adf9a97..e6d80174b4f5 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java @@ -103,6 +103,7 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw handleRequest(req.getParams(), (k, v) -> rsp.add(k, v)); } + @SuppressWarnings({"unchecked"}) public void handleRequest(SolrParams params, BiConsumer consumer) throws Exception { boolean compact = params.getBool(COMPACT_PARAM, true); String[] keys = params.getParams(KEY_PARAM); @@ -116,9 +117,11 @@ public void handleRequest(SolrParams params, BiConsumer consumer List metricFilters = metricTypes.stream().map(MetricType::asMetricFilter).collect(Collectors.toList()); Set requestedRegistries = parseRegistries(params); + @SuppressWarnings({"rawtypes"}) NamedList response = new SimpleOrderedMap(); for (String registryName : requestedRegistries) { MetricRegistry registry = metricManager.registry(registryName); + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap result = new SimpleOrderedMap(); MetricUtils.toMaps(registry, metricFilters, mustMatchFilter, propertyFilter, false, false, compact, false, (k, v) -> result.add(k, v)); @@ -129,6 +132,7 @@ public void handleRequest(SolrParams params, BiConsumer consumer consumer.accept("metrics", response); } + @SuppressWarnings({"unchecked", "rawtypes"}) public void handleKeyRequest(String[] keys, BiConsumer consumer) throws Exception { SimpleOrderedMap result = new SimpleOrderedMap(); SimpleOrderedMap errors = new SimpleOrderedMap(); @@ -345,9 +349,10 @@ enum MetricType { public static final String SUPPORTED_TYPES_MSG = EnumSet.allOf(MetricType.class).toString(); + @SuppressWarnings({"rawtypes"}) private final Class klass; - MetricType(Class klass) { + MetricType(@SuppressWarnings({"rawtypes"})Class klass) { this.klass = klass; } diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java index ad0a266149f1..5c475a188a1a 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java @@ -185,6 +185,7 @@ public MetricsHistoryHandler(String nodeName, MetricsHandler metricsHandler, } // override from ZK if available if 
(cloudManager != null) { + @SuppressWarnings({"unchecked"}) Map props = (Map)cloudManager.getClusterStateProvider() .getClusterProperty("metrics", Collections.emptyMap()) .getOrDefault("history", Collections.emptyMap()); @@ -379,6 +380,7 @@ private void collectMetrics() { ExecutorUtil.setServerThreadFlag(false); } + @SuppressWarnings({"unchecked", "rawtypes"}) private void collectLocalReplicaMetrics() { List groups = new ArrayList<>(); if (enableNodes) { @@ -800,6 +802,7 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw rsp.getResponseHeader().add("zkConnected", cloudManager != null); } + @SuppressWarnings({"unchecked"}) private NamedList handleRemoteRequest(String nodeName, SolrQueryRequest req) { String baseUrl = Utils.getBaseUrlForNodeName(nodeName, overseerUrlScheme); String url; @@ -827,6 +830,7 @@ private NamedList handleRemoteRequest(String nodeName, SolrQueryRequest } } + @SuppressWarnings({"unchecked", "rawtypes"}) private void mergeRemoteRes(SolrQueryResponse rsp, NamedList remoteRes) { if (remoteRes == null || remoteRes.get("metrics") == null) { return; diff --git a/solr/core/src/java/org/apache/solr/handler/admin/RebalanceLeaders.java b/solr/core/src/java/org/apache/solr/handler/admin/RebalanceLeaders.java index 10a455ffaab0..81900c32ed3e 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/RebalanceLeaders.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/RebalanceLeaders.java @@ -110,6 +110,7 @@ class RebalanceLeaders { final static String INACTIVE_PREFERREDS = "inactivePreferreds"; final static String ALREADY_LEADERS = "alreadyLeaders"; final static String SUMMARY = "Summary"; + @SuppressWarnings({"rawtypes"}) final SimpleOrderedMap results = new SimpleOrderedMap(); final Map pendingOps = new HashMap<>(); private String collectionName; @@ -122,6 +123,7 @@ class RebalanceLeaders { coreContainer = collectionsHandler.getCoreContainer(); } + @SuppressWarnings({"unchecked", "rawtypes"}) void execute() throws KeeperException, InterruptedException { DocCollection dc = checkParams(); @@ -286,6 +288,7 @@ private boolean electionQueueInBadState(List electionNodes, Slice slice, // Provide some feedback to the user about what actually happened, or in this case where no action was // possible + @SuppressWarnings({"unchecked", "rawtypes"}) private void addInactiveToResults(Slice slice, Replica replica) { SimpleOrderedMap inactives = (SimpleOrderedMap) results.get(INACTIVE_PREFERREDS); if (inactives == null) { @@ -300,6 +303,7 @@ private void addInactiveToResults(Slice slice, Replica replica) { // Provide some feedback to the user about what actually happened, or in this case where no action was // necessary since this preferred replica was already the leader + @SuppressWarnings({"unchecked", "rawtypes"}) private void addAlreadyLeaderToResults(Slice slice, Replica replica) { SimpleOrderedMap alreadyLeaders = (SimpleOrderedMap) results.get(ALREADY_LEADERS); if (alreadyLeaders == null) { @@ -458,6 +462,7 @@ private boolean waitAsyncRequests(final int maxWaitSecs, Boolean waitForAll) } // If we actually changed the leader, we should send that fact back in the response. 
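
addInactiveToResults(), addAlreadyLeaderToResults() and addToSuccesses() below all share one accumulation idiom: look up a sub-section of the shared results, create it on first use, then append the per-replica entry. Sketched here with plain java.util types standing in for Solr's SimpleOrderedMap:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ResultsAccumulator {
  static final Map<String, List<String>> results = new LinkedHashMap<>();

  // Create the section lazily; repeated calls reuse it.
  static void addTo(String section, String entry) {
    List<String> sub = results.get(section);
    if (sub == null) {
      sub = new ArrayList<>();
      results.put(section, sub);
    }
    sub.add(entry);
  }

  public static void main(String[] args) {
    addTo("inactivePreferreds", "shard1/core_node3");
    addTo("successes", "shard2/core_node5");
    addTo("successes", "shard3/core_node7");
    System.out.println(results);
  }
}
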
+ @SuppressWarnings({"unchecked", "rawtypes"}) private void addToSuccesses(Slice slice, Replica replica) { SimpleOrderedMap successes = (SimpleOrderedMap) results.get("successes"); if (successes == null) { @@ -476,7 +481,8 @@ private void addToSuccesses(Slice slice, Replica replica) { // If for any reason we were supposed to change leadership, that should be recorded in changingLeaders. Any // time we verified that the change actually occurred, that entry should have been removed. So report anything // left over as a failure. - private void addAnyFailures() { + @SuppressWarnings({"unchecked", "rawtypes"}) + private void addAnyFailures() { if (pendingOps.size() == 0) { return; } diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java index aff30a40d488..f590bf3bdc48 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java @@ -86,6 +86,7 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw } } + @SuppressWarnings({"unchecked", "rawtypes"}) private void doEdit(SolrQueryRequest req, SolrQueryResponse rsp, String path, final String key, final Object plugin) throws IOException { ConfigEditablePlugin configEditablePlugin = null; @@ -156,14 +157,16 @@ Object getPlugin(String key) { protected abstract void getConf(SolrQueryResponse rsp, String key); public static Map getMapValue(Map lookupMap, String key) { + @SuppressWarnings({"unchecked"}) Map m = (Map) lookupMap.get(key); if (m == null) lookupMap.put(key, m = new LinkedHashMap<>()); return m; } + @SuppressWarnings({"rawtypes"}) public static List getListValue(Map lookupMap, String key) { List l = (List) lookupMap.get(key); - if (l == null) lookupMap.put(key, l= new ArrayList()); + if (l == null) lookupMap.put(key, l= new ArrayList<>()); return l; } @@ -193,6 +196,7 @@ public Category getCategory() { * The data object defaults to EMPTY_MAP if not set */ public static class SecurityConfig { + @SuppressWarnings({"unchecked"}) private Map data = Collections.EMPTY_MAP; private int version = -1; @@ -213,6 +217,7 @@ public SecurityConfig setData(Map data) { * @param data an Object of type Map<String,Object> * @return SecurityConf object (builder pattern) */ + @SuppressWarnings({"unchecked"}) public SecurityConfig setData(Object data) { if (data instanceof Map) { this.data = (Map) data; diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java index 19d9d8148a25..cbc7ed4c7e16 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java @@ -127,7 +127,6 @@ private void getSegmentsInfo(SolrQueryRequest req, SolrQueryResponse rsp) SimpleOrderedMap segmentInfos = new SimpleOrderedMap<>(); SolrCore core = req.getCore(); - RefCounted iwRef = core.getSolrCoreState().getIndexWriter(core); SimpleOrderedMap infosInfo = new SimpleOrderedMap<>(); Version minVersion = infos.getMinSegmentLuceneVersion(); if (minVersion != null) { @@ -149,6 +148,7 @@ private void getSegmentsInfo(SolrQueryRequest req, SolrQueryResponse rsp) coreInfo.add("indexDir", core.getIndexDir()); coreInfo.add("sizeInGB", (double)core.getIndexSize() / GB); + RefCounted iwRef = 
core.getSolrCoreState().getIndexWriter(core); if (iwRef != null) { try { IndexWriter iw = iwRef.get(); @@ -238,10 +238,7 @@ private SimpleOrderedMap getSegmentInfo( SegmentReader seg = null; for (LeafReaderContext lrc : leafContexts) { LeafReader leafReader = lrc.reader(); - // unwrap - while (leafReader instanceof FilterLeafReader) { - leafReader = ((FilterLeafReader)leafReader).getDelegate(); - } + leafReader = FilterLeafReader.unwrap(leafReader); if (leafReader instanceof SegmentReader) { SegmentReader sr = (SegmentReader)leafReader; if (sr.getSegmentInfo().info.equals(segmentCommitInfo.info)) { diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ShowFileRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/ShowFileRequestHandler.java index d3f3f8c63cc1..a0002901913e 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/ShowFileRequestHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/ShowFileRequestHandler.java @@ -103,7 +103,7 @@ public ShowFileRequestHandler() } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init( args ); hiddenFiles = initHidden(invariants); } diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SolrInfoMBeanHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/SolrInfoMBeanHandler.java index 7654fa73800f..80f2881a911c 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/SolrInfoMBeanHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/SolrInfoMBeanHandler.java @@ -184,6 +184,7 @@ protected NamedList>> getDiff( // System.out.println( "NOW: " + now_txt ); // Calculate the differences + @SuppressWarnings({"rawtypes"}) NamedList diff = diffNamedList(ref_bean,now_bean); diff.add( "_changed_", true ); // flag the changed thing cat.add(name, diff); @@ -204,6 +205,7 @@ else if(includeAll) { return changed; } + @SuppressWarnings({"rawtypes"}) public NamedList diffNamedList(NamedList ref, NamedList now) { NamedList out = new SimpleOrderedMap(); for(int i=0; i getSecurityInfo(SolrQueryRequest req) info.add("username", username); // Mapped roles for this principal + @SuppressWarnings("resource") AuthorizationPlugin auth = cc==null? null: cc.getAuthorizationPlugin(); if (auth != null) { - RuleBasedAuthorizationPlugin rbap = (RuleBasedAuthorizationPlugin) auth; - Set roles = rbap.getRoles(username); + RuleBasedAuthorizationPluginBase rbap = (RuleBasedAuthorizationPluginBase) auth; + Set roles = rbap.getUserRoles(req.getUserPrincipal()); info.add("roles", roles); } } diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ZookeeperInfoHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/ZookeeperInfoHandler.java index 5c5bfb7da56d..9ee7a92b2e05 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/ZookeeperInfoHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/ZookeeperInfoHandler.java @@ -262,7 +262,7 @@ public void process(WatchedEvent event) { } /** - * Create a merged view of all collections (internal from /clusterstate.json and external from /collections/?/state.json + * Create a merged view of all collections from /collections/?/state.json */ private synchronized List getCollections(SolrZkClient zkClient) throws KeeperException, InterruptedException { if (cachedCollections == null) { @@ -283,7 +283,7 @@ private synchronized List getCollections(SolrZkClient zkClient) throws K /** * Gets the requested page of collections after applying filters and offsets. 
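
The SegmentsInfoRequestHandler hunk above swaps a hand-rolled delegate loop for FilterLeafReader.unwrap(leafReader), the same cleanup LukeRequestHandler received earlier. What unwrap does, modeled with stand-in types rather than Lucene's readers:

// Stand-in types; with Lucene's classes the loop is
// "while (r instanceof FilterLeafReader) r = ((FilterLeafReader) r).getDelegate();"
interface Reader { }

class BaseReader implements Reader { }

class FilterReader implements Reader {
  final Reader delegate;
  FilterReader(Reader delegate) { this.delegate = delegate; }
  Reader getDelegate() { return delegate; }
}

public class UnwrapDemo {
  // Unwind any chain of filter readers down to the innermost reader.
  static Reader unwrap(Reader r) {
    while (r instanceof FilterReader) {
      r = ((FilterReader) r).getDelegate();
    }
    return r;
  }

  public static void main(String[] args) {
    Reader doublyWrapped = new FilterReader(new FilterReader(new BaseReader()));
    System.out.println(unwrap(doublyWrapped) instanceof BaseReader); // true
  }
}
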
*/ - public PageOfCollections fetchPage(PageOfCollections page, SolrZkClient zkClient) + public void fetchPage(PageOfCollections page, SolrZkClient zkClient) throws KeeperException, InterruptedException { @@ -305,8 +305,6 @@ public PageOfCollections fetchPage(PageOfCollections page, SolrZkClient zkClient // status until reading all status objects from ZK if (page.filterType != FilterType.status) page.selectPage(children); - - return page; } @Override @@ -353,6 +351,7 @@ public void command() { private PagedCollectionSupport pagingSupport; @Override + @SuppressWarnings({"unchecked"}) public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception { final SolrParams params = req.getParams(); Map map = new HashMap<>(1); @@ -382,7 +381,7 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw String dumpS = params.get("dump"); boolean dump = dumpS != null && dumpS.equals("true"); - int start = params.getInt("start", 0); + int start = params.getInt("start", 0); // Note start ignored if rows not specified int rows = params.getInt("rows", -1); String filterType = params.get("filterType"); @@ -404,12 +403,19 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw printer.detail = detail; printer.dump = dump; boolean isGraphView = "graph".equals(params.get("view")); - printer.page = (isGraphView && "/clusterstate.json".equals(path)) - ? new PageOfCollections(start, rows, type, filter) : null; + // There is no znode /clusterstate.json (removed in Solr 9), but we do as if there's one and return collection listing + // Need to change services.js if cleaning up here, collection list is used from Admin UI Cloud - Graph + boolean paginateCollections = (isGraphView && "/clusterstate.json".equals(path)); + printer.page = paginateCollections ? new PageOfCollections(start, rows, type, filter) : null; printer.pagingSupport = pagingSupport; try { - printer.print(path); + if (paginateCollections) { + // List collections and allow pagination, but no specific znode info like when looking at a normal ZK path + printer.printPaginatedCollections(); + } else { + printer.print(path); + } } finally { printer.close(); } @@ -431,7 +437,7 @@ static class ZKPrinter implements ContentStream { String keeperAddr; // the address we're connected to final BAOS baos = new BAOS(); - final Writer out = new OutputStreamWriter(baos, StandardCharsets.UTF_8); + final Writer out = new OutputStreamWriter(baos, StandardCharsets.UTF_8); SolrZkClient zkClient; PageOfCollections page; @@ -452,7 +458,7 @@ public void close() { } } - // main entry point + // main entry point for printing from path void print(String path) throws IOException { if (zkClient == null) { return; @@ -500,6 +506,90 @@ void print(String path) throws IOException { out.write(chars.toString()); } + // main entry point for printing collections + @SuppressWarnings("unchecked") + void printPaginatedCollections() throws IOException { + SortedMap collectionStates; + try { + // support paging of the collections graph view (in case there are many collections) + // fetch the requested page of collections and then retrieve the state for each + pagingSupport.fetchPage(page, zkClient); + // keep track of how many collections match the filter + boolean applyStatusFilter = (page.filterType == FilterType.status && page.filter != null); + List matchesStatusFilter = applyStatusFilter ? new ArrayList() : null; + Set liveNodes = applyStatusFilter ? 
+ zkController.getZkStateReader().getClusterState().getLiveNodes() : null; + + collectionStates = new TreeMap<>(pagingSupport); + for (String collection : page.selected) { + // Get collection state from ZK + String collStatePath = String.format(Locale.ROOT, "/collections/%s/state.json", collection); + String childDataStr = null; + try { + byte[] childData = zkClient.getData(collStatePath, null, null, true); + if (childData != null) + childDataStr = (new BytesRef(childData)).utf8ToString(); + } catch (KeeperException.NoNodeException nne) { + log.warn("State for collection {} not found.", collection); + } catch (Exception childErr) { + log.error("Failed to get {} due to", collStatePath, childErr); + } + + if (childDataStr != null) { + Map extColl = (Map) Utils.fromJSONString(childDataStr); + Object collectionState = extColl.get(collection); + + if (applyStatusFilter) { + // verify this collection matches the filtered state + if (page.matchesStatusFilter((Map) collectionState, liveNodes)) { + matchesStatusFilter.add(collection); + collectionStates.put(collection, collectionState); + } + } else { + collectionStates.put(collection, collectionState); + } + } + } + + if (applyStatusFilter) { + // update the paged navigation info after applying the status filter + page.selectPage(matchesStatusFilter); + + // rebuild the Map of state data + SortedMap map = new TreeMap(pagingSupport); + for (String next : page.selected) + map.put(next, collectionStates.get(next)); + collectionStates = map; + } + } catch (KeeperException | InterruptedException e) { + writeError(500, e.toString()); + return; + } + + CharArr chars = new CharArr(); + JSONWriter json = new JSONWriter(chars, 2); + json.startObject(); + + json.writeString("znode"); + json.writeNameSeparator(); + json.startObject(); + + // For some reason, without this the Json is badly formed + writeKeyValue(json, PATH, "Undefined", true); + + if (collectionStates != null) { + CharArr collectionOut = new CharArr(); + new JSONWriter(collectionOut, 2).write(collectionStates); + writeKeyValue(json, "data", collectionOut.toString(), false); + } + + writeKeyValue(json, "paging", page.getPagingHeader(), false); + + json.endObject(); + json.endObject(); + out.write(chars.toString()); + } + void writeError(int code, String msg) throws IOException { throw new SolrException(ErrorCode.getErrorCode(code), msg); /*response.setStatus(code); @@ -521,7 +611,6 @@ void writeError(int code, String msg) throws IOException { out.write(chars.toString());*/ } - boolean printTree(JSONWriter json, String path) throws IOException { String label = path; if (!fullpath) { @@ -623,7 +712,6 @@ public void writeKeyValue(JSONWriter json, String k, Object v, boolean isFirst) json.write(v); } - @SuppressWarnings("unchecked") boolean printZnode(JSONWriter json, String path) throws IOException { try { String dataStr = null; @@ -638,95 +726,6 @@ boolean printZnode(JSONWriter json, String path) throws IOException { dataStrErr = "data is not parsable as a utf8 String: " + e.toString(); } } - // support paging of the collections graph view (in case there are many collections) - if (page != null) { - // we've already pulled the data for /clusterstate.json from ZooKeeper above, - // but it needs to be parsed into a map so we can lookup collection states before - // trying to find them in the /collections/?/state.json znode - Map clusterstateJsonMap = null; - if (dataStr != null) { - try { - clusterstateJsonMap = (Map) Utils.fromJSONString(dataStr); - } catch (Exception e) { - throw new 
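
The status-filter handling in printPaginatedCollections() is easy to misread: a page is selected first, non-matching collections are dropped while their state.json is fetched, and then the page is recomputed over the matches so the paging header reflects the filtered count. The control flow in miniature, with a trivial predicate standing in for matchesStatusFilter():

import java.util.ArrayList;
import java.util.List;

public class FilterThenRepage {
  public static void main(String[] args) {
    List<String> selected = new ArrayList<>(List.of("coll_a", "coll_b", "coll_c", "coll_d"));
    int rows = 3;

    // first pass: fetch state for the selected page, keeping only matches
    List<String> matches = new ArrayList<>();
    for (String name : selected.subList(0, Math.min(rows, selected.size()))) {
      boolean healthy = !name.equals("coll_b"); // stand-in for matchesStatusFilter(state, liveNodes)
      if (healthy) {
        matches.add(name);
      }
    }

    // second pass: re-page over the filtered names so navigation counts stay correct
    List<String> shown = matches.subList(0, Math.min(rows, matches.size()));
    System.out.println(shown); // [coll_a, coll_c]
  }
}
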
SolrException(ErrorCode.SERVER_ERROR, - "Failed to parse /clusterstate.json from ZooKeeper due to: " + e, e); - } - } else { - clusterstateJsonMap = Utils.makeMap(); - } - - // fetch the requested page of collections and then retrieve the state for each - page = pagingSupport.fetchPage(page, zkClient); - // keep track of how many collections match the filter - boolean applyStatusFilter = - (page.filterType == FilterType.status && page.filter != null); - List matchesStatusFilter = applyStatusFilter ? new ArrayList() : null; - Set liveNodes = applyStatusFilter ? - zkController.getZkStateReader().getClusterState().getLiveNodes() : null; - - SortedMap collectionStates = new TreeMap(pagingSupport); - for (String collection : page.selected) { - Object collectionState = clusterstateJsonMap.get(collection); - if (collectionState != null) { - // collection state was in /clusterstate.json - if (applyStatusFilter) { - // verify this collection matches the status filter - if (page.matchesStatusFilter((Map) collectionState, liveNodes)) { - matchesStatusFilter.add(collection); - collectionStates.put(collection, collectionState); - } - } else { - collectionStates.put(collection, collectionState); - } - } else { - // looks like an external collection ... - String collStatePath = String.format(Locale.ROOT, "/collections/%s/state.json", collection); - String childDataStr = null; - try { - byte[] childData = zkClient.getData(collStatePath, null, null, true); - if (childData != null) - childDataStr = (new BytesRef(childData)).utf8ToString(); - } catch (KeeperException.NoNodeException nne) { - log.warn("State for collection {} not found in /clusterstate.json or /collections/{}/state.json!" - , collection, collection); - } catch (Exception childErr) { - log.error("Failed to get {} due to", collStatePath, childErr); - } - - if (childDataStr != null) { - Map extColl = (Map) Utils.fromJSONString(childDataStr); - collectionState = extColl.get(collection); - - if (applyStatusFilter) { - // verify this collection matches the filtered state - if (page.matchesStatusFilter((Map) collectionState, liveNodes)) { - matchesStatusFilter.add(collection); - collectionStates.put(collection, collectionState); - } - } else { - collectionStates.put(collection, collectionState); - } - } - } - } - - if (applyStatusFilter) { - // update the paged navigation info after applying the status filter - page.selectPage(matchesStatusFilter); - - // rebuild the Map of state data - SortedMap map = new TreeMap(pagingSupport); - for (String next : page.selected) - map.put(next, collectionStates.get(next)); - collectionStates = map; - } - - if (collectionStates != null) { - CharArr out = new CharArr(); - new JSONWriter(out, 2).write(collectionStates); - dataStr = out.toString(); - } - } json.writeString("znode"); json.writeNameSeparator(); diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ZookeeperStatusHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/ZookeeperStatusHandler.java index dd1833b3a75a..33e324460a89 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/ZookeeperStatusHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/ZookeeperStatusHandler.java @@ -73,8 +73,8 @@ public Category getCategory() { return Category.ADMIN; } - @SuppressWarnings("rawtypes") @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception { NamedList values = rsp.getValues(); if (cores.isZooKeeperAware()) { @@ -94,6 +94,7 @@ 
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw * @param zkDynamicConfig list of zk dynamic config objects * @return map of zookeeper config and status per zk host */ + @SuppressWarnings({"unchecked"}) protected Map getZkStatus(String zkHost, ZkDynamicConfig zkDynamicConfig) { final ZkDynamicConfig hostsFromConnectionString = ZkDynamicConfig.fromZkConnectString(zkHost); final ZkDynamicConfig zookeepers; @@ -147,7 +148,10 @@ protected Map getZkStatus(String zkHost, ZkDynamicConfig zkDynam followers++; } else if ("leader".equals(state)) { leaders++; - reportedFollowers = Integer.parseInt(String.valueOf(stat.get("zk_followers"))); + reportedFollowers = Math.max( + Integer.parseInt((String) stat.getOrDefault("zk_followers", "0")), + Integer.parseInt((String) stat.getOrDefault("zk_synced_followers", "0")) + ); } else if ("standalone".equals(state)) { standalone++; } diff --git a/solr/core/src/java/org/apache/solr/handler/component/CloudReplicaSource.java b/solr/core/src/java/org/apache/solr/handler/component/CloudReplicaSource.java index 5ff8ec9ce1f2..4d3537cb730a 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/CloudReplicaSource.java +++ b/solr/core/src/java/org/apache/solr/handler/component/CloudReplicaSource.java @@ -59,6 +59,7 @@ private CloudReplicaSource(Builder builder) { } } + @SuppressWarnings({"unchecked", "rawtypes"}) private void withClusterState(Builder builder, SolrParams params) { ClusterState clusterState = builder.zkStateReader.getClusterState(); String shardKeys = params.get(ShardParams._ROUTE_); @@ -98,6 +99,7 @@ private void withClusterState(Builder builder, SolrParams params) { } } + @SuppressWarnings({"unchecked", "rawtypes"}) private void withShardsParam(Builder builder, String shardsParam) { List sliceOrUrls = StrUtils.splitSmart(shardsParam, ",", true); this.slices = new String[sliceOrUrls.size()]; diff --git a/solr/core/src/java/org/apache/solr/handler/component/DebugComponent.java b/solr/core/src/java/org/apache/solr/handler/component/DebugComponent.java index 7d9649469c8f..31fbf99f3bb6 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/DebugComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/DebugComponent.java @@ -100,9 +100,11 @@ public void process(ResponseBuilder rb) throws IOException results = rb.getResults().docList; } + @SuppressWarnings({"rawtypes"}) NamedList stdinfo = SolrPluginUtils.doStandardDebug( rb.req, rb.getQueryString(), rb.wrap(rb.getQuery()), results, rb.isDebugQuery(), rb.isDebugResults()); + @SuppressWarnings({"rawtypes"}) NamedList info = rb.getDebugInfo(); if( info == null ) { rb.setDebugInfo( stdinfo ); @@ -225,11 +227,13 @@ public void handleResponses(ResponseBuilder rb, ShardRequest sreq) { private final static Set EXCLUDE_SET = Set.of("explain"); @Override + @SuppressWarnings({"unchecked"}) public void finishStage(ResponseBuilder rb) { if (rb.isDebug() && rb.stage == ResponseBuilder.STAGE_GET_FIELDS) { NamedList info = rb.getDebugInfo(); NamedList explain = new SimpleOrderedMap<>(); + @SuppressWarnings({"rawtypes"}) Map.Entry[] arr = new NamedList.NamedListEntry[rb.resultIds.size()]; // Will be set to true if there is at least one response with PURPOSE_GET_DEBUG boolean hasGetDebugResponses = false; @@ -241,11 +245,14 @@ public void finishStage(ResponseBuilder rb) { // this should only happen when using shards.tolerant=true continue; } + @SuppressWarnings({"rawtypes"}) NamedList sdebug = (NamedList)srsp.getSolrResponse().getResponse().get("debug"); + 
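
The ZookeeperStatusHandler hunk above hardens the follower count: rather than assuming the leader's mntr output contains zk_followers, it takes the max of zk_followers and zk_synced_followers and defaults missing keys to "0", presumably because which key is populated differs across ZooKeeper versions. The parsing in isolation:

import java.util.Map;

public class FollowerCount {
  // Mirrors the patched logic: tolerate either key being absent.
  static int reportedFollowers(Map<String, Object> stat) {
    return Math.max(
        Integer.parseInt((String) stat.getOrDefault("zk_followers", "0")),
        Integer.parseInt((String) stat.getOrDefault("zk_synced_followers", "0")));
  }

  public static void main(String[] args) {
    System.out.println(reportedFollowers(Map.of("zk_followers", "2")));        // 2
    System.out.println(reportedFollowers(Map.of("zk_synced_followers", "2"))); // 2
    System.out.println(reportedFollowers(Map.of()));                           // 0
  }
}
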
info = (NamedList)merge(sdebug, info, EXCLUDE_SET); if ((sreq.purpose & ShardRequest.PURPOSE_GET_DEBUG) != 0) { hasGetDebugResponses = true; if (rb.isDebugResults()) { + @SuppressWarnings({"rawtypes"}) NamedList sexplain = (NamedList)sdebug.get("explain"); SolrPluginUtils.copyNamedListIntoArrayByDocPosInResponse(sexplain, rb.resultIds, arr); } @@ -306,6 +313,7 @@ private NamedList getTrackResponse(ShardResponse shardResponse) { return namedList; } + @SuppressWarnings({"unchecked", "rawtypes"}) protected Object merge(Object source, Object dest, Set exclude) { if (source == null) return dest; if (dest == null) { diff --git a/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java b/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java index 5e7e5ea521a9..0f523bf1b91c 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java @@ -419,6 +419,8 @@ public void process(ResponseBuilder rb) throws IOException { ReturnFields returnFields = rb.rsp.getReturnFields(); LongObjectMap groups = ((GroupCollector) groupExpandCollector).getGroups(); + + @SuppressWarnings({"rawtypes"}) NamedList outMap = new SimpleOrderedMap(); CharsRefBuilder charsRef = new CharsRefBuilder(); for (LongObjectCursor cursor : groups) { @@ -439,13 +441,13 @@ public void process(ResponseBuilder rb) throws IOException { scores[i] = scoreDoc.score; } assert topDocs.totalHits.relation == TotalHits.Relation.EQUAL_TO; - DocSlice slice = new DocSlice(0, docs.length, docs, scores, topDocs.totalHits.value, Float.NaN); + DocSlice slice = new DocSlice(0, docs.length, docs, scores, topDocs.totalHits.value, Float.NaN, TotalHits.Relation.EQUAL_TO); addGroupSliceToOutputMap(fieldType, ordBytes, outMap, charsRef, groupValue, slice); } } else { int totalHits = ((TotalHitCountCollector) cursor.value).getTotalHits(); if (totalHits > 0) { - DocSlice slice = new DocSlice(0, 0, null, null, totalHits, 0); + DocSlice slice = new DocSlice(0, 0, null, null, totalHits, 0, TotalHits.Relation.EQUAL_TO); addGroupSliceToOutputMap(fieldType, ordBytes, outMap, charsRef, groupValue, slice); } } @@ -454,7 +456,10 @@ public void process(ResponseBuilder rb) throws IOException { rb.rsp.add("expanded", outMap); } - private void addGroupSliceToOutputMap(FieldType fieldType, IntObjectHashMap ordBytes, NamedList outMap, CharsRefBuilder charsRef, long groupValue, DocSlice slice) { + + @SuppressWarnings({"unchecked"}) + private void addGroupSliceToOutputMap(FieldType fieldType, IntObjectHashMap ordBytes, + @SuppressWarnings({"rawtypes"})NamedList outMap, CharsRefBuilder charsRef, long groupValue, DocSlice slice) { if(fieldType instanceof StrField) { final BytesRef bytesRef = ordBytes.get((int)groupValue); fieldType.indexedToReadable(bytesRef, charsRef); @@ -484,8 +489,9 @@ public void modifyRequest(ResponseBuilder rb, SearchComponent who, ShardRequest } } - @SuppressWarnings("unchecked") + @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void handleResponses(ResponseBuilder rb, ShardRequest sreq) { if (!rb.doExpand) { @@ -511,6 +517,7 @@ public void handleResponses(ResponseBuilder rb, ShardRequest sreq) { } } + @SuppressWarnings("rawtypes") @Override public void finishStage(ResponseBuilder rb) { diff --git a/solr/core/src/java/org/apache/solr/handler/component/FieldFacetStats.java b/solr/core/src/java/org/apache/solr/handler/component/FieldFacetStats.java index 2b8373a20204..3d7075b54bd5 100644 --- 
a/solr/core/src/java/org/apache/solr/handler/component/FieldFacetStats.java +++ b/solr/core/src/java/org/apache/solr/handler/component/FieldFacetStats.java @@ -143,13 +143,13 @@ public boolean accumulateTermNum(int statsTermNum, BytesRef value) throws IOExce facetStatsTerms.add(new HashMap()); } for (Map.Entry pairs : facetStatsTerms.get(statsTermNum).entrySet()) { - String key = (String) pairs.getKey(); + String key = pairs.getKey(); StatsValues facetStats = facetStatsValues.get(key); if (facetStats == null) { facetStats = StatsValuesFactory.createStatsValues(statsField); facetStatsValues.put(key, facetStats); } - Integer count = (Integer) pairs.getValue(); + Integer count = pairs.getValue(); if (count != null) { facetStats.accumulate(value, count); } diff --git a/solr/core/src/java/org/apache/solr/handler/component/HighlightComponent.java b/solr/core/src/java/org/apache/solr/handler/component/HighlightComponent.java index f81f78974b1c..62b72d268b2e 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/HighlightComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/HighlightComponent.java @@ -168,6 +168,7 @@ public void process(ResponseBuilder rb) throws IOException { // No highlighting if there is no query -- consider q.alt=*:* if( highlightQuery != null ) { + @SuppressWarnings({"rawtypes"}) NamedList sumData = highlighter.doHighlighting( rb.getResults().docList, highlightQuery, @@ -276,22 +277,26 @@ protected String highlightingResponseField() { return "highlighting"; } - protected Object convertHighlights(NamedList hl) { + protected Object convertHighlights(@SuppressWarnings({"rawtypes"})NamedList hl) { return hl; } + @SuppressWarnings({"rawtypes"}) protected Object[] newHighlightsArray(int size) { return new NamedList.NamedListEntry[size]; } protected void addHighlights(Object[] objArr, Object obj, Map resultIds) { + @SuppressWarnings({"unchecked"}) Map.Entry[] arr = (Map.Entry[])objArr; + @SuppressWarnings({"rawtypes"}) NamedList hl = (NamedList)obj; SolrPluginUtils.copyNamedListIntoArrayByDocPosInResponse(hl, resultIds, arr); } protected Object getAllHighlights(Object[] objArr) { - final Map.Entry[] arr = (Map.Entry[])objArr; + @SuppressWarnings({"unchecked"}) + final Map.Entry[] arr = (Map.Entry[])objArr; // remove nulls in case not all docs were able to be retrieved return SolrPluginUtils.removeNulls(arr, new SimpleOrderedMap<>()); } diff --git a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java index f23cf163a96a..17252c700e84 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java @@ -17,26 +17,15 @@ package org.apache.solr.handler.component; import java.io.IOException; -import java.net.ConnectException; -import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.Map; import java.util.Set; -import java.util.concurrent.Callable; import java.util.concurrent.CompletionService; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; - -import io.opentracing.Span; -import io.opentracing.Tracer; -import io.opentracing.propagation.Format; import org.apache.solr.client.solrj.SolrRequest; -import org.apache.solr.client.solrj.SolrResponse; import org.apache.solr.client.solrj.SolrServerException; import 
org.apache.solr.client.solrj.impl.Http2SolrClient; -import org.apache.solr.client.solrj.impl.LBSolrClient; import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.client.solrj.routing.ReplicaListTransformer; import org.apache.solr.cloud.CloudDescriptor; @@ -44,17 +33,12 @@ import org.apache.solr.common.SolrException; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.ZkCoreNodeProps; -import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.ShardParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.core.CoreDescriptor; import org.apache.solr.request.SolrQueryRequest; -import org.apache.solr.request.SolrRequestInfo; -import org.apache.solr.util.tracing.GlobalTracer; -import org.apache.solr.util.tracing.SolrRequestCarrier; -import org.slf4j.MDC; public class HttpShardHandler extends ShardHandler { /** @@ -65,10 +49,9 @@ public class HttpShardHandler extends ShardHandler { */ public static String ONLY_NRT_REPLICAS = "distribOnlyRealtime"; - private HttpShardHandlerFactory httpShardHandlerFactory; + final HttpShardHandlerFactory httpShardHandlerFactory; private CompletionService completionService; private Set> pending; - private Map> shardToURLs; private Http2SolrClient httpClient; public HttpShardHandler(HttpShardHandlerFactory httpShardHandlerFactory, Http2SolrClient httpClient) { @@ -76,134 +59,21 @@ public HttpShardHandler(HttpShardHandlerFactory httpShardHandlerFactory, Http2So this.httpShardHandlerFactory = httpShardHandlerFactory; completionService = httpShardHandlerFactory.newCompletionService(); pending = new HashSet<>(); - - // maps "localhost:8983|localhost:7574" to a shuffled List("http://localhost:8983","http://localhost:7574") - // This is primarily to keep track of what order we should use to query the replicas of a shard - // so that we use the same replica for all phases of a distributed request. - shardToURLs = new HashMap<>(); - } - - - private static class SimpleSolrResponse extends SolrResponse { - - long elapsedTime; - - NamedList nl; - - @Override - public long getElapsedTime() { - return elapsedTime; - } - - @Override - public NamedList getResponse() { - return nl; - } - - @Override - public void setResponse(NamedList rsp) { - nl = rsp; - } - - @Override - public void setElapsedTime(long elapsedTime) { - this.elapsedTime = elapsedTime; - } } - // Not thread safe... don't use in Callable. - // Don't modify the returned URL list. - private List getURLs(String shard) { - List urls = shardToURLs.get(shard); - if (urls == null) { - urls = httpShardHandlerFactory.buildURLList(shard); - shardToURLs.put(shard, urls); - } - return urls; - } - @Override public void submit(final ShardRequest sreq, final String shard, final ModifiableSolrParams params) { - // do this outside of the callable for thread safety reasons - final List urls = getURLs(shard); - final Tracer tracer = GlobalTracer.getTracer(); - final Span span = tracer != null ? 
tracer.activeSpan() : null; - - Callable task = () -> { - - ShardResponse srsp = new ShardResponse(); - if (sreq.nodeName != null) { - srsp.setNodeName(sreq.nodeName); - } - srsp.setShardRequest(sreq); - srsp.setShard(shard); - SimpleSolrResponse ssr = new SimpleSolrResponse(); - srsp.setSolrResponse(ssr); - long startTime = System.nanoTime(); - - try { - params.remove(CommonParams.WT); // use default (currently javabin) - params.remove(CommonParams.VERSION); - - QueryRequest req = makeQueryRequest(sreq, params, shard); - if (tracer != null && span != null) { - tracer.inject(span.context(), Format.Builtin.HTTP_HEADERS, new SolrRequestCarrier(req)); - } - req.setMethod(SolrRequest.METHOD.POST); - SolrRequestInfo requestInfo = SolrRequestInfo.getRequestInfo(); - if (requestInfo != null) req.setUserPrincipal(requestInfo.getReq().getUserPrincipal()); - - // no need to set the response parser as binary is the default - // req.setResponseParser(new BinaryResponseParser()); - - // if there are no shards available for a slice, urls.size()==0 - if (urls.size() == 0) { - // TODO: what's the right error code here? We should use the same thing when - // all of the servers for a shard are down. - throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE, "no servers hosting shard: " + shard); - } - - if (urls.size() <= 1) { - String url = urls.get(0); - srsp.setShardAddress(url); - ssr.nl = request(url, req); - } else { - LBSolrClient.Rsp rsp = httpShardHandlerFactory.makeLoadBalancedRequest(req, urls); - ssr.nl = rsp.getResponse(); - srsp.setShardAddress(rsp.getServer()); - } - } catch (ConnectException cex) { - srsp.setException(cex); //???? - } catch (Exception th) { - srsp.setException(th); - if (th instanceof SolrException) { - srsp.setResponseCode(((SolrException) th).code()); - } else { - srsp.setResponseCode(-1); - } - } - - ssr.elapsedTime = TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS); - - return transfomResponse(sreq, srsp, shard); - }; - + ShardRequestor shardRequestor = new ShardRequestor(sreq, shard, params, this); try { - if (shard != null) { - MDC.put("ShardRequest.shards", shard); - } - if (urls != null && !urls.isEmpty()) { - MDC.put("ShardRequest.urlList", urls.toString()); - } - pending.add(completionService.submit(task)); + shardRequestor.init(); + pending.add(completionService.submit(shardRequestor)); } finally { - MDC.remove("ShardRequest.shards"); - MDC.remove("ShardRequest.urlList"); + shardRequestor.end(); } } - protected NamedList request(String url, SolrRequest req) throws IOException, SolrServerException { + protected NamedList request(String url, @SuppressWarnings({"rawtypes"})SolrRequest req) throws IOException, SolrServerException { req.setBasePath(url); return httpClient.request(req); } diff --git a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java index 1617dcb58482..80bddadfbe22 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java +++ b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java @@ -166,7 +166,7 @@ public ShardHandler getShardHandler(final HttpClient httpClient) { // a little hack for backward-compatibility when we are moving from apache http client to jetty client return new HttpShardHandler(this, null) { @Override - protected NamedList request(String url, SolrRequest req) throws IOException, SolrServerException { + 
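
The submit() rewrite above moves the per-shard request body out of an inline Callable lambda into the new ShardRequestor class, with the MDC bookkeeping folded into init()/end() around the submission. The resulting shape, sketched without any of the Solr request machinery:

import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class SubmitShape {
  // Stand-in for ShardRequestor: per-request state lives in the object,
  // and init()/end() bracket the submission (e.g. MDC.put/MDC.remove).
  static class Requestor implements Callable<String> {
    final String shard;
    Requestor(String shard) { this.shard = shard; }
    void init() { /* MDC.put("ShardRequest.shards", shard) */ }
    void end() { /* MDC.remove("ShardRequest.shards") */ }
    @Override public String call() { return "response from " + shard; }
  }

  public static void main(String[] args) throws Exception {
    ExecutorService pool = Executors.newFixedThreadPool(2);
    CompletionService<String> completionService = new ExecutorCompletionService<>(pool);
    Requestor requestor = new Requestor("shard1");
    Future<String> pendingResponse;
    try {
      requestor.init();
      pendingResponse = completionService.submit(requestor);
    } finally {
      requestor.end(); // mirrors the try/finally around completionService.submit(...)
    }
    System.out.println(pendingResponse.get());
    pool.shutdown();
  }
}
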
protected NamedList request(String url, @SuppressWarnings({"rawtypes"})SolrRequest req) throws IOException, SolrServerException { try (SolrClient client = new HttpSolrClient.Builder(url).withHttpClient(httpClient).build()) { return client.request(req); } @@ -212,7 +212,8 @@ private static String checkDefaultReplicaListTransformer(NamedList c, String } } - private void initReplicaListTransformers(NamedList routingConfig) { + @SuppressWarnings({"unchecked"}) + private void initReplicaListTransformers(@SuppressWarnings({"rawtypes"})NamedList routingConfig) { String defaultRouting = null; ReplicaListTransformerFactory stableRltFactory = null; ReplicaListTransformerFactory defaultRltFactory; @@ -253,6 +254,7 @@ private void initReplicaListTransformers(NamedList routingConfig) { @Override public void init(PluginInfo info) { StringBuilder sb = new StringBuilder(); + @SuppressWarnings({"rawtypes"}) NamedList args = info.initArgs; this.scheme = getParameter(args, INIT_URL_SCHEME, null,sb); if(StringUtils.endsWith(this.scheme, "://")) { @@ -330,9 +332,10 @@ public void setSecurityBuilder(HttpClientBuilderPlugin clientBuilderPlugin) { clientBuilderPlugin.setup(defaultClient); } - protected T getParameter(NamedList initArgs, String configKey, T defaultValue, StringBuilder sb) { + protected T getParameter(@SuppressWarnings({"rawtypes"})NamedList initArgs, String configKey, T defaultValue, StringBuilder sb) { T toReturn = defaultValue; if (initArgs != null) { + @SuppressWarnings({"unchecked"}) T temp = (T) initArgs.get(configKey); toReturn = (temp != null) ? temp : defaultValue; } @@ -408,6 +411,7 @@ public List buildURLList(String shard) { protected ReplicaListTransformer getReplicaListTransformer(final SolrQueryRequest req) { final SolrParams params = req.getParams(); final SolrCore core = req.getCore(); // explicit check for null core (temporary?, for tests) + @SuppressWarnings("resource") ZkController zkController = core == null ? 
null : core.getCoreContainer().getZkController(); if (zkController != null) { return requestReplicaListTransformerGenerator.getReplicaListTransformer( diff --git a/solr/core/src/java/org/apache/solr/handler/component/IterativeMergeStrategy.java b/solr/core/src/java/org/apache/solr/handler/component/IterativeMergeStrategy.java index cd407b17de7b..edc797ee2eee 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/IterativeMergeStrategy.java +++ b/solr/core/src/java/org/apache/solr/handler/component/IterativeMergeStrategy.java @@ -112,6 +112,7 @@ public CallBack call() throws Exception{ } public List> callBack(List responses, QueryRequest req) { + @SuppressWarnings({"unchecked", "rawtypes"}) List> futures = new ArrayList(); for(ShardResponse response : responses) { futures.add(this.executorService.submit(new CallBack(response, req))); diff --git a/solr/core/src/java/org/apache/solr/handler/component/MergeStrategy.java b/solr/core/src/java/org/apache/solr/handler/component/MergeStrategy.java index 503dfb5b9487..d449ac322bc3 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/MergeStrategy.java +++ b/solr/core/src/java/org/apache/solr/handler/component/MergeStrategy.java @@ -66,6 +66,7 @@ public interface MergeStrategy { * */ public int getCost(); + @SuppressWarnings({"rawtypes"}) final Comparator MERGE_COMP = (o1, o2) -> { MergeStrategy m1 = (MergeStrategy) o1; MergeStrategy m2 = (MergeStrategy) o2; diff --git a/solr/core/src/java/org/apache/solr/handler/component/PhrasesIdentificationComponent.java b/solr/core/src/java/org/apache/solr/handler/component/PhrasesIdentificationComponent.java index bac5a4c089af..575a358ae0f7 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/PhrasesIdentificationComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/PhrasesIdentificationComponent.java @@ -217,8 +217,10 @@ public void finishStage(ResponseBuilder rb) { if (null == rsp) continue; final NamedList top = rsp.getResponse(); if (null == top) continue; + @SuppressWarnings({"unchecked"}) final NamedList phrasesWrapper = (NamedList) top.get("phrases"); if (null == phrasesWrapper) continue; + @SuppressWarnings({"unchecked"}) final List> shardPhrases = (List>) phrasesWrapper.get("_all"); if (null == shardPhrases) continue; @@ -660,6 +662,7 @@ public static List> formatShardResponse(final List phr * Populates the phrases with (merged) stats from a remote shard * @see #formatShardResponse */ + @SuppressWarnings({"unchecked"}) public static void populateStats(final List phrases, final List> shardData) { final int numPhrases = phrases.size(); if (shardData.size() != numPhrases) { @@ -800,6 +803,7 @@ public String toString() { + "[" + position_start + ":" + position_end + "]"; } + @SuppressWarnings({"rawtypes"}) public NamedList getDetails() { SimpleOrderedMap out = new SimpleOrderedMap(); out.add("text", subSequence); diff --git a/solr/core/src/java/org/apache/solr/handler/component/PivotFacetFieldValueCollection.java b/solr/core/src/java/org/apache/solr/handler/component/PivotFacetFieldValueCollection.java index d9aeaeaef906..9af3a93d953b 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/PivotFacetFieldValueCollection.java +++ b/solr/core/src/java/org/apache/solr/handler/component/PivotFacetFieldValueCollection.java @@ -342,6 +342,7 @@ public int compare(PivotFacetValue left, PivotFacetValue right) { * A helper method for use in Comparator classes where object properties * are Comparable but may be null. 
*/ + @SuppressWarnings({"unchecked"}) static int compareWithNullLast(final Comparable o1, final Comparable o2) { if (null == o1) { if (null == o2) { diff --git a/solr/core/src/java/org/apache/solr/handler/component/PivotFacetHelper.java b/solr/core/src/java/org/apache/solr/handler/component/PivotFacetHelper.java index 33fe0860c615..77cd6a404286 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/PivotFacetHelper.java +++ b/solr/core/src/java/org/apache/solr/handler/component/PivotFacetHelper.java @@ -92,6 +92,7 @@ public static List decodeRefinementValuePath(String valuePath) { } /** @see PivotListEntry#VALUE */ + @SuppressWarnings({"rawtypes"}) public static Comparable getValue(NamedList pivotList) { return (Comparable) PivotListEntry.VALUE.extract(pivotList); } @@ -107,21 +108,25 @@ public static Integer getCount(NamedList pivotList) { } /** @see PivotListEntry#PIVOT */ + @SuppressWarnings({"unchecked"}) public static List> getPivots(NamedList pivotList) { return (List>) PivotListEntry.PIVOT.extract(pivotList); } /** @see PivotListEntry#STATS */ + @SuppressWarnings({"unchecked"}) public static NamedList>> getStats(NamedList pivotList) { return (NamedList>>) PivotListEntry.STATS.extract(pivotList); } /** @see PivotListEntry#QUERIES */ + @SuppressWarnings({"unchecked"}) public static NamedList getQueryCounts(NamedList pivotList) { return (NamedList) PivotListEntry.QUERIES.extract(pivotList); } /** @see PivotListEntry#RANGES */ + @SuppressWarnings({"unchecked"}) public static SimpleOrderedMap> getRanges(NamedList pivotList) { return (SimpleOrderedMap>) PivotListEntry.RANGES.extract(pivotList); } diff --git a/solr/core/src/java/org/apache/solr/handler/component/PivotFacetProcessor.java b/solr/core/src/java/org/apache/solr/handler/component/PivotFacetProcessor.java index 011d66238375..1069c50d12c1 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/PivotFacetProcessor.java +++ b/solr/core/src/java/org/apache/solr/handler/component/PivotFacetProcessor.java @@ -74,7 +74,7 @@ public SimpleOrderedMap>> process(String[] pivots) throws // rb._statsInfo may be null if stats=false, ie: refine requests // if that's the case, but we need to refine w/stats, then we'll lazy init our // own instance of StatsInfo - StatsInfo statsInfo = rb._statsInfo; + StatsInfo statsInfo = rb._statsInfo; SimpleOrderedMap>> pivotResponse = new SimpleOrderedMap<>(); for (String pivotList : pivots) { @@ -237,7 +237,7 @@ public SimpleOrderedMap>> process(String[] pivots) throws * * @return A list of StatsFields to compute for this pivot, or the empty list if none */ - private static List getTaggedStatsFields(StatsInfo statsInfo, + private static List getTaggedStatsFields(StatsInfo statsInfo, String statsLocalParam) { if (null == statsLocalParam || null == statsInfo) { return Collections.emptyList(); diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java index b03997aff975..853da1c3a387 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java @@ -44,6 +44,7 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; +import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.grouping.GroupDocs; import org.apache.lucene.search.grouping.SearchGroup; import org.apache.lucene.search.grouping.TopGroups; @@ -215,7 
+216,7 @@ public void prepare(ResponseBuilder rb) throws IOException rb.setFilters( filters ); } } - } catch (SyntaxError | FuzzyTermsEnum.FuzzyTermsException e) { + } catch (SyntaxError e) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e); } @@ -365,6 +366,7 @@ public void process(ResponseBuilder rb) throws IOException QueryCommand cmd = rb.createQueryCommand(); cmd.setTimeAllowed(timeAllowed); + cmd.setMinExactCount(getMinExactCount(params)); req.getContext().put(SolrIndexSearcher.STATS_SOURCE, statsCache.get(req)); @@ -401,6 +403,14 @@ public void process(ResponseBuilder rb) throws IOException doProcessUngroupedSearch(rb, cmd, result); } + private int getMinExactCount(SolrParams params) { + long minExactCount = params.getLong(CommonParams.MIN_EXACT_COUNT, Integer.MAX_VALUE); + if (minExactCount < 0 || minExactCount > Integer.MAX_VALUE) { + minExactCount = Integer.MAX_VALUE; + } + return (int)minExactCount; + } + protected void doFieldSortValues(ResponseBuilder rb, SolrIndexSearcher searcher) throws IOException { SolrQueryRequest req = rb.req; @@ -640,7 +650,7 @@ public void finishStage(ResponseBuilder rb) { protected static final EndResultTransformer MAIN_END_RESULT_TRANSFORMER = new MainEndResultTransformer(); protected static final EndResultTransformer SIMPLE_END_RESULT_TRANSFORMER = new SimpleEndResultTransformer(); - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) protected void groupedFinishStage(final ResponseBuilder rb) { // To have same response as non-distributed request. GroupingSpecification groupSpec = rb.getGroupingSpec(); @@ -796,6 +806,7 @@ protected boolean addFL(StringBuilder fl, String field, boolean additionalAdded) return true; } + @SuppressWarnings({"unchecked"}) protected void mergeIds(ResponseBuilder rb, ShardRequest sreq) { List mergeStrategies = rb.getMergeStrategies(); if(mergeStrategies != null) { @@ -840,6 +851,7 @@ protected void mergeIds(ResponseBuilder rb, ShardRequest sreq) { } long numFound = 0; + boolean hitCountIsExact = true; Float maxScore=null; boolean thereArePartialResults = false; Boolean segmentTerminatedEarly = null; @@ -871,6 +883,7 @@ protected void mergeIds(ResponseBuilder rb, ShardRequest sreq) { } docs = (SolrDocumentList)srsp.getSolrResponse().getResponse().get("response"); nl.add("numFound", docs.getNumFound()); + nl.add("numFoundExact", docs.getNumFoundExact()); nl.add("maxScore", docs.getMaxScore()); nl.add("shardAddress", srsp.getShardAddress()); } @@ -912,12 +925,18 @@ protected void mergeIds(ResponseBuilder rb, ShardRequest sreq) { maxScore = maxScore==null ? docs.getMaxScore() : Math.max(maxScore, docs.getMaxScore()); } numFound += docs.getNumFound(); + + if (hitCountIsExact && Boolean.FALSE.equals(docs.getNumFoundExact())) { + hitCountIsExact = false; + } + @SuppressWarnings({"rawtypes"}) NamedList sortFieldValues = (NamedList)(srsp.getSolrResponse().getResponse().get("sort_values")); if (sortFieldValues.size()==0 && // we bypass merging this response only if it's partial itself thisResponseIsPartial) { // but not the previous one!! 
continue; //fsv timeout yields empty sort_vlaues } + @SuppressWarnings({"rawtypes"}) NamedList unmarshalledSortFieldValues = unmarshalSortValues(ss, sortFieldValues, schema); // go through every doc in this response, construct a ShardDoc, and @@ -983,6 +1002,7 @@ protected void mergeIds(ResponseBuilder rb, ShardRequest sreq) { SolrDocumentList responseDocs = new SolrDocumentList(); if (maxScore!=null) responseDocs.setMaxScore(maxScore); responseDocs.setNumFound(numFound); + responseDocs.setNumFoundExact(hitCountIsExact); responseDocs.setStart(ss.getOffset()); // size appropriately for (int i=0; i fieldVals = (List) lastDoc.sortFieldValues.get(sf.getField()); nextCursorMarkValues.add(fieldVals.get(lastDoc.orderInShard)); } @@ -1061,7 +1082,8 @@ protected void populateNextCursorMarkFromMergedShards(ResponseBuilder rb) { rb.setNextCursorMark(nextCursorMark); } - protected NamedList unmarshalSortValues(SortSpec sortSpec, + @SuppressWarnings({"unchecked", "rawtypes"}) + protected NamedList unmarshalSortValues(SortSpec sortSpec, NamedList sortFieldValues, IndexSchema schema) { NamedList unmarshalledSortValsPerField = new NamedList(); @@ -1274,7 +1296,7 @@ private boolean doProcessSearchByIds(ResponseBuilder rb) throws IOException { } DocListAndSet res = new DocListAndSet(); - res.docList = new DocSlice(0, docs, luceneIds, null, docs, 0); + res.docList = new DocSlice(0, docs, luceneIds, null, docs, 0, TotalHits.Relation.EQUAL_TO); if (rb.isNeedDocSet()) { // TODO: create a cache for this! List queries = new ArrayList<>(); @@ -1486,7 +1508,12 @@ private void doProcessUngroupedSearch(ResponseBuilder rb, QueryCommand cmd, Quer SolrQueryResponse rsp = rb.rsp; SolrIndexSearcher searcher = req.getSearcher(); - searcher.search(result, cmd); + + try { + searcher.search(result, cmd); + } catch (FuzzyTermsEnum.FuzzyTermsException e) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e); + } rb.setResult(result); ResultContext ctx = new BasicResultContext(rb); diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java index 3697bbd1acf3..0292653a1b7f 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java @@ -150,7 +150,7 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore private final Map elevationProviderCache = new WeakHashMap<>(); @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { this.initArgs = args.toSolrParams(); } @@ -633,7 +633,9 @@ private void addDebugInfo(ResponseBuilder rb, Elevation elevation) { * @param context the {@link SolrQueryRequest#getContext()} or null if none. We'll cache our results here. */ //TODO consider simplifying to remove "boosted" arg which can be looked up in context via BOOSTED key? 
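The mergeIds hunks above add a hitCountIsExact flag: every shard now reports numFoundExact alongside numFound, and the coordinator sums the counts while downgrading the merged result to a lower bound as soon as any shard's count is inexact. A hedged sketch of just that merge rule, assuming a Java 16+ record as a stand-in for the per-shard response (ShardHits is not a Solr type):

import java.util.List;

public class MergedHitCount {
  // Stand-in for one shard's "response" section: the hit count plus the
  // numFoundExact flag saying whether the shard counted all hits.
  record ShardHits(long numFound, boolean numFoundExact) {}

  // The merged total is exact only if every shard's count was exact; a
  // single early-terminated shard turns the sum into a lower bound.
  static ShardHits merge(List<ShardHits> shards) {
    long total = 0;
    boolean exact = true;
    for (ShardHits s : shards) {
      total += s.numFound();
      if (!s.numFoundExact()) {
        exact = false;
      }
    }
    return new ShardHits(total, exact);
  }

  public static void main(String[] args) {
    System.out.println(merge(List.of(
        new ShardHits(100, true),
        new ShardHits(250, false))));
    // prints ShardHits[numFound=350, numFoundExact=false]
  }
}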
- public static IntIntHashMap getBoostDocs(SolrIndexSearcher indexSearcher, Set boosted, Map context) throws IOException { + @SuppressWarnings({"unchecked"}) + public static IntIntHashMap getBoostDocs(SolrIndexSearcher indexSearcher, Set boosted, + @SuppressWarnings({"rawtypes"})Map context) throws IOException { IntIntHashMap boostDocs = null; diff --git a/solr/core/src/java/org/apache/solr/handler/component/RangeFacetProcessor.java b/solr/core/src/java/org/apache/solr/handler/component/RangeFacetProcessor.java index d81bc9edb3ed..9dad2f611fab 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/RangeFacetProcessor.java +++ b/solr/core/src/java/org/apache/solr/handler/component/RangeFacetProcessor.java @@ -98,6 +98,7 @@ public void getFacetRangeCounts(RangeFacetRequest rangeFacetRequest, NamedList> NamedList getFacetRangeCounts(final RangeFacetRequest rfr) throws IOException, SyntaxError { diff --git a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java index 3c4d472549b1..093c4199015e 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java @@ -246,6 +246,7 @@ public void process(ResponseBuilder rb) throws IOException Object o = ulog.lookup(idBytes.get()); if (o != null) { // should currently be a List + @SuppressWarnings({"rawtypes"}) List entry = (List)o; assert entry.size() >= 3; int oper = (Integer)entry.get(UpdateLog.FLAGS_IDX) & UpdateLog.OPERATION_MASK; @@ -409,7 +410,9 @@ public SolrIndexSearcher getSearcher() { * after the resolving began) */ private static SolrDocument resolveFullDocument(SolrCore core, BytesRef idBytes, - ReturnFields returnFields, SolrInputDocument partialDoc, List logEntry, Set onlyTheseFields) throws IOException { + ReturnFields returnFields, SolrInputDocument partialDoc, + @SuppressWarnings({"rawtypes"}) List logEntry, + Set onlyTheseFields) throws IOException { if (idBytes == null || (logEntry.size() != 5 && logEntry.size() != 6)) { throw new SolrException(ErrorCode.INVALID_STATE, "Either Id field not present in partial document or log entry doesn't have previous version."); } @@ -547,6 +550,7 @@ private static SolrDocument mergePartialDocWithFullDocFromIndex(SolrCore core, B * was an in-place update. In that case, should this partial document be resolved to a full document (by following * back prevPointer/prevVersion)? 
*/ + @SuppressWarnings({"fallthrough"}) public static SolrInputDocument getInputDocumentFromTlog(SolrCore core, BytesRef idBytes, AtomicLong versionReturned, Set onlyTheseNonStoredDVs, boolean resolveFullDocument) { @@ -556,6 +560,7 @@ public static SolrInputDocument getInputDocumentFromTlog(SolrCore core, BytesRef Object o = ulog.lookup(idBytes); if (o != null) { // should currently be a List + @SuppressWarnings({"rawtypes"}) List entry = (List)o; assert entry.size() >= 3; int oper = (Integer)entry.get(0) & UpdateLog.OPERATION_MASK; @@ -693,7 +698,8 @@ public static SolrInputDocument getInputDocument(SolrCore core, BytesRef idBytes return sid; } - private static void decorateDocValueFields(SolrDocumentFetcher docFetcher, SolrDocumentBase doc, int docid, Set onlyTheseNonStoredDVs, boolean resolveNestedFields) throws IOException { + private static void decorateDocValueFields(SolrDocumentFetcher docFetcher, + @SuppressWarnings({"rawtypes"})SolrDocumentBase doc, int docid, Set onlyTheseNonStoredDVs, boolean resolveNestedFields) throws IOException { if (onlyTheseNonStoredDVs != null) { docFetcher.decorateDocValueFields(doc, docid, onlyTheseNonStoredDVs); } else { @@ -1004,6 +1010,7 @@ private void mergeResponses(ResponseBuilder rb) { // can get more than one response for (ShardResponse srsp : sreq.responses) { SolrResponse sr = srsp.getSolrResponse(); + @SuppressWarnings({"rawtypes"}) NamedList nl = sr.getResponse(); SolrDocumentList subList = (SolrDocumentList)nl.get("response"); docList.addAll(subList); @@ -1116,9 +1123,12 @@ public void processGetVersions(ResponseBuilder rb) throws IOException } public void processSyncWithLeader(ResponseBuilder rb, int nVersions, String syncWithLeader, List versions) { - PeerSyncWithLeader peerSync = new PeerSyncWithLeader(rb.req.getCore(), syncWithLeader, nVersions); - boolean success = peerSync.sync(versions).isSuccess(); - rb.rsp.add("syncWithLeader", success); + try (PeerSyncWithLeader peerSync = new PeerSyncWithLeader(rb.req.getCore(), syncWithLeader, nVersions)) { + boolean success = peerSync.sync(versions).isSuccess(); + rb.rsp.add("syncWithLeader", success); + } catch (IOException e) { + log.error("Error while closing", e); + } } @@ -1137,12 +1147,13 @@ public void processSync(ResponseBuilder rb, int nVersions, String sync) { List replicas = StrUtils.splitSmart(sync, ",", true); boolean cantReachIsSuccess = rb.req.getParams().getBool("cantReachIsSuccess", false); - - PeerSync peerSync = new PeerSync(rb.req.getCore(), replicas, nVersions, cantReachIsSuccess); - boolean success = peerSync.sync().isSuccess(); - - // TODO: more complex response? - rb.rsp.add("sync", success); + try (PeerSync peerSync = new PeerSync(rb.req.getCore(), replicas, nVersions, cantReachIsSuccess)) { + boolean success = peerSync.sync().isSuccess(); + // TODO: more complex response? 
+ rb.rsp.add("sync", success); + } catch (IOException e) { + log.error("Error while closing", e); + } } diff --git a/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java b/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java index 40af722c8a88..cf4719817f9b 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java +++ b/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java @@ -24,6 +24,7 @@ import java.util.Set; import org.apache.lucene.search.Query; +import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.grouping.SearchGroup; import org.apache.lucene.search.grouping.TopGroups; import org.apache.lucene.util.BytesRef; @@ -214,6 +215,7 @@ public void addDebug(Object val, String... path) { NamedList target = debugInfo; for (int i=0; i newTarget = (NamedList)debugInfo.get(elem); if (newTarget == null) { newTarget = new SimpleOrderedMap<>(); @@ -249,7 +251,7 @@ public void setDebug(boolean dbg){ public void addMergeStrategy(MergeStrategy mergeStrategy) { if(mergeStrategies == null) { - mergeStrategies = new ArrayList(); + mergeStrategies = new ArrayList<>(); } mergeStrategies.add(mergeStrategy); @@ -450,7 +452,7 @@ public void setResult(QueryResult result) { rsp.getResponseHeader().asShallowMap() .put(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY, Boolean.TRUE); if(getResults() != null && getResults().docList==null) { - getResults().docList = new DocSlice(0, 0, new int[] {}, new float[] {}, 0, 0); + getResults().docList = new DocSlice(0, 0, new int[] {}, new float[] {}, 0, 0, TotalHits.Relation.EQUAL_TO); } } final Boolean segmentTerminatedEarly = result.getSegmentTerminatedEarly(); diff --git a/solr/core/src/java/org/apache/solr/handler/component/SearchComponent.java b/solr/core/src/java/org/apache/solr/handler/component/SearchComponent.java index 5a524a9c14a4..fb27f686af09 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/SearchComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/SearchComponent.java @@ -92,7 +92,7 @@ public void setName(String name) { //////////////////////// NamedListInitializedPlugin methods ////////////////////// @Override - public void init( NamedList args ) + public void init( @SuppressWarnings({"rawtypes"})NamedList args ) { // By default do nothing } diff --git a/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java b/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java index 16e39fba3487..2051b2044516 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java @@ -26,6 +26,7 @@ import java.util.Set; import org.apache.lucene.index.ExitableDirectoryReader; +import org.apache.lucene.search.TotalHits; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.cloud.ZkController; import org.apache.solr.common.SolrDocumentList; @@ -169,6 +170,7 @@ public PackageLoader.Package.Version getPackageVersion() { } + @SuppressWarnings({"unchecked"}) private void initComponents() { Object declaredComponents = initArgs.get(INIT_COMPONENTS); List first = (List) initArgs.get(INIT_FIRST_COMPONENTS); @@ -278,6 +280,7 @@ protected ResponseBuilder newResponseBuilder(SolrQueryRequest req, SolrQueryResp } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception { List components = 
getComponents(); @@ -484,6 +487,7 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw } else { nl.add("numFound", rb.getResults().docList.matches()); + nl.add("numFoundExact", rb.getResults().docList.hitCountRelation() == TotalHits.Relation.EQUAL_TO); nl.add("maxScore", rb.getResults().docList.maxScore()); } nl.add("shardAddress", rb.shortCircuitedURL); diff --git a/solr/core/src/java/org/apache/solr/handler/component/ShardDoc.java b/solr/core/src/java/org/apache/solr/handler/component/ShardDoc.java index 2935aa1a4b8a..baca0fc66656 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/ShardDoc.java +++ b/solr/core/src/java/org/apache/solr/handler/component/ShardDoc.java @@ -32,6 +32,7 @@ public class ShardDoc extends FieldDoc { // this is currently the uniqueKeyField but // may be replaced with internal docid in a future release. + @SuppressWarnings({"rawtypes"}) public NamedList sortFieldValues; // sort field values for *all* docs in a particular shard. // this doc's values are in position orderInShard diff --git a/solr/core/src/java/org/apache/solr/handler/component/ShardFieldSortedHitQueue.java b/solr/core/src/java/org/apache/solr/handler/component/ShardFieldSortedHitQueue.java index ef0e6240ece3..e5598a3d3509 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/ShardFieldSortedHitQueue.java +++ b/solr/core/src/java/org/apache/solr/handler/component/ShardFieldSortedHitQueue.java @@ -41,10 +41,10 @@ public class ShardFieldSortedHitQueue extends PriorityQueue { /** The order of these fieldNames should correspond to the order of sort field values retrieved from the shard */ protected List fieldNames = new ArrayList<>(); + @SuppressWarnings({"unchecked", "rawtypes"}) public ShardFieldSortedHitQueue(SortField[] fields, int size, IndexSearcher searcher) { super(size); final int n = fields.length; - //noinspection unchecked comparators = new Comparator[n]; this.fields = new SortField[n]; for (int i = 0; i < n; ++i) { @@ -143,12 +143,14 @@ public ShardComparator(SortField sortField) { Object sortVal(ShardDoc shardDoc) { assert(shardDoc.sortFieldValues.getName(fieldNum).equals(fieldName)); + @SuppressWarnings({"rawtypes"}) List lst = (List)shardDoc.sortFieldValues.getVal(fieldNum); return lst.get(shardDoc.orderInShard); } } Comparator comparatorFieldComparator(SortField sortField) { + @SuppressWarnings({"rawtypes"}) final FieldComparator fieldComparator = sortField.getComparator(0, 0); return new ShardComparator(sortField) { // Since the PriorityQueue keeps the biggest elements by default, @@ -156,6 +158,7 @@ Comparator comparatorFieldComparator(SortField sortField) { // smallest elements are kept instead of the largest... hence // the negative sign. @Override + @SuppressWarnings({"unchecked"}) public int compare(final ShardDoc o1, final ShardDoc o2) { //noinspection unchecked return -fieldComparator.compareValues(sortVal(o1), sortVal(o2)); diff --git a/solr/core/src/java/org/apache/solr/handler/component/ShardRequestor.java b/solr/core/src/java/org/apache/solr/handler/component/ShardRequestor.java new file mode 100644 index 000000000000..c87f126b5c07 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/handler/component/ShardRequestor.java @@ -0,0 +1,178 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.handler.component; + +import io.opentracing.Span; +import io.opentracing.Tracer; +import io.opentracing.propagation.Format; +import java.net.ConnectException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.TimeUnit; +import org.apache.solr.client.solrj.SolrRequest; +import org.apache.solr.client.solrj.SolrResponse; +import org.apache.solr.client.solrj.impl.LBSolrClient; +import org.apache.solr.client.solrj.request.QueryRequest; +import org.apache.solr.common.SolrException; +import org.apache.solr.common.params.CommonParams; +import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.util.NamedList; +import org.apache.solr.request.SolrRequestInfo; +import org.apache.solr.util.tracing.GlobalTracer; +import org.apache.solr.util.tracing.SolrRequestCarrier; +import org.slf4j.MDC; + +class ShardRequestor implements Callable { + private final ShardRequest sreq; + private final String shard; + private final ModifiableSolrParams params; + private final Tracer tracer; + private final Span span; + private final List urls; + private final HttpShardHandler httpShardHandler; + + // maps "localhost:8983|localhost:7574" to a shuffled List("http://localhost:8983","http://localhost:7574") + // This is primarily to keep track of what order we should use to query the replicas of a shard + // so that we use the same replica for all phases of a distributed request. + private Map> shardToURLs = new HashMap<>(); + + public ShardRequestor(ShardRequest sreq, String shard, ModifiableSolrParams params, HttpShardHandler httpShardHandler) { + this.sreq = sreq; + this.shard = shard; + this.params = params; + this.httpShardHandler = httpShardHandler; + // do this before call() for thread safety reasons + this.urls = getURLs(shard); + tracer = GlobalTracer.getTracer(); + span = tracer != null ? tracer.activeSpan() : null; + } + + + // Not thread safe... don't use in Callable. + // Don't modify the returned URL list. 
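The shardToURLs map above exists so a ShardRequestor builds one (shuffled) URL list per shard and reuses it, pinning the replica order across all phases of a distributed request. The same memoization can be written with computeIfAbsent; a small sketch under that assumption (ShardUrlCache and its builder function are illustrative, not Solr API):

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

public class ShardUrlCache {
  private final Map<String, List<String>> shardToUrls = new HashMap<>();
  private final Function<String, List<String>> urlListBuilder;

  ShardUrlCache(Function<String, List<String>> urlListBuilder) {
    this.urlListBuilder = urlListBuilder;
  }

  // Build the URL list at most once per shard and hand back the cached copy,
  // so every phase of the request sees replicas in the same order. Like the
  // original, this is not thread safe and callers must not mutate the list.
  List<String> getUrls(String shard) {
    return shardToUrls.computeIfAbsent(shard, urlListBuilder);
  }

  public static void main(String[] args) {
    ShardUrlCache cache = new ShardUrlCache(s -> List.of("http://" + s + "/solr"));
    System.out.println(cache.getUrls("localhost:8983"));
    // The second lookup returns the same cached list instance.
    System.out.println(cache.getUrls("localhost:8983") == cache.getUrls("localhost:8983"));
  }
}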
+ private List getURLs(String shard) { + List urls = shardToURLs.get(shard); + if (urls == null) { + urls = httpShardHandler.httpShardHandlerFactory.buildURLList(shard); + shardToURLs.put(shard, urls); + } + return urls; + } + + void init() { + if (shard != null) { + MDC.put("ShardRequest.shards", shard); + } + if (urls != null && !urls.isEmpty()) { + MDC.put("ShardRequest.urlList", urls.toString()); + } + } + + void end() { + MDC.remove("ShardRequest.shards"); + MDC.remove("ShardRequest.urlList"); + } + + @Override + public ShardResponse call() throws Exception { + + ShardResponse srsp = new ShardResponse(); + if (sreq.nodeName != null) { + srsp.setNodeName(sreq.nodeName); + } + srsp.setShardRequest(sreq); + srsp.setShard(shard); + SimpleSolrResponse ssr = new SimpleSolrResponse(); + srsp.setSolrResponse(ssr); + long startTime = System.nanoTime(); + + try { + params.remove(CommonParams.WT); // use default (currently javabin) + params.remove(CommonParams.VERSION); + + QueryRequest req = httpShardHandler.makeQueryRequest(sreq, params, shard); + if (tracer != null && span != null) { + tracer.inject(span.context(), Format.Builtin.HTTP_HEADERS, new SolrRequestCarrier(req)); + } + req.setMethod(SolrRequest.METHOD.POST); + SolrRequestInfo requestInfo = SolrRequestInfo.getRequestInfo(); + if (requestInfo != null) req.setUserPrincipal(requestInfo.getReq().getUserPrincipal()); + + // no need to set the response parser as binary is the defaultJab + // req.setResponseParser(new BinaryResponseParser()); + + // if there are no shards available for a slice, urls.size()==0 + if (urls.size() == 0) { + // TODO: what's the right error code here? We should use the same thing when + // all of the servers for a shard are down. + throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE, "no servers hosting shard: " + shard); + } + + if (urls.size() <= 1) { + String url = urls.get(0); + srsp.setShardAddress(url); + ssr.nl = httpShardHandler.request(url, req); + } else { + LBSolrClient.Rsp rsp = httpShardHandler.httpShardHandlerFactory.makeLoadBalancedRequest(req, urls); + ssr.nl = rsp.getResponse(); + srsp.setShardAddress(rsp.getServer()); + } + } catch (ConnectException cex) { + srsp.setException(cex); //???? 
+ } catch (Exception th) { + srsp.setException(th); + if (th instanceof SolrException) { + srsp.setResponseCode(((SolrException) th).code()); + } else { + srsp.setResponseCode(-1); + } + } + + ssr.elapsedTime = TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS); + + return httpShardHandler.transfomResponse(sreq, srsp, shard); + } + + static class SimpleSolrResponse extends SolrResponse { + + long elapsedTime; + + NamedList nl; + + @Override + public long getElapsedTime() { + return elapsedTime; + } + + @Override + public NamedList getResponse() { + return nl; + } + + @Override + public void setResponse(NamedList rsp) { + nl = rsp; + } + + @Override + public void setElapsedTime(long elapsedTime) { + this.elapsedTime = elapsedTime; + } + } +} diff --git a/solr/core/src/java/org/apache/solr/handler/component/SortedDateStatsValues.java b/solr/core/src/java/org/apache/solr/handler/component/SortedDateStatsValues.java index 9e6076f182c0..33bea213fadc 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/SortedDateStatsValues.java +++ b/solr/core/src/java/org/apache/solr/handler/component/SortedDateStatsValues.java @@ -29,18 +29,18 @@ public class SortedDateStatsValues implements StatsValues { - private final DateStatsValues dsv; + private final StatsValuesFactory.DateStatsValues dsv; private final String fieldName; private SortedNumericDocValues sndv; - public SortedDateStatsValues(DateStatsValues dsv, StatsField field) { + public SortedDateStatsValues(StatsValuesFactory.DateStatsValues dsv, StatsField field) { this.dsv = dsv; this.fieldName = field.getSchemaField().getName(); } @Override - public void accumulate(NamedList stv) { + public void accumulate(@SuppressWarnings({"rawtypes"})NamedList stv) { dsv.accumulate(stv); } diff --git a/solr/core/src/java/org/apache/solr/handler/component/SortedNumericStatsValues.java b/solr/core/src/java/org/apache/solr/handler/component/SortedNumericStatsValues.java index 91b313d40e85..dcddf779157b 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/SortedNumericStatsValues.java +++ b/solr/core/src/java/org/apache/solr/handler/component/SortedNumericStatsValues.java @@ -29,20 +29,20 @@ public class SortedNumericStatsValues implements StatsValues { - private final NumericStatsValues nsv; + private final StatsValuesFactory.NumericStatsValues nsv; private final String fieldName; private final NumberType numberType; private SortedNumericDocValues sndv; - public SortedNumericStatsValues(NumericStatsValues nsv, StatsField field) { + public SortedNumericStatsValues(StatsValuesFactory.NumericStatsValues nsv, StatsField field) { this.nsv = nsv; this.fieldName = field.getSchemaField().getName(); this.numberType = field.getSchemaField().getType().getNumberType(); } @Override - public void accumulate(NamedList stv) { + public void accumulate(@SuppressWarnings({"rawtypes"})NamedList stv) { nsv.accumulate(stv); } diff --git a/solr/core/src/java/org/apache/solr/handler/component/SpatialHeatmapFacets.java b/solr/core/src/java/org/apache/solr/handler/component/SpatialHeatmapFacets.java index 8814953bc152..f11f0d0d447e 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/SpatialHeatmapFacets.java +++ b/solr/core/src/java/org/apache/solr/handler/component/SpatialHeatmapFacets.java @@ -45,6 +45,7 @@ public class SpatialHeatmapFacets { public static final String RESPONSE_KEY = "facet_heatmaps"; /** Called by {@link org.apache.solr.request.SimpleFacets} to compute heatmap facets. 
*/ + @SuppressWarnings({"unchecked", "rawtypes"}) public static NamedList getHeatmapForField(String fieldKey, String fieldName, ResponseBuilder rb, SolrParams params, DocSet docSet) throws IOException { final FacetRequest facetRequest = createHeatmapRequest(fieldKey, fieldName, rb, params); return (NamedList) facetRequest.process(rb.req, docSet); @@ -106,7 +107,7 @@ public static void distribModifyRequest(ShardRequest sreq, LinkedHashMap heatmapFacets, NamedList srsp_facet_counts) { + public static void distribHandleResponse(LinkedHashMap heatmapFacets, @SuppressWarnings({"rawtypes"})NamedList srsp_facet_counts) { NamedList> facet_heatmaps = (NamedList>) srsp_facet_counts.get(RESPONSE_KEY); if (facet_heatmaps == null) { return; @@ -127,6 +128,7 @@ public static void distribHandleResponse(LinkedHashMap hea /** Called by FacetComponent's impl of * {@link org.apache.solr.handler.component.SearchComponent#finishStage(ResponseBuilder)}. */ + @SuppressWarnings({"unchecked", "rawtypes"}) public static NamedList distribFinish(LinkedHashMap heatmapInfos, ResponseBuilder rb) { NamedList> result = new SimpleOrderedMap<>(); for (Map.Entry entry : heatmapInfos.entrySet()) { diff --git a/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java b/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java index ad8bfaf3dd00..ae5b43900e90 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java @@ -97,7 +97,7 @@ public class SpellCheckComponent extends SearchComponent implements SolrCoreAwar */ public static final String COMPONENT_NAME = "spellcheck"; - @SuppressWarnings("unchecked") + @SuppressWarnings({"rawtypes"}) protected NamedList initParams; @@ -109,8 +109,7 @@ public class SpellCheckComponent extends SearchComponent implements SolrCoreAwar protected QueryConverter queryConverter; @Override - @SuppressWarnings("unchecked") - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init(args); this.initParams = args; } @@ -198,7 +197,9 @@ public void process(ResponseBuilder rb) throws IOException { } boolean isCorrectlySpelled = hits > (maxResultsForSuggest==null ? 0 : maxResultsForSuggest); + @SuppressWarnings({"rawtypes"}) NamedList response = new SimpleOrderedMap(); + @SuppressWarnings({"rawtypes"}) NamedList suggestions = toNamedList(shardRequest, spellingResult, q, extendedResults); response.add("suggestions", suggestions); @@ -276,7 +277,7 @@ private Integer maxResultsForSuggest(ResponseBuilder rb) { @SuppressWarnings("unchecked") protected void addCollationsToResponse(SolrParams params, SpellingResult spellingResult, ResponseBuilder rb, String q, - NamedList response, boolean suggestionsMayOverlap) { + @SuppressWarnings({"rawtypes"})NamedList response, boolean suggestionsMayOverlap) { int maxCollations = params.getInt(SPELLCHECK_MAX_COLLATIONS, 1); int maxCollationTries = params.getInt(SPELLCHECK_MAX_COLLATION_TRIES, 0); int maxCollationEvaluations = params.getInt(SPELLCHECK_MAX_COLLATION_EVALUATIONS, 10000); @@ -300,9 +301,11 @@ protected void addCollationsToResponse(SolrParams params, SpellingResult spellin //even in cases when the internal rank is the same. 
Collections.sort(collations); + @SuppressWarnings({"rawtypes"}) NamedList collationList = new NamedList(); for (SpellCheckCollation collation : collations) { if (collationExtendedResults) { + @SuppressWarnings({"rawtypes"}) NamedList extendedResult = new SimpleOrderedMap(); extendedResult.add("collationQuery", collation.getCollationQuery()); extendedResult.add("hits", collation.getHits()); @@ -322,7 +325,8 @@ protected void addCollationsToResponse(SolrParams params, SpellingResult spellin response.add("collations", collationList); } - private void addOriginalTermsToResponse(NamedList response, Collection originalTerms) { + @SuppressWarnings({"unchecked"}) + private void addOriginalTermsToResponse(@SuppressWarnings({"rawtypes"})NamedList response, Collection originalTerms) { List originalTermStr = new ArrayList(); for(Token t : originalTerms) { originalTermStr.add(t.toString()); @@ -397,6 +401,7 @@ public void finishStage(ResponseBuilder rb) { if (maxResultsForSuggest==null || !isCorrectlySpelled) { for (ShardRequest sreq : rb.finished) { for (ShardResponse srsp : sreq.responses) { + @SuppressWarnings({"rawtypes"}) NamedList nl = null; try { nl = (NamedList) srsp.getSolrResponse().getResponse().get("spellcheck"); @@ -424,8 +429,10 @@ public void finishStage(ResponseBuilder rb) { SolrSpellChecker checker = getSpellChecker(rb.req.getParams()); SpellingResult result = checker.mergeSuggestions(mergeData, numSug, count, extendedResults); + @SuppressWarnings({"rawtypes"}) NamedList response = new SimpleOrderedMap(); + @SuppressWarnings({"rawtypes"}) NamedList suggestions = toNamedList(false, result, origQuery, extendedResults); response.add("suggestions", suggestions); @@ -438,12 +445,14 @@ public void finishStage(ResponseBuilder rb) { .toArray(new SpellCheckCollation[mergeData.collations.size()]); Arrays.sort(sortedCollations); + @SuppressWarnings({"rawtypes"}) NamedList collations = new NamedList(); int i = 0; while (i < maxCollations && i < sortedCollations.length) { SpellCheckCollation collation = sortedCollations[i]; i++; if (collationExtendedResults) { + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap extendedResult = new SimpleOrderedMap(); extendedResult.add("collationQuery", collation.getCollationQuery()); extendedResult.add("hits", collation.getHits()); @@ -462,7 +471,7 @@ public void finishStage(ResponseBuilder rb) { } @SuppressWarnings("unchecked") - private void collectShardSuggestions(NamedList nl, SpellCheckMergeData mergeData) { + private void collectShardSuggestions(@SuppressWarnings({"rawtypes"})NamedList nl, SpellCheckMergeData mergeData) { SpellCheckResponse spellCheckResp = new SpellCheckResponse(nl); Iterable originalTermStrings = (Iterable) nl.get("originalTerms"); if(originalTermStrings!=null) { @@ -515,8 +524,9 @@ private void collectShardSuggestions(NamedList nl, SpellCheckMergeData mergeData } @SuppressWarnings("unchecked") - private void collectShardCollations(SpellCheckMergeData mergeData, NamedList spellCheckResponse, int maxCollationTries) { + private void collectShardCollations(SpellCheckMergeData mergeData, @SuppressWarnings({"rawtypes"})NamedList spellCheckResponse, int maxCollationTries) { Map collations = mergeData.collations; + @SuppressWarnings({"rawtypes"}) NamedList collationHolder = (NamedList) spellCheckResponse.get("collations"); if(collationHolder != null) { List collationList = collationHolder.getAll("collation"); @@ -542,6 +552,7 @@ private void collectShardCollations(SpellCheckMergeData mergeData, NamedList spe 
collations.put(coll.getCollationQuery(), coll); } else { + @SuppressWarnings({"rawtypes"}) NamedList expandedCollation = (NamedList) o; SpellCheckCollation coll = new SpellCheckCollation(); coll.setCollationQuery((String) expandedCollation.get("collationQuery")); @@ -636,6 +647,7 @@ public SolrSpellChecker getSpellChecker(String name) { return spellCheckers.get(name); } + @SuppressWarnings({"unchecked", "rawtypes"}) protected NamedList toNamedList(boolean shardRequest, SpellingResult spellingResult, String origQuery, boolean extendedResults) { NamedList result = new NamedList(); @@ -704,6 +716,7 @@ protected NamedList toNamedList(boolean shardRequest, } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void inform(SolrCore core) { if (initParams != null) { log.info("Initializing spell checkers"); @@ -748,7 +761,7 @@ public void inform(SolrCore core) { } } - private boolean addSpellChecker(SolrCore core, boolean hasDefault, NamedList spellchecker) { + @SuppressWarnings({"rawtypes"})private boolean addSpellChecker(SolrCore core, boolean hasDefault, @SuppressWarnings({"rawtypes"})NamedList spellchecker) { String className = (String) spellchecker.get("classname"); if (className == null) className = (String) spellchecker.get("class"); // TODO: this is a little bit sneaky: warn if class isnt supplied @@ -805,7 +818,7 @@ public SpellCheckerListener(SolrCore core, SolrSpellChecker checker, boolean bui } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { } @Override diff --git a/solr/core/src/java/org/apache/solr/handler/component/StandaloneReplicaSource.java b/solr/core/src/java/org/apache/solr/handler/component/StandaloneReplicaSource.java index 33a515354abc..91090a526a8f 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/StandaloneReplicaSource.java +++ b/solr/core/src/java/org/apache/solr/handler/component/StandaloneReplicaSource.java @@ -28,6 +28,7 @@ class StandaloneReplicaSource implements ReplicaSource { private List[] replicas; + @SuppressWarnings({"unchecked", "rawtypes"}) public StandaloneReplicaSource(Builder builder) { List list = StrUtils.splitSmart(builder.shardsParam, ",", true); replicas = new List[list.size()]; diff --git a/solr/core/src/java/org/apache/solr/handler/component/StatsComponent.java b/solr/core/src/java/org/apache/solr/handler/component/StatsComponent.java index fc5c29f829af..4ba2069c715e 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/StatsComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/StatsComponent.java @@ -17,15 +17,11 @@ package org.apache.solr.handler.component; import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; import java.util.LinkedHashMap; -import java.util.List; import java.util.Map; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.ShardParams; -import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.params.StatsParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SimpleOrderedMap; @@ -41,13 +37,13 @@ public class StatsComponent extends SearchComponent { @Override public void prepare(ResponseBuilder rb) throws IOException { - if (rb.req.getParams().getBool(StatsParams.STATS,false)) { - rb.setNeedDocSet( true ); + if (rb.req.getParams().getBool(StatsParams.STATS, false)) { + rb.setNeedDocSet(true); rb.doStats = true; rb._statsInfo = new StatsInfo(rb); for (StatsField statsField : 
rb._statsInfo.getStatsFields()) { if (statsField.getSchemaField() != null && statsField.getSchemaField().getType().isPointField() && !statsField.getSchemaField().hasDocValues()) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Can't calculate stats on a PointField without docValues"); } } @@ -63,8 +59,8 @@ public void process(ResponseBuilder rb) throws IOException { DocSet docs = statsField.computeBaseDocSet(); statsValues.put(statsField.getOutputKey(), statsField.computeLocalStatsValues(docs)); } - - rb.rsp.add( "stats", convertToResponse(statsValues) ); + + rb.rsp.add("stats", convertToResponse(statsValues)); } @Override @@ -88,6 +84,7 @@ public void modifyRequest(ResponseBuilder rb, SearchComponent who, ShardRequest } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void handleResponses(ResponseBuilder rb, ShardRequest sreq) { if (!rb.doStats || (sreq.purpose & ShardRequest.PURPOSE_GET_STATS) == 0) return; @@ -96,8 +93,8 @@ public void handleResponses(ResponseBuilder rb, ShardRequest sreq) { for (ShardResponse srsp : sreq.responses) { NamedList stats = null; try { - stats = (NamedList>>) - srsp.getSolrResponse().getResponse().get("stats"); + stats = (NamedList>>) + srsp.getSolrResponse().getResponse().get("stats"); } catch (Exception e) { if (ShardParams.getShardsTolerantAsBool(rb.req.getParams())) { continue; // looks like a shard did not return anything @@ -111,6 +108,7 @@ public void handleResponses(ResponseBuilder rb, ShardRequest sreq) { for (int i = 0; i < stats_fields.size(); i++) { String key = stats_fields.getName(i); StatsValues stv = allStatsValues.get(key); + @SuppressWarnings({"rawtypes"}) NamedList shardStv = (NamedList) stats_fields.get(key); stv.accumulate(shardStv); } @@ -141,18 +139,19 @@ public static NamedList> unwrapStats(NamedList>> convertToResponse - (Map statsValues) { + (Map statsValues) { NamedList>> stats = new SimpleOrderedMap<>(); NamedList> stats_fields = new SimpleOrderedMap<>(); stats.add("stats_fields", stats_fields); - - for (Map.Entry entry : statsValues.entrySet()) { + + for (Map.Entry entry : statsValues.entrySet()) { String key = entry.getKey(); + @SuppressWarnings({"rawtypes"}) NamedList stv = entry.getValue().getStatsValues(); stats_fields.add(key, stv); } @@ -169,87 +168,3 @@ public String getDescription() { } } -/** - * Models all of the information about stats needed for a single request - * @see StatsField - */ -class StatsInfo { - - private final ResponseBuilder rb; - private final List statsFields = new ArrayList<>(7); - private final Map distribStatsValues = new LinkedHashMap<>(); - private final Map statsFieldMap = new LinkedHashMap<>(); - private final Map> tagToStatsFields = new LinkedHashMap<>(); - - public StatsInfo(ResponseBuilder rb) { - this.rb = rb; - SolrParams params = rb.req.getParams(); - String[] statsParams = params.getParams(StatsParams.STATS_FIELD); - if (null == statsParams) { - // no stats.field params, nothing to parse. 
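The StatsInfo constructor being moved out of this file groups each parsed StatsField under every tag it carries, using a get-then-put dance to grow the per-tag lists. The same grouping reads more directly with computeIfAbsent; a sketch under that assumption, with a hypothetical TaggedField record in place of StatsField:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class TagIndex {
  // Hypothetical stand-in for StatsField: a name plus its tag local params.
  record TaggedField(String name, List<String> tags) {}

  // Index every field under each of its tags so that a later lookup by tag,
  // like StatsInfo.getStatsFieldsByTag, is a single map access.
  static Map<String, List<TaggedField>> byTag(List<TaggedField> fields) {
    Map<String, List<TaggedField>> tagToFields = new HashMap<>();
    for (TaggedField f : fields) {
      for (String tag : f.tags()) {
        tagToFields.computeIfAbsent(tag, t -> new ArrayList<>()).add(f);
      }
    }
    return tagToFields;
  }

  public static void main(String[] args) {
    Map<String, List<TaggedField>> idx = byTag(List.of(
        new TaggedField("price", List.of("t1")),
        new TaggedField("popularity", List.of("t1", "t2"))));
    System.out.println(idx.get("t1")); // both fields
  }
}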
- return; - } - - for (String paramValue : statsParams) { - StatsField current = new StatsField(rb, paramValue); - statsFields.add(current); - for (String tag : current.getTagList()) { - List fieldList = tagToStatsFields.get(tag); - if (fieldList == null) { - fieldList = new ArrayList<>(); - } - fieldList.add(current); - tagToStatsFields.put(tag, fieldList); - } - statsFieldMap.put(current.getOutputKey(), current); - distribStatsValues.put(current.getOutputKey(), - StatsValuesFactory.createStatsValues(current)); - } - } - - /** - * Returns an immutable list of {@link StatsField} instances - * modeling each of the {@link StatsParams#STATS_FIELD} params specified - * as part of this request - */ - public List getStatsFields() { - return Collections.unmodifiableList(statsFields); - } - - /** - * Returns the {@link StatsField} associated with the specified (effective) - * outputKey, or null if there was no {@link StatsParams#STATS_FIELD} param - * that would corrispond with that key. - */ - public StatsField getStatsField(String outputKey) { - return statsFieldMap.get(outputKey); - } - - /** - * Return immutable list of {@link StatsField} instances by string tag local parameter. - * - * @param tag tag local parameter - * @return list of stats fields - */ - public List getStatsFieldsByTag(String tag) { - List raw = tagToStatsFields.get(tag); - if (null == raw) { - return Collections.emptyList(); - } else { - return Collections.unmodifiableList(raw); - } - } - - /** - * Returns an immutable map of response key => {@link StatsValues} - * instances for the current distributed request. - * Depending on where we are in the process of handling this request, - * these {@link StatsValues} instances may not be complete -- but they - * will never be null. - */ - public Map getAggregateStatsValues() { - return Collections.unmodifiableMap(distribStatsValues); - } - -} - diff --git a/solr/core/src/java/org/apache/solr/handler/component/StatsInfo.java b/solr/core/src/java/org/apache/solr/handler/component/StatsInfo.java new file mode 100644 index 000000000000..f3f2871b2463 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/handler/component/StatsInfo.java @@ -0,0 +1,108 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.solr.handler.component; + +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.params.StatsParams; + +import java.util.*; + +/** + * Models all of the information about stats needed for a single request + * + * @see StatsField + */ +class StatsInfo { + + private final ResponseBuilder rb; + private final List statsFields = new ArrayList<>(7); + private final Map distribStatsValues = new LinkedHashMap<>(); + private final Map statsFieldMap = new LinkedHashMap<>(); + private final Map> tagToStatsFields = new LinkedHashMap<>(); + + public StatsInfo(ResponseBuilder rb) { + this.rb = rb; + SolrParams params = rb.req.getParams(); + String[] statsParams = params.getParams(StatsParams.STATS_FIELD); + if (null == statsParams) { + // no stats.field params, nothing to parse. + return; + } + + for (String paramValue : statsParams) { + StatsField current = new StatsField(rb, paramValue); + statsFields.add(current); + for (String tag : current.getTagList()) { + List fieldList = tagToStatsFields.get(tag); + if (fieldList == null) { + fieldList = new ArrayList<>(); + } + fieldList.add(current); + tagToStatsFields.put(tag, fieldList); + } + statsFieldMap.put(current.getOutputKey(), current); + distribStatsValues.put(current.getOutputKey(), + StatsValuesFactory.createStatsValues(current)); + } + } + + /** + * Returns an immutable list of {@link StatsField} instances + * modeling each of the {@link StatsParams#STATS_FIELD} params specified + * as part of this request + */ + public List getStatsFields() { + return Collections.unmodifiableList(statsFields); + } + + /** + * Returns the {@link StatsField} associated with the specified (effective) + * outputKey, or null if there was no {@link StatsParams#STATS_FIELD} param + * that would corrispond with that key. + */ + public StatsField getStatsField(String outputKey) { + return statsFieldMap.get(outputKey); + } + + /** + * Return immutable list of {@link StatsField} instances by string tag local parameter. + * + * @param tag tag local parameter + * @return list of stats fields + */ + public List getStatsFieldsByTag(String tag) { + List raw = tagToStatsFields.get(tag); + if (null == raw) { + return Collections.emptyList(); + } else { + return Collections.unmodifiableList(raw); + } + } + + /** + * Returns an immutable map of response key => {@link StatsValues} + * instances for the current distributed request. + * Depending on where we are in the process of handling this request, + * these {@link StatsValues} instances may not be complete -- but they + * will never be null. + */ + public Map getAggregateStatsValues() { + return Collections.unmodifiableMap(distribStatsValues); + } + +} diff --git a/solr/core/src/java/org/apache/solr/handler/component/StatsValues.java b/solr/core/src/java/org/apache/solr/handler/component/StatsValues.java index ae245685a45d..50f8c309333d 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/StatsValues.java +++ b/solr/core/src/java/org/apache/solr/handler/component/StatsValues.java @@ -35,7 +35,7 @@ public interface StatsValues { * * @param stv NamedList whose values will be used to accumulate the current values */ - void accumulate(NamedList stv); + void accumulate(@SuppressWarnings({"rawtypes"})NamedList stv); /** Accumulate the value associated with docID. 
* @see #setNextReader(org.apache.lucene.index.LeafReaderContext) */ diff --git a/solr/core/src/java/org/apache/solr/handler/component/StatsValuesFactory.java b/solr/core/src/java/org/apache/solr/handler/component/StatsValuesFactory.java index 2cbdb73bb815..574cf05b627f 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/StatsValuesFactory.java +++ b/solr/core/src/java/org/apache/solr/handler/component/StatsValuesFactory.java @@ -86,781 +86,788 @@ public static StatsValues createStatsValues(StatsField statsField) { "Field type " + fieldType + " is not currently supported"); } } -} -/** - * Abstract implementation of - * {@link org.apache.solr.handler.component.StatsValues} that provides the - * default behavior for most StatsValues implementations. - * - * There are very few requirements placed on what statistics concrete - * implementations should collect, with the only required statistics being the - * minimum and maximum values. - */ -abstract class AbstractStatsValues implements StatsValues { - private static final String FACETS = "facets"; - - /** Tracks all data about tthe stats we need to collect */ - final protected StatsField statsField; - - /** may be null if we are collecting stats directly from a function ValueSource */ - final protected SchemaField sf; - /** - * may be null if we are collecting stats directly from a function ValueSource - */ - final protected FieldType ft; - - // final booleans from StatsField to allow better inlining & JIT optimizing - final protected boolean computeCount; - final protected boolean computeMissing; - final protected boolean computeCalcDistinct; // needed for either countDistinct or distinctValues - final protected boolean computeMin; - final protected boolean computeMax; - final protected boolean computeMinOrMax; - final protected boolean computeCardinality; - - /** - * Either a function value source to collect from, or the ValueSource associated - * with a single valued field we are collecting from. Will be null until/unless - * {@link #setNextReader} is called at least once - */ - private ValueSource valueSource; - /** - * Context to use when retrieving FunctionValues, will be null until/unless - * {@link #setNextReader} is called at least once - */ - private Map vsContext; /** - * Values to collect, will be null until/unless {@link #setNextReader} is - * called at least once - */ - protected FunctionValues values; - - protected T max; - protected T min; - protected long missing; - protected long count; - protected long countDistinct; - protected final Set distinctValues; - - /** - * Hash function that must be used by implementations of {@link #hash} + * Abstract implementation of + * {@link StatsValues} that provides the + * default behavior for most StatsValues implementations. + * + * There are very few requirements placed on what statistics concrete + * implementations should collect, with the only required statistics being the + * minimum and maximum values. 
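As the relocated javadoc above says, the only statistics every implementation must collect are the minimum and maximum values. A toy accumulator showing just that minimal contract, including the shard-merge shape of accumulate (MinMax is illustrative, not the Solr class):

public class MinMax<T extends Comparable<T>> {
  private T min;
  private T max;

  // Fold one value into the running min/max; the first value seeds both.
  public void accumulate(T value) {
    if (min == null || value.compareTo(min) < 0) min = value;
    if (max == null || value.compareTo(max) > 0) max = value;
  }

  // Merge another accumulator; this is the shape of the distributed case,
  // where one shard's min/max folds into the coordinator's running values.
  public void accumulate(MinMax<T> other) {
    if (other.min != null) accumulate(other.min);
    if (other.max != null) accumulate(other.max);
  }

  public static void main(String[] args) {
    MinMax<Integer> m = new MinMax<>();
    for (int v : new int[] {7, 2, 9}) m.accumulate(v);
    System.out.println(m.min + ".." + m.max); // prints 2..9
  }
}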
*/ - protected final HashFunction hasher; - // if null, no HLL logic can be computed; not final because of "union" optimization (see below) - private HLL hll; - - // facetField facetValue - protected Map> facets = new HashMap<>(); - - protected AbstractStatsValues(StatsField statsField) { - this.statsField = statsField; - this.computeCount = statsField.calculateStats(Stat.count); - this.computeMissing = statsField.calculateStats(Stat.missing); - this.computeCalcDistinct = statsField.calculateStats(Stat.countDistinct) - || statsField.calculateStats(Stat.distinctValues); - this.computeMin = statsField.calculateStats(Stat.min); - this.computeMax = statsField.calculateStats(Stat.max); - this.computeMinOrMax = computeMin || computeMax; - - this.distinctValues = computeCalcDistinct ? new TreeSet<>() : null; - - this.computeCardinality = statsField.calculateStats(Stat.cardinality); - if ( computeCardinality ) { + private abstract static class AbstractStatsValues implements StatsValues { + private static final String FACETS = "facets"; + + /** Tracks all data about tthe stats we need to collect */ + final protected StatsField statsField; + + /** may be null if we are collecting stats directly from a function ValueSource */ + final protected SchemaField sf; + /** + * may be null if we are collecting stats directly from a function ValueSource + */ + final protected FieldType ft; + + // final booleans from StatsField to allow better inlining & JIT optimizing + final protected boolean computeCount; + final protected boolean computeMissing; + final protected boolean computeCalcDistinct; // needed for either countDistinct or distinctValues + final protected boolean computeMin; + final protected boolean computeMax; + final protected boolean computeMinOrMax; + final protected boolean computeCardinality; + + /** + * Either a function value source to collect from, or the ValueSource associated + * with a single valued field we are collecting from. Will be null until/unless + * {@link #setNextReader} is called at least once + */ + private ValueSource valueSource; + /** + * Context to use when retrieving FunctionValues, will be null until/unless + * {@link #setNextReader} is called at least once + */ + @SuppressWarnings({"rawtypes"}) + private Map vsContext; + /** + * Values to collect, will be null until/unless {@link #setNextReader} is + * called at least once + */ + protected FunctionValues values; + + protected T max; + protected T min; + protected long missing; + protected long count; + protected long countDistinct; + protected final Set distinctValues; + + /** + * Hash function that must be used by implementations of {@link #hash} + */ + protected final HashFunction hasher; + // if null, no HLL logic can be computed; not final because of "union" optimization (see below) + private HLL hll; + + // facetField facetValue + protected Map> facets = new HashMap<>(); + + protected AbstractStatsValues(StatsField statsField) { + this.statsField = statsField; + this.computeCount = statsField.calculateStats(Stat.count); + this.computeMissing = statsField.calculateStats(Stat.missing); + this.computeCalcDistinct = statsField.calculateStats(Stat.countDistinct) + || statsField.calculateStats(Stat.distinctValues); + this.computeMin = statsField.calculateStats(Stat.min); + this.computeMax = statsField.calculateStats(Stat.max); + this.computeMinOrMax = computeMin || computeMax; + + this.distinctValues = computeCalcDistinct ? 
new TreeSet<>() : null; + + this.computeCardinality = statsField.calculateStats(Stat.cardinality); + if ( computeCardinality ) { + + hasher = statsField.getHllOptions().getHasher(); + hll = statsField.getHllOptions().newHLL(); + assert null != hll : "Cardinality requires an HLL"; + } else { + hll = null; + hasher = null; + } - hasher = statsField.getHllOptions().getHasher(); - hll = statsField.getHllOptions().newHLL(); - assert null != hll : "Cardinality requires an HLL"; - } else { - hll = null; - hasher = null; - } - - // alternatively, we could refactor a common base class that doesn't know/care - // about either SchemaField or ValueSource - but then there would be a lot of - // duplicate code between "NumericSchemaFieldStatsValues" and - // "NumericValueSourceStatsValues" which would have diff parent classes - // - // part of the complexity here being that the StatsValues API serves two - // masters: collecting concrete Values from things like DocValuesStats and - // the distributed aggregation logic, but also collecting docIds which it - // then - // uses to go out and pull concreate values from the ValueSource - // (from a func, or single valued field) - if (null != statsField.getSchemaField()) { - assert null == statsField.getValueSource(); - this.sf = statsField.getSchemaField(); - this.ft = sf.getType(); - } else { - assert null != statsField.getValueSource(); - assert null == statsField.getSchemaField(); - this.sf = null; - this.ft = null; - } - } - - @Override - public void accumulate(NamedList stv) { - if (computeCount) { - count += (Long) stv.get("count"); - } - if (computeMissing) { - missing += (Long) stv.get("missing"); - } - if (computeCalcDistinct) { - distinctValues.addAll((Collection) stv.get("distinctValues")); - countDistinct = distinctValues.size(); - } - - if (computeMinOrMax) { - updateMinMax((T) stv.get("min"), (T) stv.get("max")); - } - - if (computeCardinality) { - byte[] data = (byte[]) stv.get("cardinality"); - HLL other = HLL.fromBytes(data); - if (hll.getType().equals(HLLType.EMPTY)) { - // The HLL.union method goes out of it's way not to modify the "other" HLL. - // Which means in the case of merging into an "EMPTY" HLL (garunteed to happen at - // least once in every coordination of shard requests) it always clones all - // of the internal storage -- but since we're going to throw "other" away after - // the merge, this just means a short term doubling of RAM that we can skip. 
- hll = other; + // alternatively, we could refactor a common base class that doesn't know/care + // about either SchemaField or ValueSource - but then there would be a lot of + // duplicate code between "NumericSchemaFieldStatsValues" and + // "NumericValueSourceStatsValues" which would have diff parent classes + // + // part of the complexity here being that the StatsValues API serves two + // masters: collecting concrete Values from things like DocValuesStats and + // the distributed aggregation logic, but also collecting docIds which it + // then + // uses to go out and pull concrete values from the ValueSource + // (from a func, or single valued field) + if (null != statsField.getSchemaField()) { + assert null == statsField.getValueSource(); + this.sf = statsField.getSchemaField(); + this.ft = sf.getType(); } else { - hll.union(other); + assert null != statsField.getValueSource(); + assert null == statsField.getSchemaField(); + this.sf = null; + this.ft = null; } } - updateTypeSpecificStats(stv); - - NamedList f = (NamedList) stv.get(FACETS); - if (f == null) { - return; - } - - for (int i = 0; i < f.size(); i++) { - String field = f.getName(i); - NamedList vals = (NamedList) f.getVal(i); - Map<String, StatsValues> addTo = facets.get(field); - if (addTo == null) { - addTo = new HashMap<>(); - facets.put(field, addTo); - } - for (int j = 0; j < vals.size(); j++) { - String val = vals.getName(j); - StatsValues vvals = addTo.get(val); - if (vvals == null) { - vvals = StatsValuesFactory.createStatsValues(statsField); - addTo.put(val, vvals); + @Override + @SuppressWarnings({"unchecked"}) + public void accumulate(@SuppressWarnings({"rawtypes"})NamedList stv) { + if (computeCount) { + count += (Long) stv.get("count"); + } + if (computeMissing) { + missing += (Long) stv.get("missing"); + } + if (computeCalcDistinct) { + distinctValues.addAll((Collection) stv.get("distinctValues")); + countDistinct = distinctValues.size(); + } + + if (computeMinOrMax) { + updateMinMax((T) stv.get("min"), (T) stv.get("max")); + } + + if (computeCardinality) { + byte[] data = (byte[]) stv.get("cardinality"); + HLL other = HLL.fromBytes(data); + if (hll.getType().equals(HLLType.EMPTY)) { + // The HLL.union method goes out of its way not to modify the "other" HLL. + // Which means in the case of merging into an "EMPTY" HLL (guaranteed to happen at + // least once in every coordination of shard requests) it always clones all + // of the internal storage -- but since we're going to throw "other" away after + // the merge, this just means a short term doubling of RAM that we can skip.
+ hll = other; + } else { + hll.union(other); } - vvals.accumulate((NamedList) vals.getVal(j)); } - } - } - - @Override - public void accumulate(BytesRef value, int count) { - if (null == ft) { - throw new IllegalStateException( - "Can't collect & convert BytesRefs on stats that do't use a a FieldType: " - + statsField); - } - T typedValue = (T) ft.toObject(sf, value); - accumulate(typedValue, count); - } - public void accumulate(T value, int count) { - assert null != value : "Can't accumulate null"; + updateTypeSpecificStats(stv); - if (computeCount) { - this.count += count; - } - if (computeCalcDistinct) { - distinctValues.add(value); - countDistinct = distinctValues.size(); - } - if (computeMinOrMax) { - updateMinMax(value, value); - } - if (computeCardinality) { - if (null == hasher) { - assert value instanceof Number : "pre-hashed value support only works with numeric longs"; - hll.addRaw(((Number)value).longValue()); - } else { - hll.addRaw(hash(value)); + @SuppressWarnings({"rawtypes"}) + NamedList f = (NamedList) stv.get(FACETS); + if (f == null) { + return; } - } - updateTypeSpecificStats(value, count); - } - - @Override - public void missing() { - if (computeMissing) { - missing++; - } - } - - @Override - public void addMissing(int count) { - missing += count; - } - - @Override - public void addFacet(String facetName, Map facetValues) { - facets.put(facetName, facetValues); - } - - @Override - public NamedList getStatsValues() { - NamedList res = new SimpleOrderedMap<>(); - if (statsField.includeInResponse(Stat.min)) { - res.add("min", min); + for (int i = 0; i < f.size(); i++) { + String field = f.getName(i); + @SuppressWarnings({"rawtypes"}) + NamedList vals = (NamedList) f.getVal(i); + Map addTo = facets.get(field); + if (addTo == null) { + addTo = new HashMap<>(); + facets.put(field, addTo); + } + for (int j = 0; j < vals.size(); j++) { + String val = vals.getName(j); + StatsValues vvals = addTo.get(val); + if (vvals == null) { + vvals = createStatsValues(statsField); + addTo.put(val, vvals); + } + vvals.accumulate((NamedList) vals.getVal(j)); + } + } } - if (statsField.includeInResponse(Stat.max)) { - res.add("max", max); + + @Override + @SuppressWarnings({"unchecked"}) + public void accumulate(BytesRef value, int count) { + if (null == ft) { + throw new IllegalStateException( + "Can't collect & convert BytesRefs on stats that do't use a a FieldType: " + + statsField); + } + T typedValue = (T) ft.toObject(sf, value); + accumulate(typedValue, count); } - if (statsField.includeInResponse(Stat.count)) { - res.add("count", count); + + public void accumulate(T value, int count) { + assert null != value : "Can't accumulate null"; + + if (computeCount) { + this.count += count; + } + if (computeCalcDistinct) { + distinctValues.add(value); + countDistinct = distinctValues.size(); + } + if (computeMinOrMax) { + updateMinMax(value, value); + } + if (computeCardinality) { + if (null == hasher) { + assert value instanceof Number : "pre-hashed value support only works with numeric longs"; + hll.addRaw(((Number)value).longValue()); + } else { + hll.addRaw(hash(value)); + } + } + updateTypeSpecificStats(value, count); } - if (statsField.includeInResponse(Stat.missing)) { - res.add("missing", missing); + + @Override + public void missing() { + if (computeMissing) { + missing++; + } } - if (statsField.includeInResponse(Stat.distinctValues)) { - res.add("distinctValues", distinctValues); + + @Override + public void addMissing(int count) { + missing += count; } - if 
(statsField.includeInResponse(Stat.countDistinct)) { - res.add("countDistinct", countDistinct); + + @Override + public void addFacet(String facetName, Map facetValues) { + facets.put(facetName, facetValues); } - if (statsField.includeInResponse(Stat.cardinality)) { - if (statsField.getIsShard()) { - res.add("cardinality", hll.toBytes()); - } else { - res.add("cardinality", hll.cardinality()); + + @Override + public NamedList getStatsValues() { + NamedList res = new SimpleOrderedMap<>(); + + if (statsField.includeInResponse(Stat.min)) { + res.add("min", min); } - } - - addTypeSpecificStats(res); - - if (!facets.isEmpty()) { - - // add the facet stats - NamedList> nl = new SimpleOrderedMap<>(); - for (Map.Entry> entry : facets.entrySet()) { - NamedList> nl2 = new SimpleOrderedMap<>(); - nl.add(entry.getKey(), nl2); - for (Map.Entry e2 : entry.getValue().entrySet()) { - nl2.add(e2.getKey(), e2.getValue().getStatsValues()); + if (statsField.includeInResponse(Stat.max)) { + res.add("max", max); + } + if (statsField.includeInResponse(Stat.count)) { + res.add("count", count); + } + if (statsField.includeInResponse(Stat.missing)) { + res.add("missing", missing); + } + if (statsField.includeInResponse(Stat.distinctValues)) { + res.add("distinctValues", distinctValues); + } + if (statsField.includeInResponse(Stat.countDistinct)) { + res.add("countDistinct", countDistinct); + } + if (statsField.includeInResponse(Stat.cardinality)) { + if (statsField.getIsShard()) { + res.add("cardinality", hll.toBytes()); + } else { + res.add("cardinality", hll.cardinality()); } } - res.add(FACETS, nl); + addTypeSpecificStats(res); + + if (!facets.isEmpty()) { + + // add the facet stats + NamedList> nl = new SimpleOrderedMap<>(); + for (Map.Entry> entry : facets.entrySet()) { + NamedList> nl2 = new SimpleOrderedMap<>(); + nl.add(entry.getKey(), nl2); + for (Map.Entry e2 : entry.getValue().entrySet()) { + nl2.add(e2.getKey(), e2.getValue().getStatsValues()); + } + } + + res.add(FACETS, nl); + } + + return res; } - return res; - } - - public void setNextReader(LeafReaderContext ctx) throws IOException { - if (valueSource == null) { - // first time we've collected local values, get the right ValueSource - valueSource = (null == ft) - ? statsField.getValueSource() - : ft.getValueSource(sf, null); - vsContext = ValueSource.newContext(statsField.getSearcher()); - } - values = valueSource.getValues(vsContext, ctx); + @SuppressWarnings({"unchecked"}) + public void setNextReader(LeafReaderContext ctx) throws IOException { + if (valueSource == null) { + // first time we've collected local values, get the right ValueSource + valueSource = (null == ft) + ? statsField.getValueSource() + : ft.getValueSource(sf, null); + vsContext = ValueSource.newContext(statsField.getSearcher()); + } + values = valueSource.getValues(vsContext, ctx); + } + + /** + * Hash function to be used for computing cardinality. + * + * This method will not be called in cases where the user has indicated the values + * are already hashed. If this method is called, then {@link #hasher} will be non-null, + * and should be used to generate the appropriate hash value. 
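The cardinality support in this class has two halves: per-document accumulation (hash the raw value, or trust a pre-hashed numeric long, then feed HLL.addRaw) and distributed aggregation (deserialize each shard's sketch and union it into the accumulator, adopting the first sketch outright while the accumulator is still EMPTY, so that union()'s defensive clone is skipped). A minimal sketch of that merge shape, assuming an HLL class with fromBytes/getType/union as used in the patch (the import paths mirror the upstream java-hll library, and the class and method names here are hypothetical, not part of the patch):

    import java.util.List;
    import net.agkn.hll.HLL;      // assumption: java-hll-style API
    import net.agkn.hll.HLLType;

    // Illustration of the shard-merge pattern used by accumulate(NamedList) above.
    class CardinalityMergeSketch {
      static HLL merge(HLL acc, List<byte[]> shardSketches) {
        for (byte[] data : shardSketches) {
          HLL other = HLL.fromBytes(data);   // one serialized sketch per shard
          if (acc.getType().equals(HLLType.EMPTY)) {
            // Adopt the incoming sketch: union() would clone "other"'s storage
            // defensively, and "other" is discarded right after the merge anyway.
            acc = other;
          } else {
            acc.union(other);                // general case: fold into accumulator
          }
        }
        return acc;
      }
    }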
+ * + * @see Stat#cardinality + * @see #hasher + */ + protected abstract long hash(T value); + + /** + * Updates the minimum and maximum statistics based on the given values + * + * @param min + * Value that the current minimum should be updated against + * @param max + * Value that the current maximum should be updated against + */ + protected abstract void updateMinMax(T min, T max); + + /** + * Updates the type specific statistics based on the given value + * + * @param value + * Value the statistics should be updated against + * @param count + * Number of times the value is being accumulated + */ + protected abstract void updateTypeSpecificStats(T value, int count); + + /** + * Updates the type specific statistics based on the values in the given list + * + * @param stv + * List containing values the current statistics should be updated + * against + */ + protected abstract void updateTypeSpecificStats(@SuppressWarnings({"rawtypes"})NamedList stv); + + /** + * Add any type specific statistics to the given NamedList + * + * @param res + * NamedList to add the type specific statistics too + */ + protected abstract void addTypeSpecificStats(NamedList res); } - - /** - * Hash function to be used for computing cardinality. - * - * This method will not be called in cases where the user has indicated the values - * are already hashed. If this method is called, then {@link #hasher} will be non-null, - * and should be used to generate the appropriate hash value. - * - * @see Stat#cardinality - * @see #hasher - */ - protected abstract long hash(T value); /** - * Updates the minimum and maximum statistics based on the given values - * - * @param min - * Value that the current minimum should be updated against - * @param max - * Value that the current maximum should be updated against - */ - protected abstract void updateMinMax(T min, T max); - - /** - * Updates the type specific statistics based on the given value - * - * @param value - * Value the statistics should be updated against - * @param count - * Number of times the value is being accumulated - */ - protected abstract void updateTypeSpecificStats(T value, int count); - - /** - * Updates the type specific statistics based on the values in the given list - * - * @param stv - * List containing values the current statistics should be updated - * against + * Implementation of StatsValues that supports Double values */ - protected abstract void updateTypeSpecificStats(NamedList stv); - - /** - * Add any type specific statistics to the given NamedList - * - * @param res - * NamedList to add the type specific statistics too - */ - protected abstract void addTypeSpecificStats(NamedList res); -} + static class NumericStatsValues extends AbstractStatsValues { -/** - * Implementation of StatsValues that supports Double values - */ -class NumericStatsValues extends AbstractStatsValues { - - double sum; - double sumOfSquares; - - AVLTreeDigest tdigest; - - double minD; // perf optimization, only valid if (null != this.min) - double maxD; // perf optimization, only valid if (null != this.max) - - final protected boolean computeSum; - final protected boolean computeSumOfSquares; - final protected boolean computePercentiles; - - public NumericStatsValues(StatsField statsField) { - super(statsField); - - this.computeSum = statsField.calculateStats(Stat.sum); - this.computeSumOfSquares = statsField.calculateStats(Stat.sumOfSquares); - - this.computePercentiles = statsField.calculateStats(Stat.percentiles); - if ( computePercentiles ) { - tdigest = new 
AVLTreeDigest(statsField.getTdigestCompression()); - } + double sum; + double sumOfSquares; - } + AVLTreeDigest tdigest; + + double minD; // perf optimization, only valid if (null != this.min) + double maxD; // perf optimization, only valid if (null != this.max) + + final protected boolean computeSum; + final protected boolean computeSumOfSquares; + final protected boolean computePercentiles; + + public NumericStatsValues(StatsField statsField) { + super(statsField); + + this.computeSum = statsField.calculateStats(Stat.sum); + this.computeSumOfSquares = statsField.calculateStats(Stat.sumOfSquares); + + this.computePercentiles = statsField.calculateStats(Stat.percentiles); + if ( computePercentiles ) { + tdigest = new AVLTreeDigest(statsField.getTdigestCompression()); + } - @Override - public long hash(Number v) { - // have to use a bit of reflection to ensure good hash values since - // we don't have truely type specific stats - if (v instanceof Long) { - return hasher.hashLong(v.longValue()).asLong(); - } else if (v instanceof Integer) { - return hasher.hashInt(v.intValue()).asLong(); - } else if (v instanceof Double) { - return hasher.hashLong(Double.doubleToRawLongBits(v.doubleValue())).asLong(); - } else if (v instanceof Float) { - return hasher.hashInt(Float.floatToRawIntBits(v.floatValue())).asLong(); - } else if (v instanceof Byte) { - return hasher.newHasher().putByte(v.byteValue()).hash().asLong(); - } else if (v instanceof Short) { - return hasher.newHasher().putShort(v.shortValue()).hash().asLong(); - } - // else... - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, - "Unsupported Numeric Type ("+v.getClass()+") for hashing: " +statsField); - } - - @Override - public void accumulate(int docID) throws IOException { - if (values.exists(docID)) { - Number value = (Number) values.objectVal(docID); - accumulate(value, 1); - } else { - missing(); - } - } - - @Override - public void updateTypeSpecificStats(NamedList stv) { - if (computeSum) { - sum += ((Number) stv.get("sum")).doubleValue(); - } - if (computeSumOfSquares) { - sumOfSquares += ((Number) stv.get("sumOfSquares")).doubleValue(); } - - if (computePercentiles) { - byte[] data = (byte[]) stv.get("percentiles"); - ByteBuffer buf = ByteBuffer.wrap(data); - tdigest.add(AVLTreeDigest.fromBytes(buf)); + + @Override + public long hash(Number v) { + // have to use a bit of reflection to ensure good hash values since + // we don't have truly type specific stats + if (v instanceof Long) { + return hasher.hashLong(v.longValue()).asLong(); + } else if (v instanceof Integer) { + return hasher.hashInt(v.intValue()).asLong(); + } else if (v instanceof Double) { + return hasher.hashLong(Double.doubleToRawLongBits(v.doubleValue())).asLong(); + } else if (v instanceof Float) { + return hasher.hashInt(Float.floatToRawIntBits(v.floatValue())).asLong(); + } else if (v instanceof Byte) { + return hasher.newHasher().putByte(v.byteValue()).hash().asLong(); + } else if (v instanceof Short) { + return hasher.newHasher().putShort(v.shortValue()).hash().asLong(); + } + // else...
+ throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, + "Unsupported Numeric Type ("+v.getClass()+") for hashing: " +statsField); } - } - - @Override - public void updateTypeSpecificStats(Number v, int count) { - double value = v.doubleValue(); - if (computeSumOfSquares) { - sumOfSquares += (value * value * count); // for std deviation + + @Override + public void accumulate(int docID) throws IOException { + if (values.exists(docID)) { + Number value = (Number) values.objectVal(docID); + accumulate(value, 1); + } else { + missing(); + } } - if (computeSum) { - sum += value * count; + + @Override + public void updateTypeSpecificStats(@SuppressWarnings({"rawtypes"})NamedList stv) { + if (computeSum) { + sum += ((Number) stv.get("sum")).doubleValue(); + } + if (computeSumOfSquares) { + sumOfSquares += ((Number) stv.get("sumOfSquares")).doubleValue(); + } + + if (computePercentiles) { + byte[] data = (byte[]) stv.get("percentiles"); + ByteBuffer buf = ByteBuffer.wrap(data); + tdigest.add(AVLTreeDigest.fromBytes(buf)); + } } - if (computePercentiles) { - tdigest.add(value, count); + + @Override + public void updateTypeSpecificStats(Number v, int count) { + double value = v.doubleValue(); + if (computeSumOfSquares) { + sumOfSquares += (value * value * count); // for std deviation + } + if (computeSum) { + sum += value * count; + } + if (computePercentiles) { + tdigest.add(value, count); + } } - } - - @Override - protected void updateMinMax(Number min, Number max) { - // we always use the double values, because that way the response Object class is - // consistent regardless of whether we only have 1 value or many that we min/max - // - // TODO: would be nice to have subclasses for each type of Number ... breaks backcompat - - if (computeMin) { // nested if to encourage JIT to optimize aware final var? - if (null != min) { - double minD = min.doubleValue(); - if (null == this.min || minD < this.minD) { - // Double for result & cached primitive double to minimize unboxing in future comparisons - this.min = this.minD = minD; + + @Override + protected void updateMinMax(Number min, Number max) { + // we always use the double values, because that way the response Object class is + // consistent regardless of whether we only have 1 value or many that we min/max + // + // TODO: would be nice to have subclasses for each type of Number ... breaks backcompat + + if (computeMin) { // nested if to encourage JIT to optimize aware final var? + if (null != min) { + double minD = min.doubleValue(); + if (null == this.min || minD < this.minD) { + // Double for result & cached primitive double to minimize unboxing in future comparisons + this.min = this.minD = minD; + } } } - } - if (computeMax) { // nested if to encourage JIT to optimize aware final var? - if (null != max) { - double maxD = max.doubleValue(); - if (null == this.max || this.maxD < maxD) { - // Double for result & cached primitive double to minimize unboxing in future comparisons - this.max = this.maxD = maxD; + if (computeMax) { // nested if to encourage JIT to optimize aware final var? 
+ if (null != max) { + double maxD = max.doubleValue(); + if (null == this.max || this.maxD < maxD) { + // Double for result & cached primitive double to minimize unboxing in future comparisons + this.max = this.maxD = maxD; + } } } } - } - - /** - * Adds sum, sumOfSquares, mean, stddev, and percentiles to the given - * NamedList - * - * @param res - * NamedList to add the type specific statistics too - */ - @Override - protected void addTypeSpecificStats(NamedList res) { - if (statsField.includeInResponse(Stat.sum)) { - res.add("sum", sum); - } - if (statsField.includeInResponse(Stat.sumOfSquares)) { - res.add("sumOfSquares", sumOfSquares); - } - if (statsField.includeInResponse(Stat.mean)) { - res.add("mean", sum / count); - } - if (statsField.includeInResponse(Stat.stddev)) { - res.add("stddev", getStandardDeviation()); - } - if (statsField.includeInResponse(Stat.percentiles)) { - if (statsField.getIsShard()) { - // as of current t-digest version, smallByteSize() internally does a full conversion in - // order to determine what the size is (can't be precomputed?) .. so rather then - // serialize to a ByteBuffer twice, allocate the max possible size buffer, - // serialize once, and then copy only the byte[] subset that we need, and free up the buffer - ByteBuffer buf = ByteBuffer.allocate(tdigest.byteSize()); // upper bound - tdigest.asSmallBytes(buf); - res.add("percentiles", Arrays.copyOf(buf.array(), buf.position()) ); - } else { - NamedList percentileNameList = new NamedList(); - for (Double percentile : statsField.getPercentilesList()) { - // Empty document set case - if (tdigest.size() == 0) { - percentileNameList.add(percentile.toString(), null); - } else { - Double cutoff = tdigest.quantile(percentile / 100); - percentileNameList.add(percentile.toString(), cutoff); + + /** + * Adds sum, sumOfSquares, mean, stddev, and percentiles to the given + * NamedList + * + * @param res + * NamedList to add the type specific statistics too + */ + @Override + protected void addTypeSpecificStats(NamedList res) { + if (statsField.includeInResponse(Stat.sum)) { + res.add("sum", sum); + } + if (statsField.includeInResponse(Stat.sumOfSquares)) { + res.add("sumOfSquares", sumOfSquares); + } + if (statsField.includeInResponse(Stat.mean)) { + res.add("mean", sum / count); + } + if (statsField.includeInResponse(Stat.stddev)) { + res.add("stddev", getStandardDeviation()); + } + if (statsField.includeInResponse(Stat.percentiles)) { + if (statsField.getIsShard()) { + // as of current t-digest version, smallByteSize() internally does a full conversion in + // order to determine what the size is (can't be precomputed?) .. 
so rather than + // serialize to a ByteBuffer twice, allocate the max possible size buffer, + // serialize once, and then copy only the byte[] subset that we need, and free up the buffer + ByteBuffer buf = ByteBuffer.allocate(tdigest.byteSize()); // upper bound + tdigest.asSmallBytes(buf); + res.add("percentiles", Arrays.copyOf(buf.array(), buf.position()) ); + } else { + NamedList percentileNameList = new NamedList(); + for (Double percentile : statsField.getPercentilesList()) { + // Empty document set case + if (tdigest.size() == 0) { + percentileNameList.add(percentile.toString(), null); + } else { + Double cutoff = tdigest.quantile(percentile / 100); + percentileNameList.add(percentile.toString(), cutoff); + } } + res.add("percentiles", percentileNameList); } } } + + + /** + * Calculates the standard deviation statistic + * + * @return Standard deviation statistic + */ + private double getStandardDeviation() { + if (count <= 1.0D) { + return 0.0D; + } + + return Math.sqrt(((count * sumOfSquares) - (sum * sum)) / (count * (count - 1.0D))); + + } } - - + /** - * Calculates the standard deviation statistic - * - * @return Standard deviation statistic + * Implementation of StatsValues that supports EnumField values */ - private double getStandardDeviation() { - if (count <= 1.0D) { - return 0.0D; + private static class EnumStatsValues extends AbstractStatsValues<EnumFieldValue> { + + public EnumStatsValues(StatsField statsField) { + super(statsField); } - - return Math.sqrt(((count * sumOfSquares) - (sum * sum)) / (count * (count - 1.0D))); - - } -} -/** - * Implementation of StatsValues that supports EnumField values - */ -class EnumStatsValues extends AbstractStatsValues<EnumFieldValue> { - - public EnumStatsValues(StatsField statsField) { - super(statsField); - } - - @Override - public long hash(EnumFieldValue v) { - return hasher.hashInt(v.toInt().intValue()).asLong(); - } + @Override + public long hash(EnumFieldValue v) { + return hasher.hashInt(v.toInt().intValue()).asLong(); + } - @Override - public void accumulate(int docID) throws IOException { - if (values.exists(docID)) { - Integer intValue = (Integer) values.objectVal(docID); - String stringValue = values.strVal(docID); - EnumFieldValue enumFieldValue = new EnumFieldValue(intValue, stringValue); - accumulate(enumFieldValue, 1); - } else { - missing(); + @Override + public void accumulate(int docID) throws IOException { + if (values.exists(docID)) { + Integer intValue = (Integer) values.objectVal(docID); + String stringValue = values.strVal(docID); + EnumFieldValue enumFieldValue = new EnumFieldValue(intValue, stringValue); + accumulate(enumFieldValue, 1); + } else { + missing(); + } } - } - - protected void updateMinMax(EnumFieldValue min, EnumFieldValue max) { - if (computeMin) { // nested if to encourage JIT to optimize aware final var? - if (null != min) { - if (null == this.min || (min.compareTo(this.min) < 0)) { - this.min = min; + + protected void updateMinMax(EnumFieldValue min, EnumFieldValue max) { + if (computeMin) { // nested if to encourage JIT to optimize aware final var? + if (null != min) { + if (null == this.min || (min.compareTo(this.min) < 0)) { + this.min = min; + } } } - } - if (computeMax) { // nested if to encourage JIT to optimize aware final var?
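getStandardDeviation() above computes the sample standard deviation purely from running sums, using the identity sum((x - mean)^2) = sum(x^2) - (sum(x))^2 / n, which rearranges to s = sqrt((n*sum(x^2) - (sum(x))^2) / (n*(n - 1))). A self-contained check of that identity (the values are chosen purely for illustration):

    // Worked check of the running-sums formula used by getStandardDeviation().
    class StddevIdentityCheck {
      public static void main(String[] args) {
        // x = {2, 4, 6}: n = 3, sum = 12, sumOfSquares = 4 + 16 + 36 = 56
        double count = 3, sum = 12, sumOfSquares = 56;
        double stddev = Math.sqrt(((count * sumOfSquares) - (sum * sum))
            / (count * (count - 1.0D)));
        // mean is 4, squared deviations {4, 0, 4} => s^2 = 8/2 = 4, s = 2
        System.out.println(stddev); // prints 2.0
      }
    }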
+ if (null != max) { + if (null == this.max || (max.compareTo(this.max) > 0)) { + this.max = max; + } } } } - - @Override - protected void updateTypeSpecificStats(NamedList stv) { - // No type specific stats - } - - @Override - protected void updateTypeSpecificStats(EnumFieldValue value, int count) { - // No type specific stats - } - - /** - * Adds no type specific statistics - */ - @Override - protected void addTypeSpecificStats(NamedList res) { - // Add no statistics - } - -} - -/** - * /** Implementation of StatsValues that supports Date values - */ -class DateStatsValues extends AbstractStatsValues<Date> { - - private double sum = 0.0; - double sumOfSquares = 0; - - final protected boolean computeSum; - final protected boolean computeSumOfSquares; - - public DateStatsValues(StatsField statsField) { - super(statsField); - this.computeSum = statsField.calculateStats(Stat.sum); - this.computeSumOfSquares = statsField.calculateStats(Stat.sumOfSquares); - } - @Override - public long hash(Date v) { - return hasher.hashLong(v.getTime()).asLong(); - } - - @Override - public void accumulate(int docID) throws IOException { - if (values.exists(docID)) { - accumulate((Date) values.objectVal(docID), 1); - } else { - missing(); + @Override + protected void updateTypeSpecificStats(@SuppressWarnings({"rawtypes"})NamedList stv) { + // No type specific stats } - } - - @Override - protected void updateTypeSpecificStats(NamedList stv) { - if (computeSum) { - sum += ((Number) stv.get("sum")).doubleValue(); + + @Override + protected void updateTypeSpecificStats(EnumFieldValue value, int count) { + // No type specific stats } - if (computeSumOfSquares) { - sumOfSquares += ((Number) stv.get("sumOfSquares")).doubleValue(); + + /** + * Adds no type specific statistics + */ + @Override + protected void addTypeSpecificStats(NamedList res) { + // Add no statistics } + } - - @Override - public void updateTypeSpecificStats(Date v, int count) { - long value = v.getTime(); - if (computeSumOfSquares) { - sumOfSquares += ((double)value * value * count); // for std deviation + + /** + * Implementation of StatsValues that supports Date values + */ + static class DateStatsValues extends AbstractStatsValues<Date> { + + private double sum = 0.0; + double sumOfSquares = 0; + + final protected boolean computeSum; + final protected boolean computeSumOfSquares; + + public DateStatsValues(StatsField statsField) { + super(statsField); + this.computeSum = statsField.calculateStats(Stat.sum); + this.computeSumOfSquares = statsField.calculateStats(Stat.sumOfSquares); } - if (computeSum) { - sum += value * count; + + @Override + public long hash(Date v) { + return hasher.hashLong(v.getTime()).asLong(); } - } - - @Override - protected void updateMinMax(Date min, Date max) { - if (computeMin) { // nested if to encourage JIT to optimize aware final var? - if (null != min && (this.min==null || this.min.after(min))) { - this.min = min; + + @Override + public void accumulate(int docID) throws IOException { + if (values.exists(docID)) { + accumulate((Date) values.objectVal(docID), 1); + } else { + missing(); } } - if (computeMax) { // nested if to encourage JIT to optimize aware final var?
- if (null != max && (this.max==null || this.max.before(max))) { - this.max = max; + + @Override + protected void updateTypeSpecificStats(@SuppressWarnings({"rawtypes"})NamedList stv) { + if (computeSum) { + sum += ((Number) stv.get("sum")).doubleValue(); + } + if (computeSumOfSquares) { + sumOfSquares += ((Number) stv.get("sumOfSquares")).doubleValue(); } } - } - - /** - * Adds sum and mean statistics to the given NamedList - * - * @param res - * NamedList to add the type specific statistics too - */ - @Override - protected void addTypeSpecificStats(NamedList res) { - if (statsField.includeInResponse(Stat.sum)) { - res.add("sum", sum); + + @Override + public void updateTypeSpecificStats(Date v, int count) { + long value = v.getTime(); + if (computeSumOfSquares) { + sumOfSquares += ((double)value * value * count); // for std deviation + } + if (computeSum) { + sum += value * count; + } } - if (statsField.includeInResponse(Stat.mean)) { - res.add("mean", (count > 0) ? new Date((long)(sum / count)) : null); + + @Override + protected void updateMinMax(Date min, Date max) { + if (computeMin) { // nested if to encourage JIT to optimize aware final var? + if (null != min && (this.min==null || this.min.after(min))) { + this.min = min; + } + } + if (computeMax) { // nested if to encourage JIT to optimize aware final var? + if (null != max && (this.max==null || this.max.before(max))) { + this.max = max; + } + } } - if (statsField.includeInResponse(Stat.sumOfSquares)) { - res.add("sumOfSquares", sumOfSquares); + + /** + * Adds sum and mean statistics to the given NamedList + * + * @param res + * NamedList to add the type specific statistics too + */ + @Override + protected void addTypeSpecificStats(NamedList res) { + if (statsField.includeInResponse(Stat.sum)) { + res.add("sum", sum); + } + if (statsField.includeInResponse(Stat.mean)) { + res.add("mean", (count > 0) ? new Date((long)(sum / count)) : null); + } + if (statsField.includeInResponse(Stat.sumOfSquares)) { + res.add("sumOfSquares", sumOfSquares); + } + if (statsField.includeInResponse(Stat.stddev)) { + res.add("stddev", getStandardDeviation()); + } } - if (statsField.includeInResponse(Stat.stddev)) { - res.add("stddev", getStandardDeviation()); + + /** + * Calculates the standard deviation. For dates, this is really the MS + * deviation + * + * @return Standard deviation statistic + */ + private double getStandardDeviation() { + if (count <= 1) { + return 0.0D; + } + return Math.sqrt(((count * sumOfSquares) - (sum * sum)) + / (count * (count - 1.0D))); } } - + /** - * Calculates the standard deviation. 
For dates, this is really the MS - * deviation - * - * @return Standard deviation statistic + * Implementation of StatsValues that supports String values */ - private double getStandardDeviation() { - if (count <= 1) { - return 0.0D; + private static class StringStatsValues extends AbstractStatsValues { + + public StringStatsValues(StatsField statsField) { + super(statsField); } - return Math.sqrt(((count * sumOfSquares) - (sum * sum)) - / (count * (count - 1.0D))); - } -} -/** - * Implementation of StatsValues that supports String values - */ -class StringStatsValues extends AbstractStatsValues { - - public StringStatsValues(StatsField statsField) { - super(statsField); - } + @Override + public long hash(String v) { + return hasher.hashString(v, StandardCharsets.UTF_8).asLong(); + } - @Override - public long hash(String v) { - return hasher.hashString(v, StandardCharsets.UTF_8).asLong(); - } - - @Override - public void accumulate(int docID) throws IOException { - if (values.exists(docID)) { - String value = values.strVal(docID); - if (value != null) { - accumulate(value, 1); - } else { + @Override + public void accumulate(int docID) throws IOException { + if (values.exists(docID)) { + String value = values.strVal(docID); + if (value != null) { + accumulate(value, 1); + } else { + missing(); + } + } else { missing(); } - } else { - missing(); } - } - - @Override - protected void updateTypeSpecificStats(NamedList stv) { - // No type specific stats - } - - @Override - protected void updateTypeSpecificStats(String value, int count) { - // No type specific stats - } - - @Override - protected void updateMinMax(String min, String max) { - if (computeMin) { // nested if to encourage JIT to optimize aware final var? - this.min = min(this.min, min); + + @Override + protected void updateTypeSpecificStats(@SuppressWarnings({"rawtypes"})NamedList stv) { + // No type specific stats } - if (computeMax) { // nested if to encourage JIT to optimize aware final var? - this.max = max(this.max, max); + + @Override + protected void updateTypeSpecificStats(String value, int count) { + // No type specific stats } - } - - /** - * Adds no type specific statistics - */ - @Override - protected void addTypeSpecificStats(NamedList res) { - // Add no statistics - } - - /** - * Determines which of the given Strings is the maximum, as computed by - * {@link String#compareTo(String)} - * - * @param str1 - * String to compare against b - * @param str2 - * String compared against a - * @return str1 if it is considered greater by - * {@link String#compareTo(String)}, str2 otherwise - */ - private static String max(String str1, String str2) { - if (str1 == null) { - return str2; - } else if (str2 == null) { - return str1; + + @Override + protected void updateMinMax(String min, String max) { + if (computeMin) { // nested if to encourage JIT to optimize aware final var? + this.min = min(this.min, min); + } + if (computeMax) { // nested if to encourage JIT to optimize aware final var? + this.max = max(this.max, max); + } } - return (str1.compareTo(str2) > 0) ? 
str1 : str2; - } - - /** - * Determines which of the given Strings is the minimum, as computed by - * {@link String#compareTo(String)} - * - * @param str1 - * String to compare against b - * @param str2 - * String compared against a - * @return str1 if it is considered less by {@link String#compareTo(String)}, - * str2 otherwise - */ - private static String min(String str1, String str2) { - if (str1 == null) { - return str2; - } else if (str2 == null) { - return str1; + + /** + * Adds no type specific statistics + */ + @Override + protected void addTypeSpecificStats(NamedList res) { + // Add no statistics + } + + /** + * Determines which of the given Strings is the maximum, as computed by + * {@link String#compareTo(String)} + * + * @param str1 + * String to compare against b + * @param str2 + * String compared against a + * @return str1 if it is considered greater by + * {@link String#compareTo(String)}, str2 otherwise + */ + private static String max(String str1, String str2) { + if (str1 == null) { + return str2; + } else if (str2 == null) { + return str1; + } + return (str1.compareTo(str2) > 0) ? str1 : str2; + } + + /** + * Determines which of the given Strings is the minimum, as computed by + * {@link String#compareTo(String)} + * + * @param str1 + * String to compare against b + * @param str2 + * String compared against a + * @return str1 if it is considered less by {@link String#compareTo(String)}, + * str2 otherwise + */ + private static String min(String str1, String str2) { + if (str1 == null) { + return str2; + } else if (str2 == null) { + return str1; + } + return (str1.compareTo(str2) < 0) ? str1 : str2; } - return (str1.compareTo(str2) < 0) ? str1 : str2; } } + diff --git a/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java b/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java index 596e9f10a5d5..59a95717e933 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java @@ -84,11 +84,9 @@ public class SuggestComponent extends SearchComponent implements SolrCoreAware, /** SolrConfig label to identify boolean value to build suggesters on startup */ private static final String BUILD_ON_STARTUP_LABEL = "buildOnStartup"; - - @SuppressWarnings("unchecked") - protected NamedList initParams; - protected SolrMetricsContext metricsContext; + @SuppressWarnings({"rawtypes"}) + protected NamedList initParams; /** * Key is the dictionary name used in SolrConfig, value is the corresponding {@link SolrSuggester} @@ -107,7 +105,7 @@ private static class SuggesterResultLabels { @Override @SuppressWarnings("unchecked") - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init(args); this.initParams = args; } @@ -119,6 +117,7 @@ public void inform(SolrCore core) { boolean hasDefault = false; for (int i = 0; i < initParams.size(); i++) { if (initParams.getName(i).equals(CONFIG_PARAM_LABEL)) { + @SuppressWarnings({"rawtypes"}) NamedList suggesterParams = (NamedList) initParams.getVal(i); SolrSuggester suggester = new SolrSuggester(); String dictionary = suggester.init(suggesterParams, core); @@ -353,23 +352,18 @@ public String getDescription() { return "Suggester component"; } - @Override - public SolrMetricsContext getSolrMetricsContext() { - return metricsContext; - } - @Override public void initializeMetrics(SolrMetricsContext parentContext, String scope) { - this.metricsContext = 
parentContext.getChildContext(this); + super.initializeMetrics(parentContext, scope); - this.metricsContext.gauge(() -> ramBytesUsed(), true, "totalSizeInBytes", getCategory().toString()); + this.solrMetricsContext.gauge(() -> ramBytesUsed(), true, "totalSizeInBytes", getCategory().toString()); MetricsMap suggestersMap = new MetricsMap((detailed, map) -> { for (Map.Entry entry : suggesters.entrySet()) { SolrSuggester suggester = entry.getValue(); map.put(entry.getKey(), suggester.toString()); } }); - this.metricsContext.gauge(suggestersMap, true, "suggesters", getCategory().toString(), scope); + this.solrMetricsContext.gauge(suggestersMap, true, "suggesters", getCategory().toString(), scope); } @Override @@ -508,7 +502,7 @@ public SuggesterListener(SolrCore core, SolrSuggester checker, boolean buildOnCo } @Override - public void init(NamedList args) {} + public void init(@SuppressWarnings({"rawtypes"})NamedList args) {} @Override public void newSearcher(SolrIndexSearcher newSearcher, diff --git a/solr/core/src/java/org/apache/solr/handler/component/TermVectorComponent.java b/solr/core/src/java/org/apache/solr/handler/component/TermVectorComponent.java index 5a18ee4cc0f2..f28e6e3e62c3 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/TermVectorComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/TermVectorComponent.java @@ -84,6 +84,7 @@ public class TermVectorComponent extends SearchComponent implements SolrCoreAwar private static final String TV_KEY_WARNINGS = "warnings"; + @SuppressWarnings({"rawtypes"}) protected NamedList initParams; /** @@ -412,6 +413,8 @@ public void finishStage(ResponseBuilder rb) { if (rb.stage == ResponseBuilder.STAGE_GET_FIELDS) { NamedList termVectorsNL = new NamedList<>(); + + @SuppressWarnings({"unchecked", "rawtypes"}) Map.Entry[] arr = new NamedList.NamedListEntry[rb.resultIds.size()]; for (ShardRequest sreq : rb.finished) { @@ -419,6 +422,7 @@ public void finishStage(ResponseBuilder rb) { continue; } for (ShardResponse srsp : sreq.responses) { + @SuppressWarnings({"unchecked"}) NamedList nl = (NamedList)srsp.getSolrResponse().getResponse().get(TERM_VECTORS); // Add metadata (that which isn't a uniqueKey value): @@ -450,7 +454,7 @@ public void modifyRequest(ResponseBuilder rb, SearchComponent who, ShardRequest //////////////////////// NamedListInitializedPlugin methods ////////////////////// @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init(args); this.initParams = args; } diff --git a/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java b/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java index 71d1390ecea1..4eddadc108ac 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java @@ -89,7 +89,7 @@ public class TermsComponent extends SearchComponent { !HttpShardHandlerFactory.doGetDisableShardsWhitelist()); @Override - public void init( NamedList args ) + public void init( @SuppressWarnings({"rawtypes"})NamedList args ) { super.init(args); whitelistHostChecker = new WhitelistHostChecker( @@ -415,6 +415,7 @@ public void handleResponses(ResponseBuilder rb, ShardRequest sreq) { th.parse(terms); + @SuppressWarnings({"unchecked"}) NamedList stats = (NamedList)srsp.getSolrResponse().getResponse().get("indexstats"); if(stats != null) { th.numDocs += stats.get("numDocs").longValue(); @@ -431,6 +432,7 @@ public 
void finishStage(ResponseBuilder rb) { } TermsHelper ti = rb._termsHelper; + @SuppressWarnings({"rawtypes"}) NamedList terms = ti.buildResponse(); rb.rsp.add("terms", terms); diff --git a/solr/core/src/java/org/apache/solr/handler/export/DoubleCmp.java b/solr/core/src/java/org/apache/solr/handler/export/DoubleComp.java similarity index 69% rename from solr/core/src/java/org/apache/solr/handler/export/DoubleCmp.java rename to solr/core/src/java/org/apache/solr/handler/export/DoubleComp.java index 50341fd490cd..69739484086c 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/DoubleCmp.java +++ b/solr/core/src/java/org/apache/solr/handler/export/DoubleComp.java @@ -19,25 +19,27 @@ interface DoubleComp { int compare(double a, double b); + double resetValue(); -} -class DoubleAsc implements DoubleComp { - public double resetValue() { - return Double.MAX_VALUE; - } - public int compare(double a, double b) { - return Double.compare(b, a); - } -} + static class DoubleAsc implements DoubleComp { + public double resetValue() { + return Double.MAX_VALUE; + } -class DoubleDesc implements DoubleComp { - public double resetValue() { - return -Double.MAX_VALUE; + public int compare(double a, double b) { + return Double.compare(b, a); + } } - public int compare(double a, double b) { - return Double.compare(a, b); + static class DoubleDesc implements DoubleComp { + public double resetValue() { + return -Double.MAX_VALUE; + } + + public int compare(double a, double b) { + return Double.compare(a, b); + } } } diff --git a/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java b/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java index e4d6da0a5bb4..c7a29dc9a3df 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java +++ b/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java @@ -35,12 +35,29 @@ import org.apache.lucene.util.BitSetIterator; import org.apache.lucene.util.FixedBitSet; import org.apache.solr.client.solrj.impl.BinaryResponseParser; +import org.apache.solr.client.solrj.io.Tuple; +import org.apache.solr.client.solrj.io.comp.ComparatorOrder; +import org.apache.solr.client.solrj.io.comp.FieldComparator; +import org.apache.solr.client.solrj.io.comp.MultipleFieldComparator; +import org.apache.solr.client.solrj.io.comp.StreamComparator; +import org.apache.solr.client.solrj.io.stream.StreamContext; +import org.apache.solr.client.solrj.io.stream.TupleStream; +import org.apache.solr.client.solrj.io.stream.expr.Explanation; +import org.apache.solr.client.solrj.io.stream.expr.Expressible; +import org.apache.solr.client.solrj.io.stream.expr.StreamExplanation; +import org.apache.solr.client.solrj.io.stream.expr.StreamExpression; +import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParameter; +import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter; +import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParser; +import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; import org.apache.solr.common.IteratorWriter; import org.apache.solr.common.MapWriter; import org.apache.solr.common.MapWriter.EntryWriter; import org.apache.solr.common.PushWriter; import org.apache.solr.common.SolrException; +import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.params.StreamParams; import org.apache.solr.common.util.JavaBinCodec; import org.apache.solr.core.SolrCore; import 
org.apache.solr.request.SolrQueryRequest; @@ -84,23 +101,165 @@ * once), and it allows {@link ExportWriter} to scale well with regard to numDocs. */ public class ExportWriter implements SolrCore.RawWriter, Closeable { - private static final int DOCUMENT_BATCH_SIZE = 30000; private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + private static final int DOCUMENT_BATCH_SIZE = 30000; + + private static final String EXPORT_WRITER_KEY = "__ew__"; + private static final String SORT_DOCS_KEY = "_ew_docs_"; + private static final String TOTAL_HITS_KEY = "_ew_totalHits_"; + private static final String LEAF_READERS_KEY = "_ew_leaves_"; + private static final String SORT_QUEUE_KEY = "_ew_queue_"; + private static final String SORT_DOC_KEY = "_ew_sort_"; + private OutputStreamWriter respWriter; final SolrQueryRequest req; final SolrQueryResponse res; + final StreamContext initialStreamContext; + StreamExpression streamExpression; + StreamContext streamContext; FieldWriter[] fieldWriters; int totalHits = 0; FixedBitSet[] sets = null; PushWriter writer; private String wt; + private static class TupleEntryWriter implements EntryWriter { + Tuple tuple; + + void setTuple(Tuple tuple) { + this.tuple = tuple; + } + + @Override + public EntryWriter put(CharSequence k, Object v) throws IOException { + tuple.put(k, v); + return this; + } + } + + public static class ExportWriterStream extends TupleStream implements Expressible { + StreamContext context; + StreamComparator streamComparator; + int pos = -1; + int outDocIndex = -1; + int count; + SortDoc sortDoc; + SortQueue queue; + SortDoc[] docs; + int totalHits; + ExportWriter exportWriter; + List leaves; + final TupleEntryWriter entryWriter = new TupleEntryWriter(); + + public ExportWriterStream(StreamExpression expression, StreamFactory factory) throws IOException { + streamComparator = parseComp(factory.getDefaultSort()); + } + + @Override + public void setStreamContext(StreamContext context) { + this.context = context; + } + + @Override + public List children() { + return null; + } + + private StreamComparator parseComp(String sort) throws IOException { - public ExportWriter(SolrQueryRequest req, SolrQueryResponse res, String wt) { + String[] sorts = sort.split(","); + StreamComparator[] comps = new StreamComparator[sorts.length]; + for(int i=0; i 1) { + return new MultipleFieldComparator(comps); + } else { + return comps[0]; + } + } + + @Override + @SuppressWarnings({"unchecked"}) + public void open() throws IOException { + docs = (SortDoc[]) context.get(SORT_DOCS_KEY); + queue = (SortQueue) context.get(SORT_QUEUE_KEY); + sortDoc = (SortDoc) context.get(SORT_DOC_KEY); + totalHits = (Integer) context.get(TOTAL_HITS_KEY); + exportWriter = (ExportWriter) context.get(EXPORT_WRITER_KEY); + leaves = (List) context.get(LEAF_READERS_KEY); + count = 0; + } + + @Override + public void close() throws IOException { + exportWriter = null; + leaves = null; + } + + @Override + public Tuple read() throws IOException { + if (pos < 0) { + if (count < totalHits) { + outDocIndex = exportWriter.fillOutDocs(leaves, sortDoc, queue, docs); + count += (outDocIndex + 1); + pos = outDocIndex; + } else { + return Tuple.EOF(); + } + } + if (pos < 0) { + return Tuple.EOF(); + } + Tuple tuple = new Tuple(); + entryWriter.setTuple(tuple); + SortDoc s = docs[pos]; + exportWriter.writeDoc(s, leaves, entryWriter); + s.reset(); + pos--; + return tuple; + } + + @Override + public StreamComparator getStreamSort() { + return streamComparator; + } + + 
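ExportWriterStream adapts the export batching machinery to the TupleStream contract: open() lifts the shared state (sort doc batch, sort queue, leaf readers, total hit count, the ExportWriter itself) out of the StreamContext, and read() drains the current batch, asking the ExportWriter to refill it via fillOutDocs until totalHits documents have been emitted, after which it returns Tuple.EOF(). Consumers follow the standard open/read-until-EOF/close discipline, as the writeDocs() loop further down also does. A sketch of that consuming loop under the same contract (the drain helper and process callback are illustrative, not part of the patch):

    import java.io.IOException;
    import java.util.function.Consumer;
    import org.apache.solr.client.solrj.io.Tuple;
    import org.apache.solr.client.solrj.io.stream.TupleStream;

    // Illustrative consumer of any TupleStream, including ExportWriterStream.
    class TupleDrain {
      static void drain(TupleStream stream, Consumer<Tuple> process) throws IOException {
        stream.open();
        try {
          for (;;) {
            Tuple t = stream.read();
            if (t == null || t.EOF) {   // defensive null check, as in writeDocs()
              break;
            }
            process.accept(t);          // one exported document per tuple, in sort order
          }
        } finally {
          stream.close();
        }
      }
    }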
@Override + public StreamExpressionParameter toExpression(StreamFactory factory) throws IOException { + StreamExpression expression = new StreamExpression(factory.getFunctionName(this.getClass())); + return expression; + } + + @Override + public Explanation toExplanation(StreamFactory factory) throws IOException { + return new StreamExplanation(getStreamNodeId().toString()) + .withFunctionName("input") + .withImplementingClass(this.getClass().getName()) + .withExpressionType(Explanation.ExpressionType.STREAM_SOURCE) + .withExpression("--non-expressible--"); + } + } + + + public ExportWriter(SolrQueryRequest req, SolrQueryResponse res, String wt, StreamContext initialStreamContext) { this.req = req; this.res = res; this.wt = wt; - + this.initialStreamContext = initialStreamContext; } @Override @@ -216,6 +375,36 @@ public void write(OutputStream os) throws IOException { return; } + String expr = params.get(StreamParams.EXPR); + if (expr != null) { + StreamFactory streamFactory = initialStreamContext.getStreamFactory(); + streamFactory.withDefaultSort(params.get(CommonParams.SORT)); + try { + StreamExpression expression = StreamExpressionParser.parse(expr); + if (streamFactory.isEvaluator(expression)) { + streamExpression = new StreamExpression(StreamParams.TUPLE); + streamExpression.addParameter(new StreamExpressionNamedParameter(StreamParams.RETURN_VALUE, expression)); + } else { + streamExpression = expression; + } + } catch (Exception e) { + writeException(e, writer, true); + return; + } + streamContext = new StreamContext(); + streamContext.setRequestParams(params); + streamContext.setLocal(true); + + streamContext.workerID = 0; + streamContext.numWorkers = 1; + streamContext.setSolrClientCache(initialStreamContext.getSolrClientCache()); + streamContext.setModelCache(initialStreamContext.getModelCache()); + streamContext.setObjectCache(initialStreamContext.getObjectCache()); + streamContext.put("core", req.getCore().getName()); + streamContext.put("solr-core", req.getCore()); + streamContext.put(CommonParams.SORT, params.get(CommonParams.SORT)); + } + writer.writeMap(m -> { m.put("responseHeader", singletonMap("status", 0)); m.put("response", (MapWriter) mw -> { @@ -223,7 +412,18 @@ public void write(OutputStream os) throws IOException { mw.put("docs", (IteratorWriter) iw -> writeDocs(req, iw, sort)); }); }); + if (streamContext != null) { + streamContext = null; + } + } + private TupleStream createTupleStream() throws IOException { + StreamFactory streamFactory = (StreamFactory)initialStreamContext.getStreamFactory().clone(); + //Set the sort in the stream factory so it can be used during initialization. 
+ streamFactory.withDefaultSort(((String)streamContext.get(CommonParams.SORT))); + TupleStream tupleStream = streamFactory.constructStream(streamExpression); + tupleStream.setStreamContext(streamContext); + return tupleStream; } protected void identifyLowestSortingUnexportedDocs(List leaves, SortDoc sortDoc, SortQueue queue) throws IOException { @@ -285,22 +485,47 @@ protected void addDocsToItemWriter(List leaves, IteratorWrite protected void writeDocs(SolrQueryRequest req, IteratorWriter.ItemWriter writer, Sort sort) throws IOException { List leaves = req.getSearcher().getTopReaderContext().leaves(); SortDoc sortDoc = getSortDoc(req.getSearcher(), sort.getSort()); - int count = 0; final int queueSize = Math.min(DOCUMENT_BATCH_SIZE, totalHits); SortQueue queue = new SortQueue(queueSize, sortDoc); SortDoc[] outDocs = new SortDoc[queueSize]; - while (count < totalHits) { - identifyLowestSortingUnexportedDocs(leaves, sortDoc, queue); - int outDocsIndex = transferBatchToArrayForOutput(queue, outDocs); - - count += (outDocsIndex + 1); - addDocsToItemWriter(leaves, writer, outDocs, outDocsIndex); + if (streamExpression != null) { + streamContext.put(SORT_DOCS_KEY, outDocs); + streamContext.put(SORT_QUEUE_KEY, queue); + streamContext.put(SORT_DOC_KEY, sortDoc); + streamContext.put(TOTAL_HITS_KEY, totalHits); + streamContext.put(EXPORT_WRITER_KEY, this); + streamContext.put(LEAF_READERS_KEY, leaves); + TupleStream tupleStream = createTupleStream(); + tupleStream.open(); + for (;;) { + final Tuple t = tupleStream.read(); + if (t == null) { + break; + } + if (t.EOF) { + break; + } + writer.add((MapWriter) ew -> t.writeMap(ew)); + } + tupleStream.close(); + } else { + for (int count = 0; count < totalHits; ) { + int outDocsIndex = fillOutDocs(leaves, sortDoc, queue, outDocs); + count += (outDocsIndex + 1); + addDocsToItemWriter(leaves, writer, outDocs, outDocsIndex); + } } } - protected void writeDoc(SortDoc sortDoc, + private int fillOutDocs(List leaves, SortDoc sortDoc, + SortQueue sortQueue, SortDoc[] outDocs) throws IOException { + identifyLowestSortingUnexportedDocs(leaves, sortDoc, sortQueue); + return transferBatchToArrayForOutput(sortQueue, outDocs); + } + + void writeDoc(SortDoc sortDoc, List leaves, EntryWriter ew) throws IOException { @@ -408,41 +633,41 @@ private SortDoc getSortDoc(SolrIndexSearcher searcher, SortField[] sortFields) t if (ft instanceof IntValueFieldType) { if (reverse) { - sortValues[i] = new IntValue(field, new IntDesc()); + sortValues[i] = new IntValue(field, new IntComp.IntDesc()); } else { - sortValues[i] = new IntValue(field, new IntAsc()); + sortValues[i] = new IntValue(field, new IntComp.IntAsc()); } } else if (ft instanceof FloatValueFieldType) { if (reverse) { - sortValues[i] = new FloatValue(field, new FloatDesc()); + sortValues[i] = new FloatValue(field, new FloatComp.FloatDesc()); } else { - sortValues[i] = new FloatValue(field, new FloatAsc()); + sortValues[i] = new FloatValue(field, new FloatComp.FloatAsc()); } } else if (ft instanceof DoubleValueFieldType) { if (reverse) { - sortValues[i] = new DoubleValue(field, new DoubleDesc()); + sortValues[i] = new DoubleValue(field, new DoubleComp.DoubleDesc()); } else { - sortValues[i] = new DoubleValue(field, new DoubleAsc()); + sortValues[i] = new DoubleValue(field, new DoubleComp.DoubleAsc()); } } else if (ft instanceof LongValueFieldType) { if (reverse) { - sortValues[i] = new LongValue(field, new LongDesc()); + sortValues[i] = new LongValue(field, new LongComp.LongDesc()); } else { - sortValues[i] = new 
LongValue(field, new LongAsc()); + sortValues[i] = new LongValue(field, new LongComp.LongAsc()); } } else if (ft instanceof StrField || ft instanceof SortableTextField) { LeafReader reader = searcher.getSlowAtomicReader(); SortedDocValues vals = reader.getSortedDocValues(field); if (reverse) { - sortValues[i] = new StringValue(vals, field, new IntDesc()); + sortValues[i] = new StringValue(vals, field, new IntComp.IntDesc()); } else { - sortValues[i] = new StringValue(vals, field, new IntAsc()); + sortValues[i] = new StringValue(vals, field, new IntComp.IntAsc()); } } else if (ft instanceof DateValueFieldType) { if (reverse) { - sortValues[i] = new LongValue(field, new LongDesc()); + sortValues[i] = new LongValue(field, new LongComp.LongDesc()); } else { - sortValues[i] = new LongValue(field, new LongAsc()); + sortValues[i] = new LongValue(field, new LongComp.LongAsc()); } } else if (ft instanceof BoolField) { // This is a bit of a hack, but since the boolean field stores ByteRefs, just like Strings @@ -451,9 +676,9 @@ private SortDoc getSortDoc(SolrIndexSearcher searcher, SortField[] sortFields) t LeafReader reader = searcher.getSlowAtomicReader(); SortedDocValues vals = reader.getSortedDocValues(field); if (reverse) { - sortValues[i] = new StringValue(vals, field, new IntDesc()); + sortValues[i] = new StringValue(vals, field, new IntComp.IntDesc()); } else { - sortValues[i] = new StringValue(vals, field, new IntAsc()); + sortValues[i] = new StringValue(vals, field, new IntComp.IntAsc()); } } else { throw new IOException("Sort fields must be one of the following types: int,float,long,double,string,date,boolean,SortableText"); diff --git a/solr/core/src/java/org/apache/solr/handler/export/FloatCmp.java b/solr/core/src/java/org/apache/solr/handler/export/FloatComp.java similarity index 70% rename from solr/core/src/java/org/apache/solr/handler/export/FloatCmp.java rename to solr/core/src/java/org/apache/solr/handler/export/FloatComp.java index 7ef078c76cfb..1ce6e57f0282 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/FloatCmp.java +++ b/solr/core/src/java/org/apache/solr/handler/export/FloatComp.java @@ -19,26 +19,26 @@ interface FloatComp { int compare(float a, float b); + float resetValue(); -} -class FloatAsc implements FloatComp { - public float resetValue() { - return Float.MAX_VALUE; - } + static class FloatAsc implements FloatComp { + public float resetValue() { + return Float.MAX_VALUE; + } - public int compare(float a, float b) { - return Float.compare(b, a); + public int compare(float a, float b) { + return Float.compare(b, a); + } } -} -class FloatDesc implements FloatComp { - public float resetValue() { - return -Float.MAX_VALUE; - } + static class FloatDesc implements FloatComp { + public float resetValue() { + return -Float.MAX_VALUE; + } - public int compare(float a, float b) { - return Float.compare(a, b); + public int compare(float a, float b) { + return Float.compare(a, b); + } } } - diff --git a/solr/core/src/java/org/apache/solr/handler/export/IntComp.java b/solr/core/src/java/org/apache/solr/handler/export/IntComp.java index ac83d5dc1e64..b44ebc842a64 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/IntComp.java +++ b/solr/core/src/java/org/apache/solr/handler/export/IntComp.java @@ -19,27 +19,29 @@ public interface IntComp { int compare(int a, int b); + int resetValue(); -} -class IntAsc implements IntComp { - public int resetValue() { - return Integer.MAX_VALUE; - } + static class IntAsc implements IntComp { - public int compare(int a, int 
b) { - return Integer.compare(b, a); + public int resetValue() { + return Integer.MAX_VALUE; + } + + public int compare(int a, int b) { + return Integer.compare(b, a); + } } -} -class IntDesc implements IntComp { + static class IntDesc implements IntComp { - public int resetValue() { - return Integer.MIN_VALUE; - } + public int resetValue() { + return Integer.MIN_VALUE; + } - public int compare(int a, int b) { - return Integer.compare(a, b); + public int compare(int a, int b) { + return Integer.compare(a, b); + } } -} +} \ No newline at end of file diff --git a/solr/core/src/java/org/apache/solr/handler/export/LongCmp.java b/solr/core/src/java/org/apache/solr/handler/export/LongComp.java similarity index 70% rename from solr/core/src/java/org/apache/solr/handler/export/LongCmp.java rename to solr/core/src/java/org/apache/solr/handler/export/LongComp.java index 7d997acba1ab..45a522c23f3d 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/LongCmp.java +++ b/solr/core/src/java/org/apache/solr/handler/export/LongComp.java @@ -19,27 +19,28 @@ interface LongComp { int compare(long a, long b); + long resetValue(); -} -class LongAsc implements LongComp { + static class LongAsc implements LongComp { - public long resetValue() { - return Long.MAX_VALUE; - } + public long resetValue() { + return Long.MAX_VALUE; + } - public int compare(long a, long b) { - return Long.compare(b, a); + public int compare(long a, long b) { + return Long.compare(b, a); + } } -} -class LongDesc implements LongComp { + static class LongDesc implements LongComp { - public long resetValue() { - return Long.MIN_VALUE; - } + public long resetValue() { + return Long.MIN_VALUE; + } - public int compare(long a, long b) { - return Long.compare(a, b); + public int compare(long a, long b) { + return Long.compare(a, b); + } } -} +} \ No newline at end of file diff --git a/solr/core/src/java/org/apache/solr/handler/export/SingleValueSortDoc.java b/solr/core/src/java/org/apache/solr/handler/export/SingleValueSortDoc.java index 963901c40e9a..164c07b6de96 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/SingleValueSortDoc.java +++ b/solr/core/src/java/org/apache/solr/handler/export/SingleValueSortDoc.java @@ -32,6 +32,11 @@ public SortValue getSortValue(String field) { return null; } + @Override + public SortValue[] getSortValues() { + return new SortValue[] { value1 }; + } + public void setNextReader(LeafReaderContext context) throws IOException { this.ord = context.ord; this.docBase = context.docBase; diff --git a/solr/core/src/java/org/apache/solr/handler/export/SortDoc.java b/solr/core/src/java/org/apache/solr/handler/export/SortDoc.java index 5e2c75de0ea1..292e795da4e2 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/SortDoc.java +++ b/solr/core/src/java/org/apache/solr/handler/export/SortDoc.java @@ -45,6 +45,10 @@ public SortValue getSortValue(String field) { return null; } + public SortValue[] getSortValues() { + return sortValues; + } + public void setNextReader(LeafReaderContext context) throws IOException { this.ord = context.ord; this.docBase = context.docBase; diff --git a/solr/core/src/java/org/apache/solr/handler/export/StringFieldWriter.java b/solr/core/src/java/org/apache/solr/handler/export/StringFieldWriter.java index c14e4d7770de..b82c365c1df4 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/StringFieldWriter.java +++ b/solr/core/src/java/org/apache/solr/handler/export/StringFieldWriter.java @@ -72,9 +72,21 @@ public boolean write(SortDoc sortDoc, LeafReader 
reader, MapWriter.EntryWriter e if (ew instanceof JavaBinCodec.BinEntryWriter) { ew.put(this.field, utf8.reset(ref.bytes, ref.offset, ref.length, null)); } else { - fieldType.indexedToReadable(ref, cref); - String v = cref.toString(); + String v = null; + if(sortValue != null) { + v = ((StringValue) sortValue).getLastString(); + if(v == null) { + fieldType.indexedToReadable(ref, cref); + v = cref.toString(); + ((StringValue) sortValue).setLastString(v); + } + } else { + fieldType.indexedToReadable(ref, cref); + v = cref.toString(); + } + ew.put(this.field, v); + } return true; } diff --git a/solr/core/src/java/org/apache/solr/handler/export/StringValue.java b/solr/core/src/java/org/apache/solr/handler/export/StringValue.java index 5df4eebf81b7..fc7056599f89 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/StringValue.java +++ b/solr/core/src/java/org/apache/solr/handler/export/StringValue.java @@ -24,6 +24,7 @@ import org.apache.lucene.index.MultiDocValues; import org.apache.lucene.index.OrdinalMap; import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LongValues; class StringValue implements SortValue { @@ -40,6 +41,10 @@ class StringValue implements SortValue { protected int lastDocID; private boolean present; + private BytesRef lastBytes; + private String lastString; + private int lastOrd = -1; + public StringValue(SortedDocValues globalDocValues, String field, IntComp comp) { this.globalDocValues = globalDocValues; this.docValues = globalDocValues; @@ -52,6 +57,14 @@ public StringValue(SortedDocValues globalDocValues, String field, IntComp comp) this.present = false; } + public String getLastString() { + return this.lastString; + } + + public void setLastString(String lastString) { + this.lastString = lastString; + } + public StringValue copy() { return new StringValue(globalDocValues, field, comp); } @@ -88,7 +101,12 @@ public void setCurrentValue(SortValue sv) { public Object getCurrentValue() throws IOException { assert present == true; - return docValues.lookupOrd(currentOrd); + if (currentOrd != lastOrd) { + lastBytes = docValues.lookupOrd(currentOrd); + lastOrd = currentOrd; + lastString = null; + } + return lastBytes; } public String getField() { @@ -109,7 +127,7 @@ public void reset() { } public int compareTo(SortValue o) { - StringValue sv = (StringValue)o; + StringValue sv = (StringValue) o; return comp.compare(currentOrd, sv.currentOrd); } diff --git a/solr/core/src/java/org/apache/solr/handler/loader/JavabinLoader.java b/solr/core/src/java/org/apache/solr/handler/loader/JavabinLoader.java index aa4b4f988287..5671359f7071 100644 --- a/solr/core/src/java/org/apache/solr/handler/loader/JavabinLoader.java +++ b/solr/core/src/java/org/apache/solr/handler/loader/JavabinLoader.java @@ -133,7 +133,7 @@ private void handleMultiStream(SolrQueryRequest req, SolrQueryResponse rsp, Inpu throws IOException { FastInputStream in = FastInputStream.wrap(stream); SolrParams old = req.getParams(); - new JavaBinCodec() { + try (JavaBinCodec jbc = new JavaBinCodec() { SolrParams params; AddUpdateCommand addCmd = null; @@ -164,7 +164,9 @@ public List readIterator(DataInputInputStream fis) throws IOException { return Collections.emptyList(); } - }.unmarshal(in); + }) { + jbc.unmarshal(in); + } } private AddUpdateCommand getAddCommand(SolrQueryRequest req, SolrParams params) { diff --git a/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java b/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java 
index 70f9cae8987a..2964834244e4 100644 --- a/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java +++ b/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java @@ -219,7 +219,7 @@ private void handleSplitMode(String split, String[] fields, final Reader reader) JsonRecordReader jsonRecordReader = JsonRecordReader.getInst(split, Arrays.asList(fields)); jsonRecordReader.streamRecords(parser, new JsonRecordReader.Handler() { - ArrayList docs = null; + ArrayList> docs = null; @Override public void handle(Map record, String path) { @@ -227,7 +227,7 @@ public void handle(Map record, String path) { if (echo) { if (docs == null) { - docs = new ArrayList(); + docs = new ArrayList<>(); rsp.add("docs", docs); } changeChildDoc(copy); @@ -247,6 +247,7 @@ public void handle(Map record, String path) { }); } + @SuppressWarnings({"unchecked", "rawtypes"}) private SolrInputDocument buildDoc(Map m) { SolrInputDocument result = new SolrInputDocument(); for (Map.Entry e : m.entrySet()) { @@ -272,6 +273,7 @@ private SolrInputDocument buildDoc(Map m) { return result; } + @SuppressWarnings({"unchecked", "rawtypes"}) private Map getDocMap(Map record, JSONParser parser, String srcField, boolean mapUniqueKeyOnly) { Map result = record; if (srcField != null && parser instanceof RecordingJSONParser) { @@ -418,6 +420,7 @@ RollbackUpdateCommand parseRollback() throws IOException { void parseCommitOptions(CommitUpdateCommand cmd) throws IOException { assertNextEvent(JSONParser.OBJECT_START); + @SuppressWarnings({"unchecked"}) final Map map = (Map) ObjectBuilder.getVal(parser); // SolrParams currently expects string values... @@ -583,9 +586,11 @@ private Object parseFieldValue(int ev, String fieldName) throws IOException { } } + @SuppressWarnings({"unchecked"}) private List parseArrayFieldValue(int ev, String fieldName) throws IOException { assert ev == JSONParser.ARRAY_START; + @SuppressWarnings({"rawtypes"}) ArrayList lst = new ArrayList(2); for (; ; ) { ev = parser.nextEvent(); @@ -622,6 +627,7 @@ private boolean isChildDoc(SolrInputDocument extendedFieldValue) { private boolean mapEntryIsChildDoc(Object val) { if(val instanceof List) { + @SuppressWarnings({"rawtypes"}) List listVal = (List) val; if (listVal.size() == 0) return false; return listVal.get(0) instanceof Map; @@ -630,6 +636,7 @@ private boolean mapEntryIsChildDoc(Object val) { } } + @SuppressWarnings({"unchecked", "rawtypes"}) private static Object changeChildDoc(Object o) { if (o instanceof List) { return ((List) o) diff --git a/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java b/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java index 83178e5d756e..c8eac0b31aa7 100644 --- a/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java +++ b/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java @@ -385,6 +385,7 @@ void processDelete(SolrQueryRequest req, UpdateRequestProcessor processor, XMLSt * * @since solr 1.3 */ + @SuppressWarnings({"unchecked"}) public SolrInputDocument readDoc(XMLStreamReader parser) throws XMLStreamException { SolrInputDocument doc = new SolrInputDocument(); @@ -445,6 +446,7 @@ public SolrInputDocument readDoc(XMLStreamReader parser) throws XMLStreamExcepti } else { // multiple val are present if (val instanceof List) { + @SuppressWarnings({"rawtypes"}) List list = (List) val; list.add(v); } else { diff --git a/solr/core/src/java/org/apache/solr/handler/sql/CalciteSolrDriver.java b/solr/core/src/java/org/apache/solr/handler/sql/CalciteSolrDriver.java index 
3a7640de83e0..664cb8af6982 100644 --- a/solr/core/src/java/org/apache/solr/handler/sql/CalciteSolrDriver.java +++ b/solr/core/src/java/org/apache/solr/handler/sql/CalciteSolrDriver.java @@ -19,6 +19,7 @@ import org.apache.calcite.jdbc.CalciteConnection; import org.apache.calcite.jdbc.Driver; import org.apache.calcite.schema.SchemaPlus; +import org.apache.solr.client.solrj.io.SolrClientCache; import java.sql.Connection; import java.sql.SQLException; @@ -32,12 +33,17 @@ public class CalciteSolrDriver extends Driver { public final static String CONNECT_STRING_PREFIX = "jdbc:calcitesolr:"; + public static CalciteSolrDriver INSTANCE = new CalciteSolrDriver(); + + private SolrClientCache solrClientCache; + + private CalciteSolrDriver() { super(); } static { - new CalciteSolrDriver().register(); + INSTANCE.register(); } @Override @@ -59,11 +65,15 @@ public Connection connect(String url, Properties info) throws SQLException { if(schemaName == null) { throw new SQLException("zk must be set"); } - rootSchema.add(schemaName, new SolrSchema(info)); + final SolrSchema solrSchema = new SolrSchema(info, solrClientCache); + rootSchema.add(schemaName, solrSchema); // Set the default schema calciteConnection.setSchema(schemaName); + return calciteConnection; + } - return connection; + public void setSolrClientCache(SolrClientCache solrClientCache) { + this.solrClientCache = solrClientCache; } } diff --git a/solr/core/src/java/org/apache/solr/handler/sql/LimitStream.java b/solr/core/src/java/org/apache/solr/handler/sql/LimitStream.java index 0d4bb72adf42..772f639a762b 100644 --- a/solr/core/src/java/org/apache/solr/handler/sql/LimitStream.java +++ b/solr/core/src/java/org/apache/solr/handler/sql/LimitStream.java @@ -26,9 +26,7 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; class LimitStream extends TupleStream { @@ -79,9 +77,7 @@ public Explanation toExplanation(StreamFactory factory) throws IOException { public Tuple read() throws IOException { ++count; if(count > limit) { - Map fields = new HashMap<>(); - fields.put("EOF", "true"); - return new Tuple(fields); + return Tuple.EOF(); } return stream.read(); diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrAggregate.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrAggregate.java index f207eeb44198..3d6215d2b997 100644 --- a/solr/core/src/java/org/apache/solr/handler/sql/SolrAggregate.java +++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrAggregate.java @@ -89,6 +89,7 @@ public void implement(Implementor implementor) { } } + @SuppressWarnings({"fallthrough"}) private Pair toSolrMetric(Implementor implementor, AggregateCall aggCall, List inNames) { SqlAggFunction aggregation = aggCall.getAggregation(); List args = aggCall.getArgList(); diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java index 8c06f3204c52..cd038f518e48 100644 --- a/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java +++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java @@ -33,6 +33,7 @@ class SolrEnumerator implements Enumerator { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private final TupleStream tupleStream; + @SuppressWarnings({"rawtypes"}) private final List> fields; private Tuple current; private char sep = 31; @@ -42,6 +43,7 @@ class SolrEnumerator implements Enumerator { * @param tupleStream 
Solr TupleStream * @param fields Fields to get from each Tuple */ + @SuppressWarnings({"rawtypes"}) SolrEnumerator(TupleStream tupleStream, List> fields) { this.tupleStream = tupleStream; @@ -72,6 +74,7 @@ public Object current() { } } + @SuppressWarnings({"rawtypes"}) private Object getter(Tuple tuple, Map.Entry field) { Object val = tuple.get(field.getKey()); diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrFilter.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrFilter.java index ce12aece907e..e35a23804e89 100644 --- a/solr/core/src/java/org/apache/solr/handler/sql/SolrFilter.java +++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrFilter.java @@ -97,11 +97,11 @@ private String translateOr(RexNode condition) { } private String translateAnd(RexNode node0) { - List andStrings = new ArrayList(); - List notStrings = new ArrayList(); + List andStrings = new ArrayList<>(); + List notStrings = new ArrayList<>(); - List ands = new ArrayList(); - List nots = new ArrayList(); + List ands = new ArrayList<>(); + List nots = new ArrayList<>(); RelOptUtil.decomposeConjunction(node0, ands, nots); @@ -253,11 +253,11 @@ private String translateOr(RexNode condition) { } private String translateAnd(RexNode node0) { - List andStrings = new ArrayList(); - List notStrings = new ArrayList(); + List andStrings = new ArrayList<>(); + List notStrings = new ArrayList<>(); - List ands = new ArrayList(); - List nots = new ArrayList(); + List ands = new ArrayList<>(); + List nots = new ArrayList<>(); RelOptUtil.decomposeConjunction(node0, ands, nots); diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrMethod.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrMethod.java index b0bf80140b32..d6e12e74fd2b 100644 --- a/solr/core/src/java/org/apache/solr/handler/sql/SolrMethod.java +++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrMethod.java @@ -38,6 +38,7 @@ enum SolrMethod { public final Method method; + @SuppressWarnings({"rawtypes"}) SolrMethod(Class clazz, String methodName, Class... 
argumentTypes) { this.method = Types.lookupMethod(clazz, methodName, argumentTypes); } diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrSchema.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrSchema.java index b60844217e27..3bf5bd471b1f 100644 --- a/solr/core/src/java/org/apache/solr/handler/sql/SolrSchema.java +++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrSchema.java @@ -16,10 +16,9 @@ */ package org.apache.solr.handler.sql; +import java.io.Closeable; import java.io.IOException; -import java.util.Collections; import java.util.Map; -import java.util.Optional; import java.util.Properties; import java.util.Set; @@ -33,6 +32,7 @@ import org.apache.calcite.sql.type.SqlTypeFactoryImpl; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.CloudSolrClient; +import org.apache.solr.client.solrj.io.SolrClientCache; import org.apache.solr.client.solrj.request.LukeRequest; import org.apache.solr.client.solrj.response.LukeResponse; import org.apache.solr.common.cloud.Aliases; @@ -41,47 +41,59 @@ import com.google.common.collect.ImmutableMap; -class SolrSchema extends AbstractSchema { +class SolrSchema extends AbstractSchema implements Closeable { final Properties properties; + final SolrClientCache solrClientCache; + private volatile boolean isClosed = false; - SolrSchema(Properties properties) { + SolrSchema(Properties properties, SolrClientCache solrClientCache) { super(); this.properties = properties; + this.solrClientCache = solrClientCache; + } + + public SolrClientCache getSolrClientCache() { + return solrClientCache; + } + + @Override + public void close() { + isClosed = true; + } + + public boolean isClosed() { + return isClosed; } @Override protected Map getTableMap() { String zk = this.properties.getProperty("zk"); - try(CloudSolrClient cloudSolrClient = new CloudSolrClient.Builder(Collections.singletonList(zk), Optional.empty()).withSocketTimeout(30000).withConnectionTimeout(15000).build()) { - cloudSolrClient.connect(); - ZkStateReader zkStateReader = cloudSolrClient.getZkStateReader(); - ClusterState clusterState = zkStateReader.getClusterState(); + CloudSolrClient cloudSolrClient = solrClientCache.getCloudSolrClient(zk); + ZkStateReader zkStateReader = cloudSolrClient.getZkStateReader(); + ClusterState clusterState = zkStateReader.getClusterState(); - final ImmutableMap.Builder builder = ImmutableMap.builder(); + final ImmutableMap.Builder builder = ImmutableMap.builder(); - Set collections = clusterState.getCollectionsMap().keySet(); - for (String collection : collections) { - builder.put(collection, new SolrTable(this, collection)); - } + Set collections = clusterState.getCollectionsMap().keySet(); + for (String collection : collections) { + builder.put(collection, new SolrTable(this, collection)); + } - Aliases aliases = zkStateReader.getAliases(); - for (String alias : aliases.getCollectionAliasListMap().keySet()) { - // don't create duplicate entries - if (!collections.contains(alias)) { - builder.put(alias, new SolrTable(this, alias)); - } + Aliases aliases = zkStateReader.getAliases(); + for (String alias : aliases.getCollectionAliasListMap().keySet()) { + // don't create duplicate entries + if (!collections.contains(alias)) { + builder.put(alias, new SolrTable(this, alias)); } - - return builder.build(); - } catch (IOException e) { - throw new RuntimeException(e); } + + return builder.build(); } private Map getFieldInfo(String collection) { String zk = this.properties.getProperty("zk"); - 
try(CloudSolrClient cloudSolrClient = new CloudSolrClient.Builder(Collections.singletonList(zk), Optional.empty()).withSocketTimeout(30000).withConnectionTimeout(15000).build()) { - cloudSolrClient.connect(); + CloudSolrClient cloudSolrClient = solrClientCache.getCloudSolrClient(zk); + try { LukeRequest lukeRequest = new LukeRequest(); lukeRequest.setNumTerms(0); LukeResponse lukeResponse = lukeRequest.process(cloudSolrClient, collection); diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java index 46b09d219432..55724147831e 100644 --- a/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java +++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java @@ -51,7 +51,6 @@ import org.apache.solr.client.solrj.io.stream.metrics.*; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.ModifiableSolrParams; -import org.apache.solr.handler.StreamHandler; import java.io.IOException; import java.util.*; @@ -98,7 +97,7 @@ private Enumerable query(final Properties properties) { * @return Enumerator of results */ private Enumerable query(final Properties properties, - final List> fields, + @SuppressWarnings("rawtypes") final List> fields, final String query, final List> orders, final List buckets, @@ -160,7 +159,7 @@ private Enumerable query(final Properties properties, } StreamContext streamContext = new StreamContext(); - streamContext.setSolrClientCache(StreamHandler.getClientCache()); + streamContext.setSolrClientCache(schema.getSolrClientCache()); tupleStream.setStreamContext(streamContext); final TupleStream finalStream = tupleStream; @@ -252,6 +251,7 @@ private Metric getMetric(Pair metricPair) { } } + @SuppressWarnings({"rawtypes"}) private TupleStream handleSelect(String zk, String collection, String query, @@ -319,6 +319,7 @@ private String getSingleSort(Pair order) { return buf.toString(); } + @SuppressWarnings({"rawtypes"}) private String getFields(List> fields) { StringBuilder buf = new StringBuilder(); for(Map.Entry field : fields) { @@ -349,8 +350,9 @@ private String getFields(Set fieldSet) { } + @SuppressWarnings({"unchecked", "rawtypes"}) private Set getFieldSet(Metric[] metrics, List> fields) { - HashSet set = new HashSet(); + HashSet set = new HashSet<>(); for(Metric metric : metrics) { for(String column : metric.getColumns()) { set.add(column); @@ -424,6 +426,7 @@ private static boolean sortsEqual(Bucket[] buckets, String direction, List fmap = new HashMap(); + Map fmap = new HashMap<>(); for(Map.Entry entry : fields) { fmap.put(entry.getKey(), entry.getValue()); } @@ -550,6 +553,7 @@ private TupleStream handleGroupByMapReduce(String zk, return tupleStream; } + @SuppressWarnings({"rawtypes"}) private Bucket[] buildBuckets(List buckets, List> fields) { Bucket[] bucketsArray = new Bucket[buckets.size()]; @@ -564,6 +568,7 @@ private Bucket[] buildBuckets(List buckets, List> fields, @@ -575,7 +580,7 @@ private TupleStream handleGroupByFacet(String zkHost, final String havingPredicate) throws IOException { - Map fmap = new HashMap(); + Map fmap = new HashMap<>(); for(Map.Entry f : fields) { fmap.put(f.getKey(), f.getValue()); } @@ -654,6 +659,7 @@ private TupleStream handleGroupByFacet(String zkHost, return tupleStream; } + @SuppressWarnings({"rawtypes"}) private TupleStream handleSelectDistinctMapReduce(final String zkHost, final String collection, final Properties properties, @@ -762,9 +768,9 @@ private TupleStream handleSelectDistinctMapReduce(final 
String zkHost, private StreamComparator[] adjustSorts(List> orders, Bucket[] buckets) throws IOException { - List adjustedSorts = new ArrayList(); - Set bucketFields = new HashSet(); - Set sortFields = new HashSet(); + List adjustedSorts = new ArrayList<>(); + Set bucketFields = new HashSet<>(); + Set sortFields = new HashSet<>(); ComparatorOrder comparatorOrder = ComparatorOrder.ASCENDING; for(Pair order : orders) { @@ -796,14 +802,15 @@ private StreamComparator[] adjustSorts(List> orders, Bucket return adjustedSorts.toArray(new FieldComparator[adjustedSorts.size()]); } + @SuppressWarnings({"rawtypes"}) private TupleStream handleStats(String zk, String collection, String query, List> metricPairs, - List> fields) { + List> fields) throws IOException { - Map fmap = new HashMap(); + Map fmap = new HashMap<>(); for(Map.Entry entry : fields) { fmap.put(entry.getKey(), entry.getValue()); } @@ -855,7 +862,7 @@ private Properties getProperties() { * * @see SolrMethod#SOLR_QUERYABLE_QUERY */ - @SuppressWarnings("UnusedDeclaration") + @SuppressWarnings({"rawtypes","UnusedDeclaration"}) public Enumerable query(List> fields, String query, List> order, List buckets, List> metricPairs, String limit, String negativeQuery, String havingPredicate) { return getTable().query(getProperties(), fields, query, order, buckets, metricPairs, limit, negativeQuery, havingPredicate); diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrToEnumerableConverter.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrToEnumerableConverter.java index c97303b4a427..c9d8caeb3b43 100644 --- a/solr/core/src/java/org/apache/solr/handler/sql/SolrToEnumerableConverter.java +++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrToEnumerableConverter.java @@ -62,12 +62,14 @@ public Result implement(EnumerableRelImplementor implementor, Prefer pref) { final RelDataType rowType = getRowType(); final PhysType physType = PhysTypeImpl.of(implementor.getTypeFactory(), rowType, pref.prefer(JavaRowFormat.ARRAY)); final Expression table = list.append("table", solrImplementor.table.getExpression(SolrTable.SolrQueryable.class)); + @SuppressWarnings({"rawtypes"}) final Expression fields = list.append("fields", constantArrayList( Pair.zip(generateFields(SolrRules.solrFieldNames(rowType), solrImplementor.fieldMappings), new AbstractList() { @Override + @SuppressWarnings({"rawtypes"}) public Class get(int index) { return physType.fieldClass(index); } @@ -122,6 +124,7 @@ private String getField(Map fieldMappings, String field) { * E.g. {@code constantArrayList("x", "y")} returns * "Arrays.asList('x', 'y')". 
*/ + @SuppressWarnings({"rawtypes"}) private static MethodCallExpression constantArrayList(List values, Class clazz) { return Expressions.call(BuiltInMethod.ARRAYS_AS_LIST.method, Expressions.newArrayInit(clazz, constantList(values))); diff --git a/solr/core/src/java/org/apache/solr/handler/tagger/TaggerRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/tagger/TaggerRequestHandler.java index adc8947b5536..e2a9f90688f5 100644 --- a/solr/core/src/java/org/apache/solr/handler/tagger/TaggerRequestHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/tagger/TaggerRequestHandler.java @@ -47,6 +47,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.BitSetIterator; import org.apache.lucene.util.Bits; import org.apache.lucene.util.FixedBitSet; @@ -159,6 +160,7 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw final SolrIndexSearcher searcher = req.getSearcher(); final FixedBitSet matchDocIdsBS = new FixedBitSet(searcher.maxDoc()); + @SuppressWarnings({"rawtypes"}) final List tags = new ArrayList(2000); try { @@ -184,6 +186,7 @@ protected void tagCallback(int startOffset, int endOffset, Object docIdsKey) { endOffset = offsetPair[1]; } + @SuppressWarnings({"rawtypes"}) NamedList tag = new NamedList(); tag.add("startOffset", startOffset); tag.add("endOffset", endOffset); @@ -194,19 +197,20 @@ protected void tagCallback(int startOffset, int endOffset, Object docIdsKey) { tags.add(tag); } + @SuppressWarnings({"rawtypes"}) Map docIdsListCache = new HashMap<>(2000); ValueSourceAccessor uniqueKeyCache = new ValueSourceAccessor(searcher, idSchemaField.getType().getValueSource(idSchemaField, null)); - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) private List lookupSchemaDocIds(Object docIdsKey) { List schemaDocIds = docIdsListCache.get(docIdsKey); if (schemaDocIds != null) return schemaDocIds; IntsRef docIds = lookupDocIds(docIdsKey); //translate lucene docIds to schema ids - schemaDocIds = new ArrayList(docIds.length); + schemaDocIds = new ArrayList<>(docIds.length); for (int i = docIds.offset; i < docIds.offset + docIds.length; i++) { int docId = docIds.ints[i]; assert i == docIds.offset || docIds.ints[i - 1] < docId : "not sorted?"; @@ -284,7 +288,7 @@ private DocList getDocList(int rows, FixedBitSet matchDocIdsBS) throws IOExcepti for (int i = 0; i < docIds.length; i++) { docIds[i] = docIdIter.nextDoc(); } - return new DocSlice(0, docIds.length, docIds, null, matchDocs, 1f); + return new DocSlice(0, docIds.length, docIds, null, matchDocs, 1f, TotalHits.Relation.EQUAL_TO); } private TagClusterReducer chooseTagClusterReducer(String overlaps) { @@ -348,6 +352,7 @@ private boolean fieldHasIndexedStopFilter(String field, SolrQueryRequest req) { static class ValueSourceAccessor { private final List readerContexts; private final ValueSource valueSource; + @SuppressWarnings({"rawtypes"}) private final Map fContext; private final FunctionValues[] functionValuesPerSeg; private final int[] functionValuesDocIdPerSeg; @@ -360,6 +365,7 @@ static class ValueSourceAccessor { functionValuesDocIdPerSeg = new int[readerContexts.size()]; } + @SuppressWarnings({"unchecked"}) Object objectVal(int topDocId) throws IOException { // lookup segment level stuff: int segIdx = ReaderUtil.subIndex(topDocId, readerContexts); diff --git 
a/solr/core/src/java/org/apache/solr/handler/tagger/TermPrefixCursor.java b/solr/core/src/java/org/apache/solr/handler/tagger/TermPrefixCursor.java index 1e82dbe4b5b6..47561112961d 100644 --- a/solr/core/src/java/org/apache/solr/handler/tagger/TermPrefixCursor.java +++ b/solr/core/src/java/org/apache/solr/handler/tagger/TermPrefixCursor.java @@ -107,6 +107,7 @@ private void ensureBufIsACopy() { /** Seeks to prefixBuf or the next term that is prefixed by prefixBuf plus the separator char. * Sets docIds. **/ + @SuppressWarnings({"fallthrough"}) private boolean seekPrefix() throws IOException { TermsEnum.SeekStatus seekStatus = termsEnum.seekCeil(prefixBuf); diff --git a/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java b/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java index a37543c51a94..3f05eceb83a3 100644 --- a/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java +++ b/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java @@ -88,15 +88,14 @@ * * @since solr 1.3 */ -public class DefaultSolrHighlighter extends SolrHighlighter implements PluginInfoInitialized -{ +public class DefaultSolrHighlighter extends SolrHighlighter implements PluginInfoInitialized { - /** - * This constant was formerly part of HighlightParams. After deprecation it was removed so clients + /** + * This constant was formerly part of HighlightParams. After deprecation it was removed so clients * would no longer use it, but we still support it server side. */ private static final String USE_FVH = HighlightParams.HIGHLIGHT + ".useFastVectorHighlighter"; - + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); protected final SolrCore solrCore; @@ -107,28 +106,28 @@ public DefaultSolrHighlighter(SolrCore solrCore) { } // Thread safe registry - protected final Map formatters = - new HashMap<>(); + protected final Map formatters = + new HashMap<>(); // Thread safe registry - protected final Map encoders = - new HashMap<>(); + protected final Map encoders = + new HashMap<>(); // Thread safe registry - protected final Map fragmenters = - new HashMap<>() ; + protected final Map fragmenters = + new HashMap<>(); // Thread safe registry protected final Map fragListBuilders = - new HashMap<>() ; + new HashMap<>(); // Thread safe registry protected final Map fragmentsBuilders = - new HashMap<>() ; + new HashMap<>(); // Thread safe registry protected final Map boundaryScanners = - new HashMap<>() ; + new HashMap<>(); @Override public void init(PluginInfo info) { @@ -140,7 +139,7 @@ public void init(PluginInfo info) { boundaryScanners.clear(); // Load the fragmenters - SolrFragmenter frag = solrCore.initPlugins(info.getChildren("fragmenter") , fragmenters,SolrFragmenter.class,null); + SolrFragmenter frag = solrCore.initPlugins(info.getChildren("fragmenter"), fragmenters, SolrFragmenter.class, null); if (frag == null) { frag = new GapFragmenter(); solrCore.initDefaultPlugin(frag, SolrFragmenter.class); @@ -149,7 +148,7 @@ public void init(PluginInfo info) { fragmenters.put(null, frag); // Load the formatters - SolrFormatter fmt = solrCore.initPlugins(info.getChildren("formatter"), formatters,SolrFormatter.class,null); + SolrFormatter fmt = solrCore.initPlugins(info.getChildren("formatter"), formatters, SolrFormatter.class, null); if (fmt == null) { fmt = new HtmlFormatter(); solrCore.initDefaultPlugin(fmt, SolrFormatter.class); @@ -158,7 +157,7 @@ public void init(PluginInfo info) { formatters.put(null, fmt); 
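The plugin-loading code above (and the encoder, fragListBuilder, fragmentsBuilder and boundaryScanner blocks that follow) keeps Solr's registry-with-default pattern: named plugins from configuration go into a per-type map, a built-in implementation is constructed when none was configured, and that fallback is registered under both "" and null so a lookup without an explicit name still resolves. A minimal generic sketch of the pattern, assuming nothing beyond java.util -- PluginRegistry, load and lookup are hypothetical names for this illustration, not Solr API:

import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;

// Hypothetical sketch of the registry-with-default pattern used by init() above.
final class PluginRegistry<T> {
  private final Map<String, T> plugins = new HashMap<>();

  // Load configured plugins, then ensure "" and null resolve to a default,
  // mirroring solrCore.initPlugins(...) followed by initDefaultPlugin(...).
  void load(Map<String, T> configured, Supplier<T> defaultFactory) {
    plugins.clear();
    plugins.putAll(configured);
    T fallback = plugins.containsKey(null) ? plugins.get(null) : defaultFactory.get();
    plugins.put("", fallback);
    plugins.put(null, fallback);
  }

  // A null name (no explicit parameter) returns the default; an unknown
  // explicit name fails fast, like the "Unknown formatter/encoder" checks.
  T lookup(String name) {
    T plugin = plugins.get(name);
    if (plugin == null) {
      throw new IllegalArgumentException("Unknown plugin: " + name);
    }
    return plugin;
  }
}

Registering the fallback under both keys keeps the lookup path a single map get, whether the request names a plugin explicitly, names the empty string, or names nothing at all.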
// Load the encoders - SolrEncoder enc = solrCore.initPlugins(info.getChildren("encoder"), encoders,SolrEncoder.class,null); + SolrEncoder enc = solrCore.initPlugins(info.getChildren("encoder"), encoders, SolrEncoder.class, null); if (enc == null) { enc = new DefaultEncoder(); solrCore.initDefaultPlugin(enc, SolrEncoder.class); @@ -168,28 +167,28 @@ public void init(PluginInfo info) { // Load the FragListBuilders SolrFragListBuilder fragListBuilder = solrCore.initPlugins(info.getChildren("fragListBuilder"), - fragListBuilders, SolrFragListBuilder.class, null ); - if( fragListBuilder == null ) { + fragListBuilders, SolrFragListBuilder.class, null); + if (fragListBuilder == null) { fragListBuilder = new SimpleFragListBuilder(); solrCore.initDefaultPlugin(fragListBuilder, SolrFragListBuilder.class); } - fragListBuilders.put( "", fragListBuilder ); - fragListBuilders.put( null, fragListBuilder ); + fragListBuilders.put("", fragListBuilder); + fragListBuilders.put(null, fragListBuilder); // Load the FragmentsBuilders SolrFragmentsBuilder fragsBuilder = solrCore.initPlugins(info.getChildren("fragmentsBuilder"), - fragmentsBuilders, SolrFragmentsBuilder.class, null); - if( fragsBuilder == null ) { + fragmentsBuilders, SolrFragmentsBuilder.class, null); + if (fragsBuilder == null) { fragsBuilder = new ScoreOrderFragmentsBuilder(); solrCore.initDefaultPlugin(fragsBuilder, SolrFragmentsBuilder.class); } - fragmentsBuilders.put( "", fragsBuilder ); - fragmentsBuilders.put( null, fragsBuilder ); + fragmentsBuilders.put("", fragsBuilder); + fragmentsBuilders.put(null, fragsBuilder); // Load the BoundaryScanners SolrBoundaryScanner boundaryScanner = solrCore.initPlugins(info.getChildren("boundaryScanner"), - boundaryScanners, SolrBoundaryScanner.class, null); - if(boundaryScanner == null) { + boundaryScanners, SolrBoundaryScanner.class, null); + if (boundaryScanner == null) { boundaryScanner = new SimpleBoundaryScanner(); solrCore.initDefaultPlugin(boundaryScanner, SolrBoundaryScanner.class); } @@ -200,9 +199,10 @@ public void init(PluginInfo info) { /** * Return a phrase {@link org.apache.lucene.search.highlight.Highlighter} appropriate for this field. - * @param query The current Query - * @param fieldName The name of the field - * @param request The current SolrQueryRequest + * + * @param query The current Query + * @param fieldName The name of the field + * @param request The current SolrQueryRequest * @param tokenStream document text tokenStream that implements reset() efficiently (e.g. CachingTokenFilter). * If it's used, call reset() first. * @throws IOException If there is a low-level I/O error. @@ -210,9 +210,9 @@ public void init(PluginInfo info) { protected Highlighter getPhraseHighlighter(Query query, String fieldName, SolrQueryRequest request, TokenStream tokenStream) throws IOException { SolrParams params = request.getParams(); Highlighter highlighter = new Highlighter( - getFormatter(fieldName, params), - getEncoder(fieldName, params), - getSpanQueryScorer(query, fieldName, tokenStream, request)); + getFormatter(fieldName, params), + getEncoder(fieldName, params), + getSpanQueryScorer(query, fieldName, tokenStream, request)); highlighter.setTextFragmenter(getFragmenter(fieldName, params)); @@ -221,31 +221,33 @@ protected Highlighter getPhraseHighlighter(Query query, String fieldName, SolrQu /** * Return a {@link org.apache.lucene.search.highlight.Highlighter} appropriate for this field. 
- * @param query The current Query + * + * @param query The current Query * @param fieldName The name of the field - * @param request The current SolrQueryRequest + * @param request The current SolrQueryRequest */ protected Highlighter getHighlighter(Query query, String fieldName, SolrQueryRequest request) { SolrParams params = request.getParams(); Highlighter highlighter = new Highlighter( - getFormatter(fieldName, params), - getEncoder(fieldName, params), - getQueryScorer(query, fieldName, request)); + getFormatter(fieldName, params), + getEncoder(fieldName, params), + getQueryScorer(query, fieldName, request)); highlighter.setTextFragmenter(getFragmenter(fieldName, params)); return highlighter; } /** * Return a {@link org.apache.lucene.search.highlight.QueryScorer} suitable for this Query and field. - * @param query The current query + * + * @param query The current query * @param tokenStream document text tokenStream that implements reset() efficiently (e.g. CachingTokenFilter). * If it's used, call reset() first. - * @param fieldName The name of the field - * @param request The SolrQueryRequest + * @param fieldName The name of the field + * @param request The SolrQueryRequest */ protected QueryScorer getSpanQueryScorer(Query query, String fieldName, TokenStream tokenStream, SolrQueryRequest request) { QueryScorer scorer = new QueryScorer(query, - request.getParams().getFieldBool(fieldName, HighlightParams.FIELD_MATCH, false) ? fieldName : null) { + request.getParams().getFieldBool(fieldName, HighlightParams.FIELD_MATCH, false) ? fieldName : null) { @Override protected WeightedSpanTermExtractor newTermExtractor(String defaultField) { return new CustomSpanTermExtractor(defaultField); @@ -288,9 +290,10 @@ protected void extract(Query query, float boost, Map t /** * Return a {@link org.apache.lucene.search.highlight.Scorer} suitable for this Query and field. - * @param query The current query + * + * @param query The current query * @param fieldName The name of the field - * @param request The SolrQueryRequest + * @param request The SolrQueryRequest */ protected Scorer getQueryScorer(Query query, String fieldName, SolrQueryRequest request) { boolean reqFieldMatch = request.getParams().getFieldBool(fieldName, HighlightParams.FIELD_MATCH, false); @@ -305,8 +308,9 @@ protected Scorer getQueryScorer(Query query, String fieldName, SolrQueryRequest * Return the max number of snippets for this field. If this has not * been configured for this field, fall back to the configured default * or the solr default. + * * @param fieldName The name of the field - * @param params The params controlling Highlighting + * @param params The params controlling Highlighting */ protected int getMaxSnippets(String fieldName, SolrParams params) { return params.getFieldInt(fieldName, HighlightParams.SNIPPETS, 1); @@ -314,10 +318,11 @@ protected int getMaxSnippets(String fieldName, SolrParams params) { /** * Return whether adjacent fragments should be merged. 
+ * * @param fieldName The name of the field - * @param params The params controlling Highlighting + * @param params The params controlling Highlighting */ - protected boolean isMergeContiguousFragments(String fieldName, SolrParams params){ + protected boolean isMergeContiguousFragments(String fieldName, SolrParams params) { return params.getFieldBool(fieldName, HighlightParams.MERGE_CONTIGUOUS_FRAGMENTS, false); } @@ -327,15 +332,14 @@ protected boolean isMergeContiguousFragments(String fieldName, SolrParams params * default or the solr default ({@link org.apache.lucene.search.highlight.SimpleHTMLFormatter}). * * @param fieldName The name of the field - * @param params The params controlling Highlighting + * @param params The params controlling Highlighting * @return An appropriate {@link org.apache.lucene.search.highlight.Formatter}. */ - protected Formatter getFormatter(String fieldName, SolrParams params ) - { - String str = params.getFieldParam( fieldName, HighlightParams.FORMATTER ); + protected Formatter getFormatter(String fieldName, SolrParams params) { + String str = params.getFieldParam(fieldName, HighlightParams.FORMATTER); SolrFormatter formatter = formatters.get(str); - if( formatter == null ) { - throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "Unknown formatter: "+str ); + if (formatter == null) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown formatter: " + str); } return formatter.getFormatter(fieldName, params); } @@ -346,14 +350,14 @@ protected Formatter getFormatter(String fieldName, SolrParams params ) * default or the solr default ({@link org.apache.lucene.search.highlight.DefaultEncoder}). * * @param fieldName The name of the field - * @param params The params controlling Highlighting + * @param params The params controlling Highlighting * @return An appropriate {@link org.apache.lucene.search.highlight.Encoder}. */ - protected Encoder getEncoder(String fieldName, SolrParams params){ - String str = params.getFieldParam( fieldName, HighlightParams.ENCODER ); - SolrEncoder encoder = encoders.get( str ); - if( encoder == null ) { - throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "Unknown encoder: "+str ); + protected Encoder getEncoder(String fieldName, SolrParams params) { + String str = params.getFieldParam(fieldName, HighlightParams.ENCODER); + SolrEncoder encoder = encoders.get(str); + if (encoder == null) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown encoder: " + str); } return encoder.getEncoder(fieldName, params); } @@ -364,46 +368,45 @@ protected Encoder getEncoder(String fieldName, SolrParams params){ * default or the solr default ({@link GapFragmenter}). * * @param fieldName The name of the field - * @param params The params controlling Highlighting + * @param params The params controlling Highlighting * @return An appropriate {@link org.apache.lucene.search.highlight.Fragmenter}. 
*/ - protected Fragmenter getFragmenter(String fieldName, SolrParams params) - { - String fmt = params.getFieldParam( fieldName, HighlightParams.FRAGMENTER ); + protected Fragmenter getFragmenter(String fieldName, SolrParams params) { + String fmt = params.getFieldParam(fieldName, HighlightParams.FRAGMENTER); SolrFragmenter frag = fragmenters.get(fmt); - if( frag == null ) { - throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "Unknown fragmenter: "+fmt ); + if (frag == null) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown fragmenter: " + fmt); } return frag.getFragmenter(fieldName, params); } - protected FragListBuilder getFragListBuilder( String fieldName, SolrParams params ){ - String flb = params.getFieldParam( fieldName, HighlightParams.FRAG_LIST_BUILDER ); + protected FragListBuilder getFragListBuilder(String fieldName, SolrParams params) { + String flb = params.getFieldParam(fieldName, HighlightParams.FRAG_LIST_BUILDER); SolrFragListBuilder solrFlb = fragListBuilders.get(flb); - if( solrFlb == null ){ - throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "Unknown fragListBuilder: " + flb ); + if (solrFlb == null) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown fragListBuilder: " + flb); } return solrFlb.getFragListBuilder(params); } - protected FragmentsBuilder getFragmentsBuilder( String fieldName, SolrParams params ){ + protected FragmentsBuilder getFragmentsBuilder(String fieldName, SolrParams params) { BoundaryScanner bs = getBoundaryScanner(fieldName, params); - return getSolrFragmentsBuilder( fieldName, params ).getFragmentsBuilder(params, bs); + return getSolrFragmentsBuilder(fieldName, params).getFragmentsBuilder(params, bs); } - protected SolrFragmentsBuilder getSolrFragmentsBuilder( String fieldName, SolrParams params ){ - String fb = params.getFieldParam( fieldName, HighlightParams.FRAGMENTS_BUILDER ); + protected SolrFragmentsBuilder getSolrFragmentsBuilder(String fieldName, SolrParams params) { + String fb = params.getFieldParam(fieldName, HighlightParams.FRAGMENTS_BUILDER); SolrFragmentsBuilder solrFb = fragmentsBuilders.get(fb); - if( solrFb == null ){ - throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "Unknown fragmentsBuilder: " + fb ); + if (solrFb == null) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown fragmentsBuilder: " + fb); } return solrFb; } - protected BoundaryScanner getBoundaryScanner(String fieldName, SolrParams params){ + protected BoundaryScanner getBoundaryScanner(String fieldName, SolrParams params) { String bs = params.getFieldParam(fieldName, HighlightParams.BOUNDARY_SCANNER); SolrBoundaryScanner solrBs = boundaryScanners.get(bs); - if(solrBs == null){ + if (solrBs == null) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown boundaryScanner: " + bs); } return solrBs.getBoundaryScanner(fieldName, params); @@ -413,12 +416,11 @@ protected BoundaryScanner getBoundaryScanner(String fieldName, SolrParams params * Generates a list of Highlighted query fragments for each item in a list * of documents, or returns null if highlighting is disabled. 
* - * @param docs query results - * @param query the query - * @param req the current request + * @param docs query results + * @param query the query + * @param req the current request * @param defaultFields default list of fields to summarize - * - * @return NamedList containing a NamedList for each document, which in + * @return NamedList containing a NamedList for each document, which in * turn contains sets of (field, summary) pairs. */ @Override @@ -429,7 +431,7 @@ public NamedList doHighlighting(DocList docs, Query query, SolrQueryRequ return null; boolean rewrite = query != null && !(Boolean.valueOf(params.get(HighlightParams.USE_PHRASE_HIGHLIGHTER, "true")) && - Boolean.valueOf(params.get(HighlightParams.HIGHLIGHT_MULTI_TERM, "true"))); + Boolean.valueOf(params.get(HighlightParams.HIGHLIGHT_MULTI_TERM, "true"))); if (rewrite) { query = query.rewrite(req.getSearcher().getIndexReader()); @@ -459,6 +461,7 @@ public NamedList doHighlighting(DocList docs, Query query, SolrQueryRequ IndexReader reader = new TermVectorReusingLeafReader(req.getSearcher().getSlowAtomicReader()); // SOLR-5855 // Highlight each document + @SuppressWarnings({"rawtypes"}) NamedList fragments = new SimpleOrderedMap(); DocIterator iterator = docs.iterator(); for (int i = 0; i < docs.size(); i++) { @@ -499,10 +502,10 @@ protected Object doHighlightingOfField(SolrDocument doc, int docId, SchemaField } else if (useFastVectorHighlighter(params, schemaField)) { if (fvhContainer.fieldQuery == null) { FastVectorHighlighter fvh = new FastVectorHighlighter( - // FVH cannot process hl.usePhraseHighlighter parameter per-field basis - params.getBool(HighlightParams.USE_PHRASE_HIGHLIGHTER, true), - // FVH cannot process hl.requireFieldMatch parameter per-field basis - params.getBool(HighlightParams.FIELD_MATCH, false)) { + // FVH cannot process hl.usePhraseHighlighter parameter per-field basis + params.getBool(HighlightParams.USE_PHRASE_HIGHLIGHTER, true), + // FVH cannot process hl.requireFieldMatch parameter per-field basis + params.getBool(HighlightParams.FIELD_MATCH, false)) { @Override public FieldQuery getFieldQuery(Query query, IndexReader reader) throws IOException { return new FieldQuery(query, reader, phraseHighlight, fieldMatch) { @@ -525,7 +528,7 @@ protected void flatten(Query sourceQuery, IndexReader reader, Collection fvhContainer.fieldQuery = fvh.getFieldQuery(query, reader); } fieldHighlights = - doHighlightingByFastVectorHighlighter(doc, docId, schemaField, fvhContainer, reader, req); + doHighlightingByFastVectorHighlighter(doc, docId, schemaField, fvhContainer, reader, req); } else { // standard/default highlighter fieldHighlights = doHighlightingByHighlighter(doc, docId, schemaField, query, reader, req); } @@ -554,14 +557,14 @@ protected Set getDocPrefetchFieldNames(String[] hlFieldNames, SolrQueryR */ protected boolean useFastVectorHighlighter(SolrParams params, SchemaField schemaField) { boolean methodFvh = - HighlightComponent.HighlightMethod.FAST_VECTOR.getMethodName().equals( - params.getFieldParam(schemaField.getName(), HighlightParams.METHOD)) - || params.getFieldBool(schemaField.getName(), USE_FVH, false); + HighlightComponent.HighlightMethod.FAST_VECTOR.getMethodName().equals( - params.getFieldParam(schemaField.getName(), HighlightParams.METHOD)) - || params.getFieldBool(schemaField.getName(), USE_FVH, false); + params.getFieldParam(schemaField.getName(), HighlightParams.METHOD)) + || params.getFieldBool(schemaField.getName(), USE_FVH, false); if (!methodFvh) return false; boolean termPosOff = schemaField.storeTermPositions() && schemaField.storeTermOffsets(); if (!termPosOff) { log.warn("Solr will use the standard Highlighter
instead of FastVectorHighlighter because the {} field {}" - , "does not store TermVectors with TermPositions and TermOffsets.", schemaField.getName()); + , schemaField.getName(), "does not store TermVectors with TermPositions and TermOffsets."); } return termPosOff; } @@ -575,15 +578,15 @@ protected Object doHighlightingByFastVectorHighlighter(SolrDocument doc, int doc String fieldName = schemaField.getName(); SolrFragmentsBuilder solrFb = getSolrFragmentsBuilder(fieldName, params); - String[] snippets = fvhContainer.fvh.getBestFragments( fvhContainer.fieldQuery, reader, docId, fieldName, - params.getFieldInt( fieldName, HighlightParams.FRAGSIZE, 100 ), - params.getFieldInt( fieldName, HighlightParams.SNIPPETS, 1 ), - getFragListBuilder( fieldName, params ), - getFragmentsBuilder( fieldName, params ), - solrFb.getPreTags( params, fieldName ), - solrFb.getPostTags( params, fieldName ), - getEncoder( fieldName, params ) ); - if (snippets != null && snippets.length > 0 ) + String[] snippets = fvhContainer.fvh.getBestFragments(fvhContainer.fieldQuery, reader, docId, fieldName, + params.getFieldInt(fieldName, HighlightParams.FRAGSIZE, 100), + params.getFieldInt(fieldName, HighlightParams.SNIPPETS, 1), + getFragListBuilder(fieldName, params), + getFragmentsBuilder(fieldName, params), + solrFb.getPreTags(params, fieldName), + solrFb.getPostTags(params, fieldName), + getEncoder(fieldName, params)); + if (snippets != null && snippets.length > 0) return snippets; return null; } @@ -596,18 +599,18 @@ protected Object doHighlightingByHighlighter(SolrDocument doc, int docId, Schema final String fieldName = schemaField.getName(); final int mvToExamine = - params.getFieldInt(fieldName, HighlightParams.MAX_MULTIVALUED_TO_EXAMINE, - (schemaField.multiValued()) ? Integer.MAX_VALUE : 1); + params.getFieldInt(fieldName, HighlightParams.MAX_MULTIVALUED_TO_EXAMINE, + (schemaField.multiValued()) ? Integer.MAX_VALUE : 1); // Technically this is the max *fragments* (snippets), not max values: int mvToMatch = - params.getFieldInt(fieldName, HighlightParams.MAX_MULTIVALUED_TO_MATCH, Integer.MAX_VALUE); + params.getFieldInt(fieldName, HighlightParams.MAX_MULTIVALUED_TO_MATCH, Integer.MAX_VALUE); if (mvToExamine <= 0 || mvToMatch <= 0) { return null; } int maxCharsToAnalyze = params.getFieldInt(fieldName, - HighlightParams.MAX_CHARS, DEFAULT_MAX_CHARS); + HighlightParams.MAX_CHARS, DEFAULT_MAX_CHARS); if (maxCharsToAnalyze < 0) {//e.g. -1 maxCharsToAnalyze = Integer.MAX_VALUE; } @@ -629,7 +632,7 @@ protected Object doHighlightingByHighlighter(SolrDocument doc, int docId, Schema // note: offsets are minimally sufficient for this HL. final Fields tvFields = schemaField.storeTermOffsets() ? reader.getTermVectors(docId) : null; final TokenStream tvStream = - TokenSources.getTermVectorTokenStreamOrNull(fieldName, tvFields, maxCharsToAnalyze - 1); + TokenSources.getTermVectorTokenStreamOrNull(fieldName, tvFields, maxCharsToAnalyze - 1); // We need to wrap in OffsetWindowTokenFilter if multi-valued try (OffsetWindowTokenFilter tvWindowStream = (tvStream != null && fieldValues.size() > 1) ? new OffsetWindowTokenFilter(tvStream) : null) { @@ -686,7 +689,7 @@ protected Object doHighlightingByHighlighter(SolrDocument doc, int docId, Schema // Highlight!
try { TextFragment[] bestTextFragments = - highlighter.getBestTextFragments(tstream, thisText, mergeContiguousFragments, numFragments); + highlighter.getBestTextFragments(tstream, thisText, mergeContiguousFragments, numFragments); for (TextFragment bestTextFragment : bestTextFragments) { if (bestTextFragment == null)//can happen via mergeContiguousFragments continue; @@ -719,7 +722,8 @@ protected Object doHighlightingByHighlighter(SolrDocument doc, int docId, Schema return null;//no highlights for this field } - /** Fetches field values to highlight. If the field value should come from an atypical place (or another aliased + /** + * Fetches field values to highlight. If the field value should come from an atypical place (or another aliased * field name, then a subclass could override to implement that. */ protected List getFieldValues(SolrDocument doc, String fieldName, int maxValues, int maxCharsToAnalyze, @@ -734,7 +738,7 @@ protected List getFieldValues(SolrDocument doc, String fieldName, int ma for (Object value : fieldValues) { String strValue; if (value instanceof IndexableField) { - strValue = fieldType.toExternal((IndexableField)value); + strValue = fieldType.toExternal((IndexableField) value); } else { strValue = value.toString(); // TODO FieldType needs an API for this, e.g. toExternalFromDv() } @@ -749,7 +753,8 @@ protected List getFieldValues(SolrDocument doc, String fieldName, int ma return result; } - /** Given the fragments, return the result to be put in the field {@link NamedList}. This is an extension + /** + * Given the fragments, return the result to be put in the field {@link NamedList}. This is an extension * point to allow adding other metadata like the absolute offsets or scores. */ protected Object getResponseForFragments(List frags, SolrQueryRequest req) { @@ -782,7 +787,7 @@ protected Object alternateField(SolrDocument doc, int docId, String fieldName, F invariants.put("f." + alternateField + "." + HighlightParams.SNIPPETS, "1"); // Enforce maxAlternateFieldLength by FRAGSIZE. Minimum 18 due to FVH limitations invariants.put("f." + alternateField + "." + HighlightParams.FRAGSIZE, - alternateFieldLen > 0 ? String.valueOf(Math.max(18, alternateFieldLen)) : String.valueOf(Integer.MAX_VALUE)); + alternateFieldLen > 0 ? String.valueOf(Math.max(18, alternateFieldLen)) : String.valueOf(Integer.MAX_VALUE)); SolrParams origParams = req.getParams(); req.setParams(SolrParams.wrapDefaults(new MapSolrParams(invariants), origParams)); fieldHighlights = doHighlightingOfField(doc, docId, schemaField, fvhContainer, query, reader, req, params); @@ -809,15 +814,15 @@ protected Object alternateField(SolrDocument doc, int docId, String fieldName, F Encoder encoder = getEncoder(fieldName, params); List altList = new ArrayList<>(); int len = 0; - for( String altText: altTexts ){ - if( alternateFieldLen <= 0 ){ + for (String altText : altTexts) { + if (alternateFieldLen <= 0) { altList.add(encoder.encodeText(altText)); - } else{ - altList.add( len + altText.length() > alternateFieldLen ? - encoder.encodeText(altText.substring(0, alternateFieldLen - len)) : - encoder.encodeText(altText) ); + } else { + altList.add(len + altText.length() > alternateFieldLen ? 
+ encoder.encodeText(altText.substring(0, alternateFieldLen - len)) : + encoder.encodeText(altText)); len += altText.length(); - if( len >= alternateFieldLen ) break; + if (len >= alternateFieldLen) break; } } return altList; @@ -838,169 +843,172 @@ public FvhContainer(FastVectorHighlighter fvh, FieldQuery fieldQuery) { this.fieldQuery = fieldQuery; } } -} -/** Orders Tokens in a window first by their startOffset ascending. - * endOffset is currently ignored. - * This is meant to work around fickleness in the highlighter only. It - * can mess up token positions and should not be used for indexing or querying. - */ -final class TokenOrderingFilter extends TokenFilter { - private final int windowSize; - private final LinkedList queue = new LinkedList<>(); //TODO replace with Deque, Array impl - private boolean done=false; - private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class); - - protected TokenOrderingFilter(TokenStream input, int windowSize) { - super(input); - this.windowSize = windowSize; - } - @Override - public void reset() throws IOException { - super.reset(); - queue.clear(); - done = false; - } + /** + * Orders Tokens in a window first by their startOffset ascending. + * endOffset is currently ignored. + * This is meant to work around fickleness in the highlighter only. It + * can mess up token positions and should not be used for indexing or querying. + */ + static final class TokenOrderingFilter extends TokenFilter { + private final int windowSize; + private final LinkedList queue = new LinkedList<>(); //TODO replace with Deque, Array impl + private boolean done = false; + private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class); - @Override - public boolean incrementToken() throws IOException { - while (!done && queue.size() < windowSize) { - if (!input.incrementToken()) { - done = true; - break; - } + protected TokenOrderingFilter(TokenStream input, int windowSize) { + super(input); + this.windowSize = windowSize; + } + + @Override + public void reset() throws IOException { + super.reset(); + queue.clear(); + done = false; + } - // reverse iterating for better efficiency since we know the - // list is already sorted, and most token start offsets will be too. - ListIterator iter = queue.listIterator(queue.size()); - while(iter.hasPrevious()) { - if (offsetAtt.startOffset() >= iter.previous().startOffset) { - // insertion will be before what next() would return (what - // we just compared against), so move back one so the insertion - // will be after. - iter.next(); + @Override + public boolean incrementToken() throws IOException { + while (!done && queue.size() < windowSize) { + if (!input.incrementToken()) { + done = true; break; } + + // reverse iterating for better efficiency since we know the + // list is already sorted, and most token start offsets will be too. + ListIterator iter = queue.listIterator(queue.size()); + while (iter.hasPrevious()) { + if (offsetAtt.startOffset() >= iter.previous().startOffset) { + // insertion will be before what next() would return (what + // we just compared against), so move back one so the insertion + // will be after. 
+ iter.next(); + break; + } + } + OrderedToken ot = new OrderedToken(); + ot.state = captureState(); + ot.startOffset = offsetAtt.startOffset(); + iter.add(ot); } - OrderedToken ot = new OrderedToken(); - ot.state = captureState(); - ot.startOffset = offsetAtt.startOffset(); - iter.add(ot); - } - if (queue.isEmpty()) { - return false; - } else { - restoreState(queue.removeFirst().state); - return true; + if (queue.isEmpty()) { + return false; + } else { + restoreState(queue.removeFirst().state); + return true; + } } - } -} - -// for TokenOrderingFilter, so it can easily sort by startOffset -class OrderedToken { - State state; - int startOffset; -} + } -/** For use with term vectors of multi-valued fields. We want an offset based window into its TokenStream. */ -final class OffsetWindowTokenFilter extends TokenFilter { + // for TokenOrderingFilter, so it can easily sort by startOffset + static class OrderedToken { + State state; + int startOffset; + } - private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class); - private final PositionIncrementAttribute posIncAtt = addAttribute(PositionIncrementAttribute.class); - private int windowStartOffset; - private int windowEndOffset = -1;//exclusive - private boolean windowTokenIncremented = false; - private boolean inputWasReset = false; - private State capturedState;//only used for first token of each subsequent window + /** For use with term vectors of multi-valued fields. We want an offset based window into its TokenStream. */ + static final class OffsetWindowTokenFilter extends TokenFilter { - OffsetWindowTokenFilter(TokenStream input) {//input should not have been reset already - super(input); - } + private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class); + private final PositionIncrementAttribute posIncAtt = addAttribute(PositionIncrementAttribute.class); + private int windowStartOffset; + private int windowEndOffset = -1;//exclusive + private boolean windowTokenIncremented = false; + private boolean inputWasReset = false; + private State capturedState;//only used for first token of each subsequent window - //Called at the start of each value/window - OffsetWindowTokenFilter advanceToNextWindowOfLength(int length) { - windowStartOffset = windowEndOffset + 1;//unclear why there's a single offset gap between values, but tests show it - windowEndOffset = windowStartOffset + length; - windowTokenIncremented = false;//thereby permit reset() - return this; - } + OffsetWindowTokenFilter(TokenStream input) {//input should not have been reset already + super(input); + } - @Override - public void reset() throws IOException { - //we do some state checking to ensure this is being used correctly - if (windowTokenIncremented) { - throw new IllegalStateException("This TokenStream does not support being subsequently reset()"); + //Called at the start of each value/window + OffsetWindowTokenFilter advanceToNextWindowOfLength(int length) { + windowStartOffset = windowEndOffset + 1;//unclear why there's a single offset gap between values, but tests show it + windowEndOffset = windowStartOffset + length; + windowTokenIncremented = false;//thereby permit reset() + return this; } - if (!inputWasReset) { - super.reset(); - inputWasReset = true; + + @Override + public void reset() throws IOException { + //we do some state checking to ensure this is being used correctly + if (windowTokenIncremented) { + throw new IllegalStateException("This TokenStream does not support being subsequently reset()"); + } + if (!inputWasReset) { 
+ super.reset(); + inputWasReset = true; + } } - } - @Override - public boolean incrementToken() throws IOException { - assert inputWasReset; - windowTokenIncremented = true; - while (true) { - //increment Token - if (capturedState == null) { - if (!input.incrementToken()) { - return false; + @Override + public boolean incrementToken() throws IOException { + assert inputWasReset; + windowTokenIncremented = true; + while (true) { + //increment Token + if (capturedState == null) { + if (!input.incrementToken()) { + return false; + } + } else { + restoreState(capturedState); + capturedState = null; + //Set posInc to 1 on first token of subsequent windows. To be thorough, we could subtract posIncGap? + posIncAtt.setPositionIncrement(1); } - } else { - restoreState(capturedState); - capturedState = null; - //Set posInc to 1 on first token of subsequent windows. To be thorough, we could subtract posIncGap? - posIncAtt.setPositionIncrement(1); - } - final int startOffset = offsetAtt.startOffset(); - final int endOffset = offsetAtt.endOffset(); - if (startOffset >= windowEndOffset) {//end of window - capturedState = captureState(); - return false; - } - if (startOffset >= windowStartOffset) {//in this window - offsetAtt.setOffset(startOffset - windowStartOffset, endOffset - windowStartOffset); - return true; + final int startOffset = offsetAtt.startOffset(); + final int endOffset = offsetAtt.endOffset(); + if (startOffset >= windowEndOffset) {//end of window + capturedState = captureState(); + return false; + } + if (startOffset >= windowStartOffset) {//in this window + offsetAtt.setOffset(startOffset - windowStartOffset, endOffset - windowStartOffset); + return true; + } + //otherwise this token is before the window; continue to advance } - //otherwise this token is before the window; continue to advance } } -} -/** Wraps a DirectoryReader that caches the {@link LeafReader#getTermVectors(int)} so that - * if the next call has the same ID, then it is reused. - */ -class TermVectorReusingLeafReader extends FilterLeafReader { + /** + * Wraps a DirectoryReader that caches the {@link LeafReader#getTermVectors(int)} so that + * if the next call has the same ID, then it is reused. 
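The window bookkeeping in advanceToNextWindowOfLength above encodes the single-offset gap observed between consecutive values of a multi-valued field: each window starts one past the previous window's exclusive end. A self-contained sketch of just that arithmetic (the value lengths are invented):

```java
public class OffsetWindows {
  public static void main(String[] args) {
    int[] valueLengths = {5, 7, 3}; // hypothetical multi-valued field
    int windowEnd = -1;             // exclusive end, matching the filter's initial state
    for (int len : valueLengths) {
      int windowStart = windowEnd + 1; // skip the one-offset gap between values
      windowEnd = windowStart + len;
      System.out.println("[" + windowStart + ", " + windowEnd + ")");
    }
    // prints [0, 5), [6, 13), [14, 17)
  }
}
```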
+ */ + static class TermVectorReusingLeafReader extends FilterLeafReader { - private int lastDocId = -1; - private Fields tvFields; + private int lastDocId = -1; + private Fields tvFields; - public TermVectorReusingLeafReader(LeafReader in) { - super(in); - } + public TermVectorReusingLeafReader(LeafReader in) { + super(in); + } - @Override - public Fields getTermVectors(int docID) throws IOException { - if (docID != lastDocId) { - lastDocId = docID; - tvFields = in.getTermVectors(docID); + @Override + public Fields getTermVectors(int docID) throws IOException { + if (docID != lastDocId) { + lastDocId = docID; + tvFields = in.getTermVectors(docID); + } + return tvFields; } - return tvFields; - } - @Override - public CacheHelper getCoreCacheHelper() { - return null; - } + @Override + public CacheHelper getCoreCacheHelper() { + return null; + } - @Override - public CacheHelper getReaderCacheHelper() { - return null; - } + @Override + public CacheHelper getReaderCacheHelper() { + return null; + } + } } diff --git a/solr/core/src/java/org/apache/solr/highlight/HighlightingPluginBase.java b/solr/core/src/java/org/apache/solr/highlight/HighlightingPluginBase.java index 5c41b13f8d39..1e7b0d659154 100644 --- a/solr/core/src/java/org/apache/solr/highlight/HighlightingPluginBase.java +++ b/solr/core/src/java/org/apache/solr/highlight/HighlightingPluginBase.java @@ -36,7 +36,7 @@ public abstract class HighlightingPluginBase implements SolrInfoBean protected Set metricNames = ConcurrentHashMap.newKeySet(1); protected SolrMetricsContext solrMetricsContext; - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { if( args != null ) { Object o = args.get("defaults"); if (o != null && o instanceof NamedList ) { diff --git a/solr/core/src/java/org/apache/solr/highlight/RegexFragmenter.java b/solr/core/src/java/org/apache/solr/highlight/RegexFragmenter.java index ffefbad33ace..b1206d01e5f5 100644 --- a/solr/core/src/java/org/apache/solr/highlight/RegexFragmenter.java +++ b/solr/core/src/java/org/apache/solr/highlight/RegexFragmenter.java @@ -48,7 +48,7 @@ public class RegexFragmenter extends HighlightingPluginBase implements SolrFragm protected Pattern defaultPattern; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init(args); defaultPatternRaw = LuceneRegexFragmenter.DEFAULT_PATTERN_RAW; if( defaults != null ) { diff --git a/solr/core/src/java/org/apache/solr/highlight/SolrEncoder.java b/solr/core/src/java/org/apache/solr/highlight/SolrEncoder.java index 7b78a06969fa..a7a76b382f39 100644 --- a/solr/core/src/java/org/apache/solr/highlight/SolrEncoder.java +++ b/solr/core/src/java/org/apache/solr/highlight/SolrEncoder.java @@ -30,7 +30,7 @@ public interface SolrEncoder extends SolrInfoBean, NamedListInitializedPlugin { * solrconfig.xml */ @Override - public void init(NamedList args); + public void init(@SuppressWarnings({"rawtypes"})NamedList args); /** * Return an {@link org.apache.lucene.search.highlight.Encoder} appropriate for this field. 
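TermVectorReusingLeafReader, defined above, is a single-slot memoizer: highlighting several fields of one document calls getTermVectors(docID) repeatedly with the same ID, so caching only the most recent result is enough to remove the redundant fetches. The same idea in miniature, detached from the Lucene types (names are illustrative):

```java
import java.util.function.IntFunction;

public class LastValueCache<V> {
  private final IntFunction<V> loader;
  private int lastKey = -1; // mirrors lastDocId = -1 above; real doc IDs are non-negative
  private V lastValue;

  public LastValueCache(IntFunction<V> loader) {
    this.loader = loader;
  }

  // Reload only when the key changes, mirroring the docID check above.
  public V get(int key) {
    if (key != lastKey) {
      lastKey = key;
      lastValue = loader.apply(key);
    }
    return lastValue;
  }
}
```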
diff --git a/solr/core/src/java/org/apache/solr/highlight/SolrFormatter.java b/solr/core/src/java/org/apache/solr/highlight/SolrFormatter.java index 1a6443e6deff..50ef986079e2 100644 --- a/solr/core/src/java/org/apache/solr/highlight/SolrFormatter.java +++ b/solr/core/src/java/org/apache/solr/highlight/SolrFormatter.java @@ -30,7 +30,7 @@ public interface SolrFormatter extends SolrInfoBean, NamedListInitializedPlugin * solrconfig.xml */ @Override - public void init(NamedList args); + public void init(@SuppressWarnings({"rawtypes"})NamedList args); /** * Return a {@link org.apache.lucene.search.highlight.Formatter} appropriate for this field. diff --git a/solr/core/src/java/org/apache/solr/highlight/SolrFragListBuilder.java b/solr/core/src/java/org/apache/solr/highlight/SolrFragListBuilder.java index 87da23513b01..3b8b80d42387 100644 --- a/solr/core/src/java/org/apache/solr/highlight/SolrFragListBuilder.java +++ b/solr/core/src/java/org/apache/solr/highlight/SolrFragListBuilder.java @@ -30,7 +30,7 @@ public interface SolrFragListBuilder extends SolrInfoBean, NamedListInitializedP * solrconfig.xml */ @Override - public void init( NamedList args); + public void init( @SuppressWarnings({"rawtypes"})NamedList args); /** * Return a FragListBuilder. diff --git a/solr/core/src/java/org/apache/solr/highlight/SolrFragmenter.java b/solr/core/src/java/org/apache/solr/highlight/SolrFragmenter.java index 98c3056993df..e449fe223d36 100644 --- a/solr/core/src/java/org/apache/solr/highlight/SolrFragmenter.java +++ b/solr/core/src/java/org/apache/solr/highlight/SolrFragmenter.java @@ -30,7 +30,7 @@ public interface SolrFragmenter extends SolrInfoBean, NamedListInitializedPlugin * solrconfig.xml */ @Override - public void init(NamedList args); + public void init(@SuppressWarnings({"rawtypes"})NamedList args); /** * Return a {@link org.apache.lucene.search.highlight.Fragmenter} appropriate for this field. diff --git a/solr/core/src/java/org/apache/solr/index/WrapperMergePolicyFactory.java b/solr/core/src/java/org/apache/solr/index/WrapperMergePolicyFactory.java index 7e095d711445..ec5d5c002137 100644 --- a/solr/core/src/java/org/apache/solr/index/WrapperMergePolicyFactory.java +++ b/solr/core/src/java/org/apache/solr/index/WrapperMergePolicyFactory.java @@ -68,6 +68,7 @@ protected MergePolicy getDefaultWrappedMergePolicy() { } /** Returns an instance of the wrapped {@link MergePolicy} after it has been configured with all set parameters. */ + @SuppressWarnings({"rawtypes"}) protected final MergePolicy getWrappedMergePolicy() { if (wrappedMergePolicyArgs == null) { return getDefaultWrappedMergePolicy(); diff --git a/solr/core/src/java/org/apache/solr/internal/csv/CSVParser.java b/solr/core/src/java/org/apache/solr/internal/csv/CSVParser.java index 4ccb1aff573b..27c507975c30 100644 --- a/solr/core/src/java/org/apache/solr/internal/csv/CSVParser.java +++ b/solr/core/src/java/org/apache/solr/internal/csv/CSVParser.java @@ -72,6 +72,7 @@ public class CSVParser { // the following objects are shared to reduce garbage /** A record buffer for getLine(). Grows as necessary and is reused. 
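Many of the @SuppressWarnings({"rawtypes"}) annotations in this patch, like the one on CSVParser's record buffer just below, paper over pre-generics collections. Where a signature is not frozen for compatibility, typing the collection removes the warning rather than silencing it; a contrived before/after (not the CSVParser code itself):

```java
import java.util.ArrayList;
import java.util.List;

public class RawVsTyped {
  @SuppressWarnings({"rawtypes"})
  private final ArrayList legacyRecord = new ArrayList(); // raw type, warning suppressed

  private final List<String> typedRecord = new ArrayList<>(); // no warning to suppress
}
```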
*/ + @SuppressWarnings({"rawtypes"}) private final ArrayList record = new ArrayList(); private final Token reusableToken = new Token(); private final CharBuffer wsBuf = new CharBuffer(); @@ -138,7 +139,9 @@ public CSVParser(Reader input, CSVStrategy strategy) { * @return matrix of records x values ('null' when end of file) * @throws IOException on parse error or input read-failure */ + @SuppressWarnings({"unchecked"}) public String[][] getAllValues() throws IOException { + @SuppressWarnings({"rawtypes"}) ArrayList records = new ArrayList(); String[] values; String[][] ret = null; @@ -189,6 +192,7 @@ public String nextValue() throws IOException { * ('null' when end of file has been reached) * @throws IOException on parse error or input read-failure */ + @SuppressWarnings({"unchecked"}) public String[] getLine() throws IOException { String[] ret = EMPTY_STRING_ARRAY; record.clear(); diff --git a/solr/core/src/java/org/apache/solr/internal/csv/CSVPrinter.java b/solr/core/src/java/org/apache/solr/internal/csv/CSVPrinter.java index be260e0108df..5ae6d71db6b8 100644 --- a/solr/core/src/java/org/apache/solr/internal/csv/CSVPrinter.java +++ b/solr/core/src/java/org/apache/solr/internal/csv/CSVPrinter.java @@ -89,6 +89,7 @@ public void println(String[] values) throws IOException { * * @param comment the comment to output */ + @SuppressWarnings({"fallthrough"}) public void printlnComment(String comment) throws IOException { if(this.strategy.isCommentingDisabled()) { return; diff --git a/solr/core/src/java/org/apache/solr/logging/LogWatcher.java b/solr/core/src/java/org/apache/solr/logging/LogWatcher.java index 74194826bbdc..89bf380ca838 100644 --- a/solr/core/src/java/org/apache/solr/logging/LogWatcher.java +++ b/solr/core/src/java/org/apache/solr/logging/LogWatcher.java @@ -124,6 +124,7 @@ public void reset() { * * @return a LogWatcher configured for the container's logging framework */ + @SuppressWarnings({"rawtypes"}) public static LogWatcher newRegisteredLogWatcher(LogWatcherConfig config, SolrResourceLoader loader) { if (!config.isEnabled()) { @@ -145,6 +146,7 @@ public static LogWatcher newRegisteredLogWatcher(LogWatcherConfig config, SolrRe return logWatcher; } + @SuppressWarnings({"rawtypes"}) private static LogWatcher createWatcher(LogWatcherConfig config, SolrResourceLoader loader) { String fname = config.getLoggingClass(); diff --git a/solr/core/src/java/org/apache/solr/metrics/MetricSuppliers.java b/solr/core/src/java/org/apache/solr/metrics/MetricSuppliers.java index c499153840e6..7ce07e821357 100644 --- a/solr/core/src/java/org/apache/solr/metrics/MetricSuppliers.java +++ b/solr/core/src/java/org/apache/solr/metrics/MetricSuppliers.java @@ -147,6 +147,7 @@ private static Clock getClock(PluginInfo info, String param) { private static final double DEFAULT_ALPHA = 0.015; private static final long DEFAULT_WINDOW = 300; + @SuppressWarnings({"unchecked"}) private static final Reservoir getReservoir(SolrResourceLoader loader, PluginInfo info) { if (info == null) { return new ExponentiallyDecayingReservoir(); @@ -276,6 +277,7 @@ public Histogram newMetric() { * @param info plugin configuration, or null for default * @return configured supplier instance, or default instance if configuration was invalid */ + @SuppressWarnings({"unchecked"}) public static MetricRegistry.MetricSupplier counterSupplier(SolrResourceLoader loader, PluginInfo info) { if (info == null || info.className == null || info.className.trim().isEmpty()) { return new DefaultCounterSupplier(); @@ -302,6 +304,7 @@ public static 
MetricRegistry.MetricSupplier counterSupplier(SolrResourc * @param info plugin configuration, or null for default * @return configured supplier instance, or default instance if configuration was invalid */ + @SuppressWarnings({"unchecked"}) public static MetricRegistry.MetricSupplier meterSupplier(SolrResourceLoader loader, PluginInfo info) { MetricRegistry.MetricSupplier supplier; if (info == null || info.className == null || info.className.isEmpty()) { @@ -328,6 +331,7 @@ public static MetricRegistry.MetricSupplier meterSupplier(SolrResourceLoa * @param info plugin configuration, or null for default * @return configured supplier instance, or default instance if configuration was invalid */ + @SuppressWarnings({"unchecked"}) public static MetricRegistry.MetricSupplier timerSupplier(SolrResourceLoader loader, PluginInfo info) { MetricRegistry.MetricSupplier supplier; if (info == null || info.className == null || info.className.isEmpty()) { @@ -353,6 +357,7 @@ public static MetricRegistry.MetricSupplier timerSupplier(SolrResourceLoa * @param info plugin configuration, or null for default * @return configured supplier instance, or default instance if configuration was invalid */ + @SuppressWarnings({"unchecked"}) public static MetricRegistry.MetricSupplier histogramSupplier(SolrResourceLoader loader, PluginInfo info) { MetricRegistry.MetricSupplier supplier; if (info == null || info.className == null || info.className.isEmpty()) { diff --git a/solr/core/src/java/org/apache/solr/metrics/MetricsMap.java b/solr/core/src/java/org/apache/solr/metrics/MetricsMap.java index e96450cafb7a..bd9abaf39852 100644 --- a/solr/core/src/java/org/apache/solr/metrics/MetricsMap.java +++ b/solr/core/src/java/org/apache/solr/metrics/MetricsMap.java @@ -159,7 +159,9 @@ public MBeanInfo getMBeanInfo() { if (jmxAttributes.containsKey(k)) { return; } + @SuppressWarnings({"rawtypes"}) Class type = v.getClass(); + @SuppressWarnings({"rawtypes"}) OpenType typeBox = determineType(type); if (type.equals(String.class) || typeBox == null) { attrInfoList.add(new MBeanAttributeInfo(k, String.class.getName(), @@ -179,6 +181,7 @@ public MBeanInfo getMBeanInfo() { return new MBeanInfo(getClass().getName(), "MetricsMap", attrInfoArr, null, null, null); } + @SuppressWarnings({"rawtypes"}) private OpenType determineType(Class type) { try { for (Field field : SimpleType.class.getFields()) { diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java index 14843ba02e9b..70f99a76fab3 100644 --- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java +++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java @@ -399,7 +399,7 @@ public Set registryNames(String... patterns) throws PatternSyntaxExcepti for (String pattern : patterns) { compiled.add(Pattern.compile(pattern)); } - return registryNames((Pattern[]) compiled.toArray(new Pattern[compiled.size()])); + return registryNames(compiled.toArray(new Pattern[compiled.size()])); } public Set registryNames(Pattern... patterns) { @@ -740,6 +740,7 @@ public Gauge getGauge() { } } + @SuppressWarnings({"unchecked", "rawtypes"}) public void registerGauge(SolrMetricsContext context, String registry, Gauge gauge, String tag, boolean force, String metricName, String... 
metricPath) { registerMetric(context, registry, new GaugeWrapper(gauge, tag), force, metricName, metricPath); } @@ -753,6 +754,7 @@ public int unregisterGauges(String registryName, String tagSegment) { AtomicInteger removed = new AtomicInteger(); registry.removeMatching((name, metric) -> { if (metric instanceof GaugeWrapper) { + @SuppressWarnings({"rawtypes"}) GaugeWrapper wrapper = (GaugeWrapper) metric; boolean toRemove = wrapper.getTag().contains(tagSegment); if (toRemove) { @@ -952,6 +954,7 @@ public void loadReporter(String registry, CoreContainer coreContainer, PluginInf * component instances. * @throws Exception if any argument is missing or invalid */ + @SuppressWarnings({"rawtypes"}) public void loadReporter(String registry, SolrResourceLoader loader, CoreContainer coreContainer, SolrCore solrCore, PluginInfo pluginInfo, String tag) throws Exception { if (registry == null || pluginInfo == null || pluginInfo.name == null || pluginInfo.className == null) { throw new IllegalArgumentException("loadReporter called with missing arguments: " + @@ -963,7 +966,7 @@ public void loadReporter(String registry, SolrResourceLoader loader, CoreContain pluginInfo.className, SolrMetricReporter.class, new String[0], - new Class[]{SolrMetricManager.class, String.class}, + new Class[]{SolrMetricManager.class, String.class}, new Object[]{this, registry} ); // prepare MDC for plugins that want to use its properties @@ -1173,6 +1176,7 @@ private List prepareCloudPlugins(PluginInfo[] pluginInfos, String gr return result; } + @SuppressWarnings({"unchecked", "rawtypes"}) private PluginInfo preparePlugin(PluginInfo info, Map defaultAttributes, Map defaultInitArgs) { if (info == null) { diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java index c321a11012a2..a7f24fdf7572 100644 --- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java +++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java @@ -16,6 +16,8 @@ */ package org.apache.solr.metrics; +import java.io.IOException; + /** * Used by objects that expose metrics through {@link SolrMetricManager}. */ @@ -62,9 +64,14 @@ static String getUniqueMetricTag(Object o, String parentName) { * Implementations should always call SolrMetricProducer.super.close() to ensure that * metrics with the same life-cycle as this component are properly unregistered. This prevents * obscure memory leaks. + * + * from: https://docs.oracle.com/javase/8/docs/api/java/lang/AutoCloseable.html + * While this interface method is declared to throw Exception, implementers are strongly encouraged + * to declare concrete implementations of the close method to throw more specific exceptions, or to + * throw no exception at all if the close operation cannot fail. 
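The signature change that follows relies on a standard Java rule: an overriding method may narrow the checked exceptions it declares, and AutoCloseable's documentation (quoted above) encourages exactly that. A minimal illustration:

```java
import java.io.IOException;

interface NarrowingExample extends AutoCloseable {
  // AutoCloseable declares close() throws Exception; an extending
  // interface (as SolrMetricProducer does here, or java.io.Closeable
  // in the JDK) may legally narrow it to IOException.
  @Override
  void close() throws IOException;
}
```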
*/ @Override - default void close() throws Exception { + default void close() throws IOException { SolrMetricsContext context = getSolrMetricsContext(); if (context == null) { return; diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/SolrSlf4jReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/SolrSlf4jReporter.java index aaf37c58c846..18a43bbf67d1 100644 --- a/solr/core/src/java/org/apache/solr/metrics/reporters/SolrSlf4jReporter.java +++ b/solr/core/src/java/org/apache/solr/metrics/reporters/SolrSlf4jReporter.java @@ -85,6 +85,7 @@ public void report() { } @Override + @SuppressWarnings({"rawtypes"}) public void report(SortedMap gauges, SortedMap counters, SortedMap histograms, SortedMap meters, SortedMap timers) { throw new UnsupportedOperationException("this method should never be called here!"); } diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java index b461ad6fbe17..f65b8f0fd716 100644 --- a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java +++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java @@ -144,7 +144,7 @@ public void setHandler(String handler) { this.handler = handler; } - public void setReport(List reportConfig) { + public void setReport(@SuppressWarnings({"rawtypes"})List reportConfig) { if (reportConfig == null || reportConfig.isEmpty()) { return; } @@ -156,7 +156,7 @@ public void setReport(List reportConfig) { }); } - public void setReport(Map map) { + public void setReport(@SuppressWarnings({"rawtypes"})Map map) { if (map == null || map.isEmpty()) { return; } diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java index 8df6817a81e9..929aa93108ed 100644 --- a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java +++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java @@ -94,6 +94,7 @@ public Report(String groupPattern, String labelPattern, String registryPattern, } } + @SuppressWarnings({"unchecked"}) public static Report fromMap(Map map) { String groupPattern = (String)map.get("group"); String labelPattern = (String)map.get("label"); @@ -257,18 +258,34 @@ public Builder convertDurationsTo(TimeUnit durationUnit) { * null to indicate that reporting should be skipped. Note: this * function will be called every time just before report is sent. * @return configured instance of reporter + * @deprecated use {@link #build(SolrClientCache, Supplier)} instead. */ + @Deprecated public SolrReporter build(HttpClient client, Supplier urlProvider) { return new SolrReporter(client, urlProvider, metricManager, reports, handler, reporterId, rateUnit, durationUnit, params, skipHistograms, skipAggregateValues, cloudClient, compact); } + /** + * Build it. + * @param solrClientCache an instance of {@link SolrClientCache} to be used for making calls. + * @param urlProvider function that returns the base URL of Solr instance to target. May return + * null to indicate that reporting should be skipped. Note: this + * function will be called every time just before report is sent. 
+ * @return configured instance of reporter + */ + public SolrReporter build(SolrClientCache solrClientCache, Supplier urlProvider) { + return new SolrReporter(solrClientCache, false, urlProvider, metricManager, reports, handler, reporterId, rateUnit, durationUnit, + params, skipHistograms, skipAggregateValues, cloudClient, compact); + } + } private String reporterId; private String handler; private Supplier urlProvider; private SolrClientCache clientCache; + private boolean closeClientCache; private List compiledReports; private SolrMetricManager metricManager; private boolean skipHistograms; @@ -306,11 +323,59 @@ public String toString() { // We delegate to registries anyway, so having a dummy registry is harmless. private static final MetricRegistry dummyRegistry = new MetricRegistry(); + // back-compat constructor + + /** + * Create a SolrReporter instance. + * @param httpClient HttpClient to use for constructing SolrClient instances. + * @param urlProvider what URL to send to. + * @param metricManager metric manager + * @param metrics metric specifications to report + * @param handler handler name to report to + * @param reporterId my reporter id + * @param rateUnit rate unit + * @param durationUnit duration unit + * @param params request parameters + * @param skipHistograms if true then don't send histogram metrics + * @param skipAggregateValues if true then don't send aggregate metrics' individual values + * @param cloudClient if true then use CloudSolrClient, plain HttpSolrClient otherwise. + * @param compact if true then use compact representation. + * + * @deprecated use {@link SolrReporter#SolrReporter(SolrClientCache, boolean, Supplier, SolrMetricManager, List, String, String, TimeUnit, TimeUnit, SolrParams, boolean, boolean, boolean, boolean)} instead. + */ + @Deprecated public SolrReporter(HttpClient httpClient, Supplier urlProvider, SolrMetricManager metricManager, List metrics, String handler, String reporterId, TimeUnit rateUnit, TimeUnit durationUnit, SolrParams params, boolean skipHistograms, boolean skipAggregateValues, boolean cloudClient, boolean compact) { + this (new SolrClientCache(httpClient), true, urlProvider, metricManager, + metrics, handler, reporterId, rateUnit, durationUnit, + params, skipHistograms, skipAggregateValues, cloudClient, compact); + } + + /** + * Create a SolrReporter instance. + * @param solrClientCache client cache to use for constructing SolrClient instances. + * @param urlProvider what URL to send to. + * @param metricManager metric manager + * @param metrics metric specifications to report + * @param handler handler name to report to + * @param reporterId my reporter id + * @param rateUnit rate unit + * @param durationUnit duration unit + * @param params request parameters + * @param skipHistograms if true then don't send histogram metrics + * @param skipAggregateValues if true then don't send aggregate metrics' individual values + * @param cloudClient if true then use CloudSolrClient, plain HttpSolrClient otherwise. + * @param compact if true then use compact representation. 
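The closeClientCache flag threaded through these constructors (it is absent from the @param list above) is a plain resource-ownership convention: the reporter closes the SolrClientCache only when it created the cache itself, as on the deprecated HttpClient path, and never when the cache was handed in. A generic sketch of the pattern, with hypothetical names:

```java
import java.io.Closeable;
import java.io.IOException;

class ReporterLike implements Closeable {
  private final Closeable client;
  private final boolean ownsClient;

  ReporterLike(Closeable client, boolean ownsClient) {
    this.client = client;
    this.ownsClient = ownsClient;
  }

  @Override
  public void close() throws IOException {
    if (ownsClient) { // only tear down what this object built
      client.close();
    }
  }
}
```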
+ */ + public SolrReporter(SolrClientCache solrClientCache, boolean closeClientCache, + Supplier urlProvider, SolrMetricManager metricManager, + List metrics, String handler, + String reporterId, TimeUnit rateUnit, TimeUnit durationUnit, + SolrParams params, boolean skipHistograms, boolean skipAggregateValues, + boolean cloudClient, boolean compact) { super(dummyRegistry, "solr-reporter", MetricFilter.ALL, rateUnit, durationUnit, null, true); this.metricManager = metricManager; @@ -320,7 +385,8 @@ public SolrReporter(HttpClient httpClient, Supplier urlProvider, SolrMet handler = MetricsCollectorHandler.HANDLER_PATH; } this.handler = handler; - this.clientCache = new SolrClientCache(httpClient); + this.clientCache = solrClientCache; + this.closeClientCache = closeClientCache; this.compiledReports = new ArrayList<>(); metrics.forEach(report -> { MetricFilter filter = new SolrMetricManager.RegexFilter(report.metricFilters); @@ -347,7 +413,9 @@ public SolrReporter(HttpClient httpClient, Supplier urlProvider, SolrMet @Override public void close() { - clientCache.close(); + if (closeClientCache) { + clientCache.close(); + } super.close(); } @@ -412,6 +480,7 @@ public void report() { } @Override + @SuppressWarnings({"rawtypes"}) public void report(SortedMap gauges, SortedMap counters, SortedMap histograms, SortedMap meters, SortedMap timers) { // no-op - we do all the work in report() } diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java index 8609a238d74f..8791160e9663 100644 --- a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java +++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java @@ -62,7 +62,7 @@ public class SolrShardReporter extends SolrCoreReporter { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - public static final List DEFAULT_FILTERS = new ArrayList(){{ + public static final List DEFAULT_FILTERS = new ArrayList<>(){{ add("TLOG.*"); add("CORE\\.fs.*"); add("REPLICATION.*"); @@ -154,7 +154,7 @@ public void init(PluginInfo pluginInfo, SolrCore core) { .cloudClient(false) // we want to send reports specifically to a selected leader instance .skipAggregateValues(true) // we don't want to transport details of aggregates .skipHistograms(true) // we don't want to transport histograms - .build(core.getCoreContainer().getUpdateShardHandler().getDefaultHttpClient(), new LeaderUrlSupplier(core)); + .build(core.getCoreContainer().getSolrClientCache(), new LeaderUrlSupplier(core)); reporter.start(period, TimeUnit.SECONDS); } diff --git a/solr/core/src/java/org/apache/solr/metrics/rrd/SolrRrdBackendFactory.java b/solr/core/src/java/org/apache/solr/metrics/rrd/SolrRrdBackendFactory.java index 936ee85a9346..97d28f119ea2 100644 --- a/solr/core/src/java/org/apache/solr/metrics/rrd/SolrRrdBackendFactory.java +++ b/solr/core/src/java/org/apache/solr/metrics/rrd/SolrRrdBackendFactory.java @@ -282,7 +282,7 @@ public List> list(int maxLength) throws IOException { backends.forEach((name, db) -> { long lastModifiedTime = db.getLastModifiedTime(); Pair stored = byName.get(name); - Pair inMemory = new Pair(name, lastModifiedTime); + Pair inMemory = new Pair<>(name, lastModifiedTime); if (stored != null) { if (stored.second() < lastModifiedTime) { byName.put(name, inMemory); diff --git a/solr/core/src/java/org/apache/solr/packagemanager/PackageManager.java 
b/solr/core/src/java/org/apache/solr/packagemanager/PackageManager.java index fe5790e6fb4a..6d9babcc7f46 100644 --- a/solr/core/src/java/org/apache/solr/packagemanager/PackageManager.java +++ b/solr/core/src/java/org/apache/solr/packagemanager/PackageManager.java @@ -81,6 +81,7 @@ public void close() throws IOException { } } + @SuppressWarnings({"unchecked", "rawtypes"}) public List fetchInstalledPackageInstances() throws SolrException { log.info("Getting packages from packages.json..."); List ret = new ArrayList(); @@ -112,6 +113,7 @@ public List fetchInstalledPackageInstances() throws SolrExc return ret; } + @SuppressWarnings({"unchecked"}) public Map getPackagesDeployed(String collection) { Map packages = null; try { @@ -145,8 +147,9 @@ private void ensureCollectionsExist(List collections) { } } + @SuppressWarnings({"unchecked"}) private boolean deployPackage(SolrPackageInstance packageInstance, boolean pegToLatest, boolean isUpdate, boolean noprompt, - List collections, String overrides[]) { + List collections, String[] overrides) { List previouslyDeployed = new ArrayList<>(); // collections where package is already deployed in for (String collection: collections) { diff --git a/solr/core/src/java/org/apache/solr/packagemanager/RepositoryManager.java b/solr/core/src/java/org/apache/solr/packagemanager/RepositoryManager.java index a0cc0e1f3a0b..aa5c7b4c7970 100644 --- a/solr/core/src/java/org/apache/solr/packagemanager/RepositoryManager.java +++ b/solr/core/src/java/org/apache/solr/packagemanager/RepositoryManager.java @@ -122,6 +122,7 @@ public void addRepository(String repoName, String uri) throws Exception { String existingRepositoriesJson = getRepositoriesJson(packageManager.zkClient); log.info(existingRepositoriesJson); + @SuppressWarnings({"unchecked"}) List repos = getMapper().readValue(existingRepositoriesJson, List.class); repos.add(new DefaultPackageRepository(repoName, uri)); if (packageManager.zkClient.exists(PackageUtils.REPOSITORIES_ZK_PATH, true) == false) { diff --git a/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageInstance.java b/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageInstance.java index f913bfb22c33..25c21f7c227a 100644 --- a/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageInstance.java +++ b/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageInstance.java @@ -59,6 +59,11 @@ public boolean equals(Object obj) { return name.equals(((SolrPackageInstance)obj).name) && version.equals(((SolrPackageInstance)obj).version); } + @Override + public int hashCode() { + throw new UnsupportedOperationException("TODO unimplemented"); + } + @Override public String toString() { return jsonStr(); diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java index 1b93d8944289..cc48ee74aa71 100644 --- a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java +++ b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java @@ -209,6 +209,11 @@ public boolean equals(Object obj) { return false; } + @Override + public int hashCode() { + throw new UnsupportedOperationException("TODO unimplemented"); + } + @Override public String toString() { try { @@ -250,6 +255,7 @@ public void refresh(SolrQueryRequest req, SolrQueryResponse rsp, PayloadObj payload) { if (!checkEnabled(payload)) return; Package.AddVersion add = payload.get(); @@ -271,6 +277,7 @@ public void add(SolrQueryRequest req, SolrQueryResponse rsp, PayloadObj> listeners = new ArrayList<>(); + private List> listeners = 
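The two hashCode() stubs added above throw UnsupportedOperationException, which keeps equals/hashCode formally paired but will fail at runtime if instances are ever used as keys in a hash-based collection. If that usage is possible, hashing the same fields that equals() compares is the safer route; a sketch under that assumption (PackageKey is a stand-in class, not the committed code):

```java
import java.util.Objects;

class PackageKey {
  final String name;
  final String version;

  PackageKey(String name, String version) {
    this.name = name;
    this.version = version;
  }

  @Override
  public boolean equals(Object obj) {
    return obj instanceof PackageKey
        && name.equals(((PackageKey) obj).name)
        && version.equals(((PackageKey) obj).version);
  }

  @Override
  public int hashCode() {
    // Hash exactly the fields equals() compares.
    return Objects.hash(name, version);
  }
}
```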
new CopyOnWriteArrayList<>(); public synchronized void addListener(Listener listener) { listeners.add(new SoftReference<>(listener)); diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java b/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java index b38628e5d5c2..8ff12a09aca4 100644 --- a/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java +++ b/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java @@ -22,18 +22,9 @@ import java.lang.invoke.MethodHandles; import java.nio.file.Path; import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; +import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; - import org.apache.solr.common.MapWriter; import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.core.CoreContainer; @@ -73,6 +64,7 @@ public Package getPackage(String key) { return packageClassLoaders.get(key); } + @SuppressWarnings({"unchecked"}) public Map getPackages() { return Collections.EMPTY_MAP; } @@ -222,7 +214,7 @@ public Version getLatest(String lessThan) { if (lessThan == null) { return getLatest(); } - String latest = findBiggest(lessThan, new ArrayList(sortedVersions)); + String latest = findBiggest(lessThan, new ArrayList<>(sortedVersions)); return latest == null ? null : myVersions.get(latest); } @@ -269,7 +261,7 @@ public void writeMap(EntryWriter ew) throws IOException { paths.add(coreContainer.getPackageStoreAPI().getPackageStore().getRealpath(file)); } - loader = new SolrResourceLoader( + loader = new PackageResourceLoader( "PACKAGE_LOADER: " + parent.name() + ":" + version, paths, Paths.get(coreContainer.getSolrHome()), @@ -280,6 +272,7 @@ public String getVersion() { return version.version; } + @SuppressWarnings({"rawtypes"}) public Collection getFiles() { return Collections.unmodifiableList(version.files); } @@ -301,6 +294,34 @@ public String toString() { } } } + static class PackageResourceLoader extends SolrResourceLoader { + + PackageResourceLoader(String name, List classpath, Path instanceDir, ClassLoader parent) { + super(name, classpath, instanceDir, parent); + } + + @Override + public boolean addToCoreAware(T obj) { + //do not do anything + //this class is not aware of a SolrCore and it is totally not tied to + // the lifecycle of SolrCore. So, this returns 'false' & it should be + // taken care of by the caller + return false; + } + + @Override + public boolean addToResourceLoaderAware(T obj) { + // do not do anything + // this should be invoked only after the init() is invoked. + // The caller should take care of that + return false; + } + + @Override + public void addToInfoBeans(T obj) { + //do not do anything. 
It should be handled externally + } + } private static String findBiggest(String lessThan, List sortedList) { String latest = null; diff --git a/solr/core/src/java/org/apache/solr/pkg/PackagePluginHolder.java b/solr/core/src/java/org/apache/solr/pkg/PackagePluginHolder.java index cf2382d6aa0c..322a1d3079d3 100644 --- a/solr/core/src/java/org/apache/solr/pkg/PackagePluginHolder.java +++ b/solr/core/src/java/org/apache/solr/pkg/PackagePluginHolder.java @@ -17,13 +17,12 @@ package org.apache.solr.pkg; +import java.io.IOException; import java.lang.invoke.MethodHandles; - -import org.apache.solr.core.PluginBag; -import org.apache.solr.core.PluginInfo; -import org.apache.solr.core.RequestParams; -import org.apache.solr.core.SolrConfig; -import org.apache.solr.core.SolrCore; +import org.apache.lucene.analysis.util.ResourceLoaderAware; +import org.apache.solr.common.SolrException; +import org.apache.solr.core.*; +import org.apache.solr.util.plugin.SolrCoreAware; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -98,7 +97,7 @@ private synchronized void reload(PackageLoader.Package pkg) { if (pkgVersion != null) { if (newest == pkgVersion) { - //I'm already using the latest classloder in the package. nothing to do + //I'm already using the latest classloader in the package. nothing to do return; } } @@ -113,10 +112,12 @@ private synchronized void reload(PackageLoader.Package pkg) { } + @SuppressWarnings({"unchecked"}) protected void initNewInstance(PackageLoader.Package.Version newest) { Object instance = SolrCore.createInstance(pluginInfo.className, pluginMeta.clazz, pluginMeta.getCleanTag(), core, newest.getLoader()); PluginBag.initInstance(instance, pluginInfo); + handleAwareCallbacks(newest.getLoader(), instance); T old = inst; inst = (T) instance; if (old instanceof AutoCloseable) { @@ -129,4 +130,24 @@ protected void initNewInstance(PackageLoader.Package.Version newest) { } } + private void handleAwareCallbacks(SolrResourceLoader loader, Object instance) { + if (instance instanceof SolrCoreAware) { + SolrCoreAware coreAware = (SolrCoreAware) instance; + if (!core.getResourceLoader().addToCoreAware(coreAware)) { + coreAware.inform(core); + } + } + if (instance instanceof ResourceLoaderAware) { + SolrResourceLoader.assertAwareCompatibility(ResourceLoaderAware.class, instance); + try { + ((ResourceLoaderAware) instance).inform(loader); + } catch (IOException e) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); + } + } + if (instance instanceof SolrInfoBean) { + core.getResourceLoader().addToInfoBeans(instance); + } + } + } \ No newline at end of file diff --git a/solr/core/src/java/org/apache/solr/request/LocalSolrQueryRequest.java b/solr/core/src/java/org/apache/solr/request/LocalSolrQueryRequest.java index 427d71c933ac..7a8d093f7a9d 100644 --- a/solr/core/src/java/org/apache/solr/request/LocalSolrQueryRequest.java +++ b/solr/core/src/java/org/apache/solr/request/LocalSolrQueryRequest.java @@ -35,13 +35,16 @@ * */ public class LocalSolrQueryRequest extends SolrQueryRequestBase { + @SuppressWarnings({"rawtypes"}) public final static Map emptyArgs = new HashMap(0,1); public String userPrincipalName = null; - protected static SolrParams makeParams(String query, String qtype, int start, int limit, Map args) { + protected static SolrParams makeParams(String query, String qtype, int start, int limit, + @SuppressWarnings({"rawtypes"})Map args) { Map map = new HashMap<>(); - for (Iterator iter = args.entrySet().iterator(); iter.hasNext();) { + for 
(@SuppressWarnings({"rawtypes"})Iterator iter = args.entrySet().iterator(); iter.hasNext();) { + @SuppressWarnings({"rawtypes"}) Map.Entry e = (Map.Entry)iter.next(); String k = e.getKey().toString(); Object v = e.getValue(); @@ -55,11 +58,12 @@ protected static SolrParams makeParams(String query, String qtype, int start, in return new MultiMapSolrParams(map); } - public LocalSolrQueryRequest(SolrCore core, String query, String qtype, int start, int limit, Map args) { + public LocalSolrQueryRequest(SolrCore core, String query, String qtype, int start, int limit, + @SuppressWarnings({"rawtypes"})Map args) { super(core,makeParams(query,qtype,start,limit,args)); } - public LocalSolrQueryRequest(SolrCore core, NamedList args) { + public LocalSolrQueryRequest(SolrCore core, @SuppressWarnings({"rawtypes"})NamedList args) { super(core, args.toSolrParams()); } diff --git a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java index 9b79d0f74840..fc462ddcc9ae 100644 --- a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java +++ b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java @@ -267,6 +267,7 @@ protected DocSet computeDocSet(DocSet baseDocSet, List excludeTagList) t } else { return base; } + @SuppressWarnings({"rawtypes"}) AllGroupHeadsCollector allGroupHeadsCollector = grouping.getCommands().get(0).createAllGroupCollector(); searcher.search(base.getTopFilter(), allGroupHeadsCollector); return new BitDocSet(allGroupHeadsCollector.retrieveGroupHeads(searcher.maxDoc())); @@ -333,6 +334,7 @@ public int getGroupedFacetQueryCount(Query facetQuery, DocSet docSet) throws IOE ); } + @SuppressWarnings({"rawtypes"}) AllGroupsCollector collector = new AllGroupsCollector<>(new TermGroupSelector(groupField)); searcher.search(QueryUtils.combineQueryAndFilter(facetQuery, docSet.getTopFilter()), collector); return collector.getGroupCount(); @@ -516,6 +518,7 @@ private NamedList getTermCounts(String field, Integer mincount, ParsedP String warningMessage = "Raising facet.mincount from " + mincount + " to 1, because field " + field + " is Points-based."; log.warn(warningMessage); + @SuppressWarnings({"unchecked"}) List warnings = (List)rb.rsp.getResponseHeader().get("warnings"); if (null == warnings) { warnings = new ArrayList<>(); @@ -568,13 +571,16 @@ private NamedList getTermCounts(String field, Integer mincount, ParsedP //Go through the response to build the expected output for SimpleFacets counts = new NamedList<>(); if(resObj != null) { + @SuppressWarnings({"unchecked"}) NamedList res = (NamedList) resObj; + @SuppressWarnings({"unchecked"}) List> buckets = (List>)res.get("buckets"); for(NamedList b : buckets) { counts.add(b.get("val").toString(), ((Number)b.get("count")).intValue()); } if(missing) { + @SuppressWarnings({"unchecked"}) NamedList missingCounts = (NamedList) res.get("missing"); counts.add(null, ((Number)missingCounts.get("count")).intValue()); } @@ -797,6 +803,7 @@ public NamedList getFacetFieldCounts() int maxThreads = req.getParams().getInt(FacetParams.FACET_THREADS, 0); Executor executor = maxThreads == 0 ? directExecutor : facetExecutor; final Semaphore semaphore = new Semaphore((maxThreads <= 0) ? Integer.MAX_VALUE : maxThreads); + @SuppressWarnings({"rawtypes"}) List> futures = new ArrayList<>(facetFs.length); if (fdebugParent != null) { @@ -815,6 +822,7 @@ public NamedList getFacetFieldCounts() final String termList = localParams == null ? 
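The facet threading completed just below follows a semaphore-gated executor pattern: semaphore.acquire() before executor.execute() caps the number of in-flight per-field tasks, and each task releases its permit when it finishes. A runnable sketch of the same pattern (all names are illustrative):

```java
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;
import java.util.concurrent.RunnableFuture;
import java.util.concurrent.Semaphore;

public class BoundedFacetThreads {
  public static void main(String[] args) throws Exception {
    int maxThreads = 2; // stand-in for the facet.threads parameter
    ExecutorService executor = Executors.newCachedThreadPool();
    Semaphore semaphore = new Semaphore(maxThreads);
    List<Future<Integer>> futures = new ArrayList<>();
    for (int i = 0; i < 8; i++) {
      final int fieldId = i;
      Callable<Integer> job = () -> {
        try {
          return fieldId * fieldId; // stand-in for one per-field facet count
        } finally {
          semaphore.release(); // done: free a slot for the next task
        }
      };
      RunnableFuture<Integer> task = new FutureTask<>(job);
      semaphore.acquire(); // may block until a slot is free
      executor.execute(task);
      futures.add(task);
    }
    for (Future<Integer> f : futures) {
      System.out.println(f.get()); // results in submission order
    }
    executor.shutdown();
  }
}
```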
null : localParams.get(CommonParams.TERMS); final String key = parsed.key; final String facetValue = parsed.facetValue; + @SuppressWarnings({"rawtypes"}) Callable callable = () -> { try { NamedList result = new SimpleOrderedMap<>(); @@ -839,6 +847,7 @@ public NamedList getFacetFieldCounts() } }; + @SuppressWarnings({"rawtypes"}) RunnableFuture runnableFuture = new FutureTask<>(callable); semaphore.acquire();//may block and/or interrupt executor.execute(runnableFuture);//releases semaphore when done @@ -846,7 +855,7 @@ public NamedList getFacetFieldCounts() }//facetFs loop //Loop over futures to get the values. The order is the same as facetFs but shouldn't matter. - for (Future future : futures) { + for (@SuppressWarnings({"rawtypes"})Future future : futures) { res.addAll(future.get()); } assert semaphore.availablePermits() >= maxThreads; @@ -1196,6 +1205,7 @@ public NamedList getFacetIntervalCounts() throws IOException, SyntaxErro return res; } + @SuppressWarnings({"rawtypes"}) public NamedList getHeatmapCounts() throws IOException, SyntaxError { final NamedList resOuter = new SimpleOrderedMap<>(); String[] unparsedFields = rb.req.getParams().getParams(FacetParams.FACET_HEATMAP); diff --git a/solr/core/src/java/org/apache/solr/request/SolrQueryRequestBase.java b/solr/core/src/java/org/apache/solr/request/SolrQueryRequestBase.java index ed6f95345270..76e8733c550f 100644 --- a/solr/core/src/java/org/apache/solr/request/SolrQueryRequestBase.java +++ b/solr/core/src/java/org/apache/solr/request/SolrQueryRequestBase.java @@ -211,6 +211,7 @@ protected ValidatingJsonMap getSpec() { return null; } + @SuppressWarnings({"unchecked"}) protected Map getValidators(){ return Collections.EMPTY_MAP; } diff --git a/solr/core/src/java/org/apache/solr/request/SolrRequestHandler.java b/solr/core/src/java/org/apache/solr/request/SolrRequestHandler.java index 95692d33f9d1..0840af708ab6 100644 --- a/solr/core/src/java/org/apache/solr/request/SolrRequestHandler.java +++ b/solr/core/src/java/org/apache/solr/request/SolrRequestHandler.java @@ -45,7 +45,7 @@ public interface SolrRequestHandler extends SolrInfoBean { * may be specified when declaring a request handler in * solrconfig.xml */ - public void init(NamedList args); + public void init(@SuppressWarnings({"rawtypes"})NamedList args); /** diff --git a/solr/core/src/java/org/apache/solr/request/SolrRequestInfo.java b/solr/core/src/java/org/apache/solr/request/SolrRequestInfo.java index 5d6a73b32fda..9e0850998aa8 100644 --- a/solr/core/src/java/org/apache/solr/request/SolrRequestInfo.java +++ b/solr/core/src/java/org/apache/solr/request/SolrRequestInfo.java @@ -21,6 +21,7 @@ import java.lang.invoke.MethodHandles; import java.security.Principal; import java.util.Date; +import java.util.Deque; import java.util.LinkedList; import java.util.List; import java.util.TimeZone; @@ -36,9 +37,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - +/** Information about the Solr request/response held in a {@link ThreadLocal}. 
*/ public class SolrRequestInfo { - protected final static ThreadLocal<SolrRequestInfo> threadLocal = new ThreadLocal<>(); + + protected static final int MAX_STACK_SIZE = 10; + + protected static final ThreadLocal<Deque<SolrRequestInfo>> threadLocal = ThreadLocal.withInitial(LinkedList::new); protected SolrQueryRequest req; protected SolrQueryResponse rsp; @@ -52,35 +56,62 @@ public class SolrRequestInfo { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); public static SolrRequestInfo getRequestInfo() { - return threadLocal.get(); + Deque<SolrRequestInfo> stack = threadLocal.get(); + if (stack.isEmpty()) return null; + return stack.peek(); } + /** + * Adds the SolrRequestInfo onto a stack held in a {@link ThreadLocal}. + * Remember to call {@link #clearRequestInfo()}! + */ public static void setRequestInfo(SolrRequestInfo info) { - // TODO: temporary sanity check... this can be changed to just an assert in the future - SolrRequestInfo prev = threadLocal.get(); - if (prev != null) { - log.error("Previous SolrRequestInfo was not closed! req={}", prev.req.getOriginalParams()); - log.error("prev == info : {}", prev.req == info.req, new RuntimeException()); + Deque<SolrRequestInfo> stack = threadLocal.get(); + if (info == null) { + throw new IllegalArgumentException("SolrRequestInfo is null"); + } else if (stack.size() <= MAX_STACK_SIZE) { + stack.push(info); + } else { + assert false : "SolrRequestInfo Stack is full"; + log.error("SolrRequestInfo Stack is full"); } - assert prev == null; - - threadLocal.set(info); } + /** Removes the most recent SolrRequestInfo from the stack */ public static void clearRequestInfo() { - try { - SolrRequestInfo info = threadLocal.get(); - if (info != null && info.closeHooks != null) { - for (Closeable hook : info.closeHooks) { - try { - hook.close(); - } catch (Exception e) { - SolrException.log(log, "Exception during close hook", e); - } + Deque<SolrRequestInfo> stack = threadLocal.get(); + if (stack.isEmpty()) { + assert false : "clearRequestInfo called too many times"; + log.error("clearRequestInfo called too many times"); + } else { + SolrRequestInfo info = stack.pop(); + closeHooks(info); + } + } + + /** + * This reset method is primarily a protection mechanism; + * by this point the stack should already be empty, because every "set" call must be balanced by a "clear".
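A usage sketch of that balancing contract, as a standalone ThreadLocal stack (ArrayDeque here, where the patch uses LinkedList; names are illustrative): pushes must be matched by pops, which is what lets re-entrant requests on the same thread restore the outer context.

```java
import java.util.ArrayDeque;
import java.util.Deque;

public class ContextStack {
  private static final ThreadLocal<Deque<String>> CTX =
      ThreadLocal.withInitial(ArrayDeque::new);

  static void set(String info) { CTX.get().push(info); }
  static void clear() { CTX.get().pop(); }
  static String current() { return CTX.get().peek(); }

  public static void main(String[] args) {
    set("outer-request");
    set("nested-request");         // re-entrant call on the same thread
    System.out.println(current()); // nested-request
    clear();
    System.out.println(current()); // outer-request restored
    clear();
  }
}
```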
+ */ + public static void reset() { + Deque stack = threadLocal.get(); + boolean isEmpty = stack.isEmpty(); + while (!stack.isEmpty()) { + SolrRequestInfo info = stack.pop(); + closeHooks(info); + } + assert isEmpty : "SolrRequestInfo Stack should have been cleared."; + } + + private static void closeHooks(SolrRequestInfo info) { + if (info.closeHooks != null) { + for (Closeable hook : info.closeHooks) { + try { + hook.close(); + } catch (Exception e) { + SolrException.log(log, "Exception during close hook", e); } } - } finally { - threadLocal.remove(); } } @@ -172,23 +203,27 @@ public void setAction(SolrDispatchFilter.Action action) { public static ExecutorUtil.InheritableThreadLocalProvider getInheritableThreadLocalProvider() { return new ExecutorUtil.InheritableThreadLocalProvider() { @Override - public void store(AtomicReference ctx) { + @SuppressWarnings({"unchecked"}) + public void store(@SuppressWarnings({"rawtypes"})AtomicReference ctx) { SolrRequestInfo me = SolrRequestInfo.getRequestInfo(); if (me != null) ctx.set(me); } @Override - public void set(AtomicReference ctx) { + @SuppressWarnings({"unchecked"}) + public void set(@SuppressWarnings({"rawtypes"})AtomicReference ctx) { SolrRequestInfo me = (SolrRequestInfo) ctx.get(); if (me != null) { - ctx.set(null); SolrRequestInfo.setRequestInfo(me); } } @Override - public void clean(AtomicReference ctx) { - SolrRequestInfo.clearRequestInfo(); + public void clean(@SuppressWarnings({"rawtypes"})AtomicReference ctx) { + if (ctx.get() != null) { + SolrRequestInfo.clearRequestInfo(); + } + SolrRequestInfo.reset(); } }; } diff --git a/solr/core/src/java/org/apache/solr/request/json/JsonQueryConverter.java b/solr/core/src/java/org/apache/solr/request/json/JsonQueryConverter.java index 22e15c7af2fe..c51e0d3f5a71 100644 --- a/solr/core/src/java/org/apache/solr/request/json/JsonQueryConverter.java +++ b/solr/core/src/java/org/apache/solr/request/json/JsonQueryConverter.java @@ -45,6 +45,7 @@ private String putParam(String val, Map additionalParams) { // when isQParser==true, "val" is a query object of the form {query_type:{param1:val1, param2:val2}} // when isQParser==false, "val" is a parameter on an existing qparser (which could be a simple parameter like 42, or a sub-query) + @SuppressWarnings({"unchecked"}) private void buildLocalParams(StringBuilder builder, Object val, boolean isQParser, Map additionalParams) { if (!isQParser && !(val instanceof Map)) { // val is value of a query parser, and it is not a map @@ -98,6 +99,18 @@ private void buildLocalParams(StringBuilder builder, Object val, boolean isQPars qtype = map.keySet().iterator().next(); // FUTURE: might want to recurse here instead to handle nested tags (and add tagName as a parameter?) } + } else { + if (qtype.equals("param")) { + boolean toplevel; + if (toplevel=(builder.length() == 0)) { + builder.append("{!v="); + } + builder.append("$").append(map.get("param")); + if (toplevel) { + builder.append("}"); + } + return; + } } StringBuilder subBuilder = useSubBuilder ? 
new StringBuilder() : builder; @@ -114,26 +127,31 @@ private void buildLocalParams(StringBuilder builder, Object val, boolean isQPars builder.append('$').append(putParam(subBuilder.toString(), additionalParams)); } } else { - for (Map.Entry entry : map.entrySet()) { - String key = entry.getKey(); - if (entry.getValue() instanceof List) { - if (key.equals("query")) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "Error when parsing json query, value of query field should not be a list, found : " + entry.getValue()); - } - List l = (List) entry.getValue(); - for (Object subVal : l) { + if(map.size()==1 && map.keySet().iterator().next().equals("param")) { + builder.append("v").append("=$").append(map.get("param")).append(" "); + } else { + for (Map.Entry entry : map.entrySet()) { + String key = entry.getKey(); + if (entry.getValue() instanceof List) { + if (key.equals("query")) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Error when parsing json query, value of query field should not be a list, found : " + entry.getValue()); + } + @SuppressWarnings({"rawtypes"}) + List l = (List) entry.getValue(); + for (Object subVal : l) { + builder.append(key).append("="); + buildLocalParams(builder, subVal, true, additionalParams); + builder.append(" "); + } + } else { + if (key.equals("query")) { + key = "v"; + } builder.append(key).append("="); - buildLocalParams(builder, subVal, true, additionalParams); + buildLocalParams(builder, entry.getValue(), true, additionalParams); builder.append(" "); } - } else { - if (key.equals("query")) { - key = "v"; - } - builder.append(key).append("="); - buildLocalParams(builder, entry.getValue(), true, additionalParams); - builder.append(" "); } } } diff --git a/solr/core/src/java/org/apache/solr/request/json/ObjectUtil.java b/solr/core/src/java/org/apache/solr/request/json/ObjectUtil.java index b9c73bc4bc31..a0837a20e006 100644 --- a/solr/core/src/java/org/apache/solr/request/json/ObjectUtil.java +++ b/solr/core/src/java/org/apache/solr/request/json/ObjectUtil.java @@ -39,7 +39,9 @@ public void handleConflict(Map container, List path, Stri } if (previous instanceof Map && current instanceof Map) { + @SuppressWarnings({"unchecked"}) Map prevMap = (Map)previous; + @SuppressWarnings({"unchecked"}) Map currMap = (Map)current; if (prevMap.size() == 0) return; mergeMap(prevMap, currMap, path); @@ -70,13 +72,15 @@ public void mergeMap(Map targetMap, Map srcMap, Li } protected Object makeList(Object current, Object previous) { + @SuppressWarnings({"rawtypes"}) ArrayList lst = new ArrayList(); append(lst, previous); // make the original value(s) come first append(lst, current); return lst; } - protected void append(List lst, Object current) { + @SuppressWarnings({"unchecked"}) + protected void append(@SuppressWarnings({"rawtypes"})List lst, Object current) { if (current instanceof Collection) { lst.addAll((Collection)current); } else { @@ -89,6 +93,7 @@ protected void append(List lst, Object current) { public static void mergeObjects(Map top, List path, Object val, ConflictHandler handler) { Map outer = top; for (int i=0; i sub = (Map)outer.get(path.get(i)); if (sub == null) { sub = new LinkedHashMap(); @@ -107,6 +112,7 @@ public static void mergeObjects(Map top, List path, Objec } } else if (val instanceof Map) { // merging at top level... 
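Taken together, the two new "param" branches above let a JSON query reference a request parameter instead of inlining query text: as a top-level query, {"query": {"param": "userQuery"}} expands to the local-params string {!v=$userQuery}, and as the sole argument of a named qparser it expands to a v=$userQuery local param, so the query string is resolved from the userQuery request parameter at parse time (the parameter name is illustrative).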
+ @SuppressWarnings({"unchecked"}) Map newMap = (Map)val; handler.mergeMap(outer, newMap, path); } else { diff --git a/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java b/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java index 33d154151270..a78a13422955 100644 --- a/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java +++ b/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java @@ -31,6 +31,7 @@ import org.apache.commons.io.output.ByteArrayOutputStream; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexableField; +import org.apache.lucene.search.TotalHits; import org.apache.solr.client.solrj.impl.BinaryResponseParser; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.params.CommonParams; @@ -80,7 +81,7 @@ public String getContentType(SolrQueryRequest request, SolrQueryResponse respons } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { /* NOOP */ } @@ -164,8 +165,8 @@ protected void writeResultsBody( ResultContext res, JavaBinCodec codec ) throws public void writeResults(ResultContext ctx, JavaBinCodec codec) throws IOException { codec.writeTag(JavaBinCodec.SOLRDOCLST); - List l = new ArrayList(3); - l.add((long) ctx.getDocList().matches()); + List l = new ArrayList<>(4); + l.add( ctx.getDocList().matches()); l.add((long) ctx.getDocList().offset()); Float maxScore = null; @@ -173,6 +174,7 @@ public void writeResults(ResultContext ctx, JavaBinCodec codec) throws IOExcepti maxScore = ctx.getDocList().maxScore(); } l.add(maxScore); + l.add(ctx.getDocList().hitCountRelation() == TotalHits.Relation.EQUAL_TO); codec.writeArray(l); // this is a seprate function so that streaming responses can use just that part @@ -237,6 +239,7 @@ public Collection getFieldValues(String name) { return null; } + @SuppressWarnings({"unchecked"}) public Collection getRawFieldValues(String name) { Object v = _fields.get(name); if (v instanceof Collection) { @@ -264,6 +267,7 @@ public boolean hasNext() { } @Override + @SuppressWarnings({"unchecked"}) public Entry next() { return convertCharSeq(it.next()); } @@ -282,6 +286,7 @@ public Entry next() { public Object getFirstValue(String name) { Object v = _fields.get(name); if (v == null || !(v instanceof Collection)) return convertCharSeq(v); + @SuppressWarnings({"rawtypes"}) Collection c = (Collection) v; if (c.size() > 0) { return convertCharSeq(c.iterator().next()); diff --git a/solr/core/src/java/org/apache/solr/response/CSVResponseWriter.java b/solr/core/src/java/org/apache/solr/response/CSVResponseWriter.java index e35e3edf2691..0d31d2df2c3c 100644 --- a/solr/core/src/java/org/apache/solr/response/CSVResponseWriter.java +++ b/solr/core/src/java/org/apache/solr/response/CSVResponseWriter.java @@ -47,7 +47,7 @@ public class CSVResponseWriter implements QueryResponseWriter { @Override - public void init(NamedList n) { + public void init(@SuppressWarnings({"rawtypes"})NamedList n) { } @Override @@ -323,8 +323,10 @@ public void close() throws IOException { } //NOTE: a document cannot currently contain another document + @SuppressWarnings({"rawtypes"}) List tmpList; @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void writeSolrDocument(String name, SolrDocument doc, ReturnFields returnFields, int idx ) throws IOException { if (tmpList == null) { tmpList = new ArrayList(1); diff --git a/solr/core/src/java/org/apache/solr/response/DocsStreamer.java 
b/solr/core/src/java/org/apache/solr/response/DocsStreamer.java index c6010e1b3364..995685fdfb00 100644 --- a/solr/core/src/java/org/apache/solr/response/DocsStreamer.java +++ b/solr/core/src/java/org/apache/solr/response/DocsStreamer.java @@ -57,6 +57,7 @@ * This streams SolrDocuments from a DocList and applies transformer */ public class DocsStreamer implements Iterator { + @SuppressWarnings({"rawtypes"}) public static final Set KNOWN_TYPES = new HashSet<>(); private final org.apache.solr.response.ResultContext rctx; diff --git a/solr/core/src/java/org/apache/solr/response/GeoJSONResponseWriter.java b/solr/core/src/java/org/apache/solr/response/GeoJSONResponseWriter.java index 90b20ffb9efb..1afca18214be 100644 --- a/solr/core/src/java/org/apache/solr/response/GeoJSONResponseWriter.java +++ b/solr/core/src/java/org/apache/solr/response/GeoJSONResponseWriter.java @@ -106,6 +106,7 @@ public void writeResponse() throws IOException { } rsp.removeResponseHeader(); + @SuppressWarnings({"unchecked"}) NamedList vals = rsp.getValues(); Object response = vals.remove("response"); if(vals.size()==0) { @@ -195,6 +196,7 @@ protected void writeFeatureGeometry(Object geo) throws IOException { // Support multi-valued geometries if(geo instanceof Iterable) { + @SuppressWarnings({"rawtypes"}) Iterator iter = ((Iterable)geo).iterator(); if(!iter.hasNext()) { return; // empty list @@ -292,11 +294,18 @@ else if(geo instanceof WriteableGeoJSON) { } } + @Deprecated @Override public void writeStartDocumentList(String name, - long start, int size, long numFound, Float maxScore) throws IOException + long start, int size, long numFound, Float maxScore) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public void writeStartDocumentList(String name, + long start, int size, long numFound, Float maxScore, Boolean numFoundExact) throws IOException { - writeMapOpener((maxScore==null) ? 
3 : 4); + writeMapOpener(headerSize(maxScore, numFoundExact)); incLevel(); writeKey("type",false); writeStr(null, "FeatureCollection", false); @@ -312,6 +321,13 @@ public void writeStartDocumentList(String name, writeKey("maxScore",false); writeFloat(null,maxScore); } + + if (numFoundExact != null) { + writeMapSeparator(); + writeKey("numFoundExact",false); + writeBool(null, numFoundExact); + } + writeMapSeparator(); // if can we get bbox of all results, we should write it here diff --git a/solr/core/src/java/org/apache/solr/response/GraphMLResponseWriter.java b/solr/core/src/java/org/apache/solr/response/GraphMLResponseWriter.java index 9bb7403d2e88..568a58455bff 100644 --- a/solr/core/src/java/org/apache/solr/response/GraphMLResponseWriter.java +++ b/solr/core/src/java/org/apache/solr/response/GraphMLResponseWriter.java @@ -34,7 +34,7 @@ public class GraphMLResponseWriter implements QueryResponseWriter { - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { /* NOOP */ } @@ -94,10 +94,10 @@ public void write(Writer writer, SolrQueryRequest req, SolrQueryResponse res) th printWriter.write(" outfields = new ArrayList(); - Iterator keys = tuple.fields.keySet().iterator(); + List outfields = new ArrayList<>(); + Iterator keys = tuple.getFields().keySet().iterator(); while(keys.hasNext()) { - String key = keys.next(); + String key = String.valueOf(keys.next()); if(key.equals("node") || key.equals("ancestors") || key.equals("collection")) { continue; } else { diff --git a/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java b/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java index a52aef61b3be..30a5e5c677c1 100644 --- a/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java +++ b/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java @@ -40,7 +40,7 @@ public class JSONResponseWriter implements QueryResponseWriter { private String contentType = CONTENT_TYPE_JSON_UTF8; @Override - public void init(NamedList namedList) { + public void init(@SuppressWarnings({"rawtypes"})NamedList namedList) { String contentType = (String) namedList.get("content-type"); if (contentType != null) { this.contentType = contentType; @@ -78,7 +78,7 @@ public static PushWriter getPushWriter(Writer writer, SolrQueryRequest req, Solr return new JSONWriter(writer, req, rsp); } -} + /** * Writes NamedLists directly as an array of NameTypeValue JSON objects... 
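The writer changes above and below all thread a nullable numFoundExact flag into the doclist header, where a null flag means the key is omitted entirely. A minimal sketch of the size arithmetic the patch centralizes in JSONWriter.headerSize (the class name here is invented; output values are illustrative):

    // Invented sketch of the header arithmetic: numFound, start and docs are
    // always present; maxScore and numFoundExact are written only when
    // non-null, mirroring the headerSize helper introduced by this patch.
    public class HeaderSizeSketch {
        static int headerSize(Float maxScore, Boolean numFoundExact) {
            int size = 3;                      // numFound, start, docs
            if (maxScore != null) size++;      // + maxScore
            if (numFoundExact != null) size++; // + numFoundExact
            return size;
        }

        public static void main(String[] args) {
            System.out.println(headerSize(null, null)); // 3 (legacy shape)
            System.out.println(headerSize(1.5f, true)); // 5
        }
    }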
@@ -103,7 +103,7 @@ public ArrayOfNameTypeValueJSONWriter(Writer writer, SolrQueryRequest req, SolrQ } @Override - public void writeNamedList(String name, NamedList val) throws IOException { + public void writeNamedList(String name, @SuppressWarnings({"rawtypes"})NamedList val) throws IOException { if (val instanceof SimpleOrderedMap) { super.writeNamedList(name, val); @@ -215,20 +215,29 @@ public void writeSolrDocument(String name, SolrDocument doc, ReturnFields return super.writeSolrDocument(name, doc, returnFields, idx); } + @Deprecated @Override public void writeStartDocumentList(String name, long start, int size, long numFound, Float maxScore) throws IOException { ifNeededWriteTypeAndValueKey("doclist"); super.writeStartDocumentList(name, start, size, numFound, maxScore); } + + @Override + public void writeStartDocumentList(String name, long start, int size, long numFound, Float maxScore, Boolean numFoundExact) throws IOException { + ifNeededWriteTypeAndValueKey("doclist"); + super.writeStartDocumentList(name, start, size, numFound, maxScore, numFoundExact); + } + @Override - public void writeMap(String name, Map val, boolean excludeOuter, boolean isFirstVal) throws IOException { + public void writeMap(String name, @SuppressWarnings({"rawtypes"})Map val, + boolean excludeOuter, boolean isFirstVal) throws IOException { ifNeededWriteTypeAndValueKey("map"); super.writeMap(name, val, excludeOuter, isFirstVal); } @Override - public void writeArray(String name, Iterator val) throws IOException { + public void writeArray(String name, @SuppressWarnings({"rawtypes"})Iterator val) throws IOException { ifNeededWriteTypeAndValueKey("array"); super.writeArray(name, val); } @@ -240,9 +249,10 @@ public void writeNull(String name) throws IOException { } } -abstract class NaNFloatWriter extends JSONWriter { - +abstract static class NaNFloatWriter extends JSONWriter { + abstract protected String getNaN(); + abstract protected String getInf(); public NaNFloatWriter(Writer writer, SolrQueryRequest req, SolrQueryResponse rsp) { @@ -275,3 +285,4 @@ public void writeDouble(String name, double val) throws IOException { } } } +} diff --git a/solr/core/src/java/org/apache/solr/response/JSONWriter.java b/solr/core/src/java/org/apache/solr/response/JSONWriter.java index 77aee3d310f6..48587a98e045 100644 --- a/solr/core/src/java/org/apache/solr/response/JSONWriter.java +++ b/solr/core/src/java/org/apache/solr/response/JSONWriter.java @@ -132,11 +132,16 @@ public void writeSolrDocument(String name, SolrDocument doc, ReturnFields return // that the size could not be reliably determined. // + /** + * This method will be removed in Solr 9 + * @deprecated Use {@link #writeStartDocumentList(String, long, int, long, Float, Boolean)}. + */ @Override + @Deprecated public void writeStartDocumentList(String name, long start, int size, long numFound, Float maxScore) throws IOException { - writeMapOpener((maxScore==null) ?
3 : 4); + writeMapOpener(headerSize(maxScore, null)); incLevel(); writeKey("numFound",false); writeLong(null,numFound); @@ -156,6 +161,42 @@ public void writeStartDocumentList(String name, incLevel(); } + + @Override + public void writeStartDocumentList(String name, + long start, int size, long numFound, Float maxScore, Boolean numFoundExact) throws IOException { + writeMapOpener(headerSize(maxScore, numFoundExact)); + incLevel(); + writeKey("numFound",false); + writeLong(null,numFound); + writeMapSeparator(); + writeKey("start",false); + writeLong(null,start); + + if (maxScore != null) { + writeMapSeparator(); + writeKey("maxScore",false); + writeFloat(null,maxScore); + } + + if (numFoundExact != null) { + writeMapSeparator(); + writeKey("numFoundExact",false); + writeBool(null, numFoundExact); + } + writeMapSeparator(); + writeKey("docs",false); + writeArrayOpener(size); + + incLevel(); + } + + protected int headerSize(Float maxScore, Boolean numFoundExact) { + int headerSize = 3; + if (maxScore != null) headerSize++; + if (numFoundExact != null) headerSize++; + return headerSize; + } @Override public void writeEndDocumentList() throws IOException diff --git a/solr/core/src/java/org/apache/solr/response/PHPResponseWriter.java b/solr/core/src/java/org/apache/solr/response/PHPResponseWriter.java index ad12128dc847..98ac8f4161eb 100644 --- a/solr/core/src/java/org/apache/solr/response/PHPResponseWriter.java +++ b/solr/core/src/java/org/apache/solr/response/PHPResponseWriter.java @@ -29,7 +29,7 @@ public class PHPResponseWriter implements QueryResponseWriter { private String contentType = CONTENT_TYPE_PHP_UTF8; @Override - public void init(NamedList namedList) { + public void init(@SuppressWarnings({"rawtypes"})NamedList namedList) { String contentType = (String) namedList.get("content-type"); if (contentType != null) { this.contentType = contentType; @@ -58,7 +58,7 @@ public PHPWriter(Writer writer, SolrQueryRequest req, SolrQueryResponse rsp) { } @Override - public void writeNamedList(String name, NamedList val) throws IOException { + public void writeNamedList(String name, @SuppressWarnings({"rawtypes"})NamedList val) throws IOException { writeNamedListAsMapMangled(name,val); } @@ -78,7 +78,7 @@ public void writeArrayOpener(int size) throws IOException { } @Override - public void writeArray(String name, List l) throws IOException { + public void writeArray(String name, @SuppressWarnings({"rawtypes"})List l) throws IOException { writeArray(name,l.iterator()); } diff --git a/solr/core/src/java/org/apache/solr/response/PHPSerializedResponseWriter.java b/solr/core/src/java/org/apache/solr/response/PHPSerializedResponseWriter.java index 7e090b89be4e..155f267a53ba 100644 --- a/solr/core/src/java/org/apache/solr/response/PHPSerializedResponseWriter.java +++ b/solr/core/src/java/org/apache/solr/response/PHPSerializedResponseWriter.java @@ -44,7 +44,7 @@ public class PHPSerializedResponseWriter implements QueryResponseWriter { private String contentType = CONTENT_TYPE_PHP_UTF8; @Override - public void init(NamedList namedList) { + public void init(@SuppressWarnings({"rawtypes"})NamedList namedList) { String contentType = (String) namedList.get("content-type"); if (contentType != null) { this.contentType = contentType; @@ -85,17 +85,23 @@ public void writeResponse() throws IOException { } @Override - public void writeNamedList(String name, NamedList val) throws IOException { + public void writeNamedList(String name, @SuppressWarnings({"rawtypes"})NamedList val) throws IOException { 
writeNamedListAsMapMangled(name,val); } - - + @Deprecated @Override public void writeStartDocumentList(String name, long start, int size, long numFound, Float maxScore) throws IOException { - writeMapOpener((maxScore==null) ? 3 : 4); + throw new UnsupportedOperationException(); + } + + @Override + public void writeStartDocumentList(String name, + long start, int size, long numFound, Float maxScore, Boolean numFoundExact) throws IOException + { + writeMapOpener(headerSize(maxScore, numFoundExact)); writeKey("numFound",false); writeLong(null,numFound); writeKey("start",false); @@ -105,6 +111,10 @@ public void writeStartDocumentList(String name, writeKey("maxScore",false); writeFloat(null,maxScore); } + if (numFoundExact != null) { + writeKey("numFoundExact",false); + writeBool(null, numFoundExact); + } writeKey("docs",false); writeArrayOpener(size); } @@ -176,7 +186,9 @@ public void writeArray(String name, Object[] val) throws IOException { } @Override - public void writeArray(String name, Iterator val) throws IOException { + @SuppressWarnings({"unchecked"}) + public void writeArray(String name, @SuppressWarnings({"rawtypes"})Iterator val) throws IOException { + @SuppressWarnings({"rawtypes"}) ArrayList vals = new ArrayList(); while( val.hasNext() ) { vals.add(val.next()); diff --git a/solr/core/src/java/org/apache/solr/response/PythonResponseWriter.java b/solr/core/src/java/org/apache/solr/response/PythonResponseWriter.java index 98109df042f6..5c7582a6939f 100644 --- a/solr/core/src/java/org/apache/solr/response/PythonResponseWriter.java +++ b/solr/core/src/java/org/apache/solr/response/PythonResponseWriter.java @@ -26,7 +26,7 @@ public class PythonResponseWriter implements QueryResponseWriter { static String CONTENT_TYPE_PYTHON_ASCII="text/x-python;charset=US-ASCII"; @Override - public void init(NamedList n) { + public void init(@SuppressWarnings({"rawtypes"})NamedList n) { /* NOOP */ } @@ -46,7 +46,7 @@ public String getContentType(SolrQueryRequest request, SolrQueryResponse respons } } -class PythonWriter extends NaNFloatWriter { +class PythonWriter extends JSONResponseWriter.NaNFloatWriter { @Override protected String getNaN() { return "float('NaN')"; } @Override diff --git a/solr/core/src/java/org/apache/solr/response/QueryResponseWriter.java b/solr/core/src/java/org/apache/solr/response/QueryResponseWriter.java index 8f460dd6fa00..5960e36e425d 100644 --- a/solr/core/src/java/org/apache/solr/response/QueryResponseWriter.java +++ b/solr/core/src/java/org/apache/solr/response/QueryResponseWriter.java @@ -80,7 +80,7 @@ public interface QueryResponseWriter extends NamedListInitializedPlugin { * solrconfig.xml */ @Override - public void init(NamedList args); + public void init(@SuppressWarnings({"rawtypes"})NamedList args); } diff --git a/solr/core/src/java/org/apache/solr/response/RawResponseWriter.java b/solr/core/src/java/org/apache/solr/response/RawResponseWriter.java index ffad4b968969..9261a556dae7 100644 --- a/solr/core/src/java/org/apache/solr/response/RawResponseWriter.java +++ b/solr/core/src/java/org/apache/solr/response/RawResponseWriter.java @@ -54,7 +54,7 @@ public class RawResponseWriter implements BinaryQueryResponseWriter { private String _baseWriter = null; @Override - public void init(NamedList n) { + public void init(@SuppressWarnings({"rawtypes"})NamedList n) { if( n != null ) { Object base = n.get( "base" ); if( base != null ) { diff --git a/solr/core/src/java/org/apache/solr/response/RubyResponseWriter.java 
b/solr/core/src/java/org/apache/solr/response/RubyResponseWriter.java index 6b73a7cfe1ba..6eb61c12dcca 100644 --- a/solr/core/src/java/org/apache/solr/response/RubyResponseWriter.java +++ b/solr/core/src/java/org/apache/solr/response/RubyResponseWriter.java @@ -26,7 +26,7 @@ public class RubyResponseWriter implements QueryResponseWriter { static String CONTENT_TYPE_RUBY_UTF8="text/x-ruby;charset=UTF-8"; @Override - public void init(NamedList n) { + public void init(@SuppressWarnings({"rawtypes"})NamedList n) { /* NOOP */ } @@ -46,7 +46,7 @@ public String getContentType(SolrQueryRequest request, SolrQueryResponse respons } } -class RubyWriter extends NaNFloatWriter { +class RubyWriter extends JSONResponseWriter.NaNFloatWriter { @Override protected String getNaN() { return "(0.0/0.0)"; } diff --git a/solr/core/src/java/org/apache/solr/response/SchemaXmlResponseWriter.java b/solr/core/src/java/org/apache/solr/response/SchemaXmlResponseWriter.java index 95a3bc761c62..a8cdc89cc2ef 100644 --- a/solr/core/src/java/org/apache/solr/response/SchemaXmlResponseWriter.java +++ b/solr/core/src/java/org/apache/solr/response/SchemaXmlResponseWriter.java @@ -28,7 +28,7 @@ */ public class SchemaXmlResponseWriter implements QueryResponseWriter { @Override - public void init(NamedList n) { + public void init(@SuppressWarnings({"rawtypes"})NamedList n) { /* NOOP */ } diff --git a/solr/core/src/java/org/apache/solr/response/SchemaXmlWriter.java b/solr/core/src/java/org/apache/solr/response/SchemaXmlWriter.java index 8d26e0e39819..0ea30fb11386 100644 --- a/solr/core/src/java/org/apache/solr/response/SchemaXmlWriter.java +++ b/solr/core/src/java/org/apache/solr/response/SchemaXmlWriter.java @@ -68,6 +68,7 @@ public SchemaXmlWriter(Writer writer, SolrQueryRequest req, SolrQueryResponse rs super(writer, req, rsp); } + @SuppressWarnings({"unchecked"}) public void writeResponse() throws IOException { writer.write(XML_DECLARATION); @@ -78,7 +79,7 @@ public void writeResponse() throws IOException { writer.write(MANAGED_SCHEMA_DO_NOT_EDIT_WARNING); } - @SuppressWarnings("unchecked") Map schemaProperties + Map schemaProperties = (Map)rsp.getValues().get(IndexSchema.SCHEMA); openStartTag(IndexSchema.SCHEMA); @@ -103,7 +104,7 @@ public void writeResponse() throws IOException { } else if (schemaPropName.equals(IndexSchema.FIELD_TYPES)) { writeFieldTypes((List>) val); } else if (schemaPropName.equals(IndexSchema.FIELDS)) { - @SuppressWarnings("unchecked") List> fieldPropertiesList + List> fieldPropertiesList = (List>) val; for (SimpleOrderedMap fieldProperties : fieldPropertiesList) { openStartTag(IndexSchema.FIELD); @@ -113,7 +114,7 @@ public void writeResponse() throws IOException { closeStartTag(true); } } else if (schemaPropName.equals(IndexSchema.DYNAMIC_FIELDS)) { - @SuppressWarnings("unchecked") List> dynamicFieldPropertiesList + List> dynamicFieldPropertiesList = (List>) val; for (SimpleOrderedMap dynamicFieldProperties : dynamicFieldPropertiesList) { openStartTag(IndexSchema.DYNAMIC_FIELD); @@ -124,7 +125,7 @@ public void writeResponse() throws IOException { closeStartTag(true); } } else if (schemaPropName.equals(IndexSchema.COPY_FIELDS)) { - @SuppressWarnings("unchecked") List> copyFieldPropertiesList + List> copyFieldPropertiesList = (List>) val; for (SimpleOrderedMap copyFieldProperties : copyFieldPropertiesList) { openStartTag(IndexSchema.COPY_FIELD); @@ -142,6 +143,7 @@ public void writeResponse() throws IOException { endTag(IndexSchema.SCHEMA); } + @SuppressWarnings({"unchecked"}) private void 
writeFieldTypes(List> fieldTypePropertiesList) throws IOException { for (SimpleOrderedMap fieldTypeProperties : fieldTypePropertiesList) { SimpleOrderedMap analyzerProperties = null; @@ -199,6 +201,7 @@ private void writeSimilarity(SimpleOrderedMap similarityProperties) thro } } + @SuppressWarnings({"unchecked"}) private void writeAnalyzer(SimpleOrderedMap analyzerProperties, String analyzerType) throws IOException { openStartTag(FieldType.ANALYZER); if (null != analyzerType) { @@ -304,7 +307,7 @@ public void writeAttr(String name, String val, boolean escape) throws IOExceptio } @Override - public void writeNamedList(String name, NamedList val) throws IOException { + public void writeNamedList(String name, @SuppressWarnings({"rawtypes"})NamedList val) throws IOException { // name is ignored - this method is only used for SimilarityFactory int sz = val.size(); for (int i=0; iNote on Returnable Data */ + @SuppressWarnings({"rawtypes"}) public NamedList getValues() { return values; } /** diff --git a/solr/core/src/java/org/apache/solr/response/TabularResponseWriter.java b/solr/core/src/java/org/apache/solr/response/TabularResponseWriter.java index a6e9e94e836f..2951a473ce87 100644 --- a/solr/core/src/java/org/apache/solr/response/TabularResponseWriter.java +++ b/solr/core/src/java/org/apache/solr/response/TabularResponseWriter.java @@ -118,7 +118,7 @@ else if (responseObj instanceof DocList) { } @Override - public void writeNamedList(String name, NamedList val) throws IOException { + public void writeNamedList(String name, @SuppressWarnings({"rawtypes"})NamedList val) throws IOException { } @Override @@ -127,6 +127,13 @@ public void writeStartDocumentList(String name, { // nothing } + + @Override + public void writeStartDocumentList(String name, + long start, int size, long numFound, Float maxScore, Boolean numFoundExact) throws IOException + { + // nothing + } @Override public void writeEndDocumentList() throws IOException @@ -135,11 +142,11 @@ public void writeEndDocumentList() throws IOException } @Override - public void writeMap(String name, Map val, boolean excludeOuter, boolean isFirstVal) throws IOException { + public void writeMap(String name, @SuppressWarnings({"rawtypes"})Map val, boolean excludeOuter, boolean isFirstVal) throws IOException { } @Override - public void writeArray(String name, Iterator val) throws IOException { + public void writeArray(String name, @SuppressWarnings({"rawtypes"})Iterator val) throws IOException { } @Override diff --git a/solr/core/src/java/org/apache/solr/response/TextResponseWriter.java b/solr/core/src/java/org/apache/solr/response/TextResponseWriter.java index 98ef7871220e..803c340ca753 100644 --- a/solr/core/src/java/org/apache/solr/response/TextResponseWriter.java +++ b/solr/core/src/java/org/apache/solr/response/TextResponseWriter.java @@ -23,6 +23,7 @@ import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexableField; +import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.BytesRef; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; @@ -156,7 +157,16 @@ public final void writeVal(String name, Object val) throws IOException { // types of formats, including those where the name may come after the value (like // some XML formats). 
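The TextResponseWriter hunk that follows keeps existing subclasses source-compatible: the new six-argument writeStartDocumentList gets a concrete default body that drops the flag and delegates to the old five-argument method, which stays abstract but is deprecated (with a TODO to make the new one abstract in Solr 9.0). A minimal sketch of that deprecate-and-delegate evolution pattern, with invented names:

    // Invented names; sketches the pattern used in the hunk below.
    abstract class WriterBase {
        // New richer entry point; the patch's TODO: make abstract in 9.0.
        public void writeStart(long numFound, Boolean numFoundExact) {
            writeStart(numFound); // legacy subclasses land here; flag dropped
        }

        /** @deprecated Use {@link #writeStart(long, Boolean)}. */
        @Deprecated
        public abstract void writeStart(long numFound);
    }

    class LegacyWriter extends WriterBase {
        @Override
        public void writeStart(long numFound) {
            System.out.println("numFound=" + numFound);
        }

        public static void main(String[] args) {
            new LegacyWriter().writeStart(42, true); // prints numFound=42
        }
    }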
- public abstract void writeStartDocumentList(String name, long start, int size, long numFound, Float maxScore) throws IOException; + //TODO: Make abstract in Solr 9.0 + public void writeStartDocumentList(String name, long start, int size, long numFound, Float maxScore, Boolean numFoundExact) throws IOException { + writeStartDocumentList(name, start, size, numFound, maxScore); + } + + /** + * @deprecated Use {@link #writeStartDocumentList(String, long, int, long, Float, Boolean)} + */ + @Deprecated + public abstract void writeStartDocumentList(String name, long start, int size, long numFound, Float maxScore) throws IOException; public abstract void writeSolrDocument(String name, SolrDocument doc, ReturnFields fields, int idx) throws IOException; @@ -165,7 +175,7 @@ public final void writeVal(String name, Object val) throws IOException { // Assume each SolrDocument is already transformed public final void writeSolrDocumentList(String name, SolrDocumentList docs, ReturnFields fields) throws IOException { - writeStartDocumentList(name, docs.getStart(), docs.size(), docs.getNumFound(), docs.getMaxScore() ); + writeStartDocumentList(name, docs.getStart(), docs.size(), docs.getNumFound(), docs.getMaxScore(), docs.getNumFoundExact()); for( int i=0; i docsStreamer = res.getProcessedDocuments(); writeStartDocumentList(name, ids.offset(), ids.size(), ids.matches(), - res.wantsScores() ? ids.maxScore() : null); + res.wantsScores() ? ids.maxScore() : null, ids.hitCountRelation() == TotalHits.Relation.EQUAL_TO); int idx = 0; while (docsStreamer.hasNext()) { diff --git a/solr/core/src/java/org/apache/solr/response/XMLResponseWriter.java b/solr/core/src/java/org/apache/solr/response/XMLResponseWriter.java index 09e37e3cb4c8..abab6ef484bd 100644 --- a/solr/core/src/java/org/apache/solr/response/XMLResponseWriter.java +++ b/solr/core/src/java/org/apache/solr/response/XMLResponseWriter.java @@ -28,7 +28,7 @@ */ public class XMLResponseWriter implements QueryResponseWriter { @Override - public void init(NamedList n) { + public void init(@SuppressWarnings({"rawtypes"})NamedList n) { /* NOOP */ } diff --git a/solr/core/src/java/org/apache/solr/response/XMLWriter.java b/solr/core/src/java/org/apache/solr/response/XMLWriter.java index 39a31f374dfd..a5be459df3f3 100644 --- a/solr/core/src/java/org/apache/solr/response/XMLWriter.java +++ b/solr/core/src/java/org/apache/solr/response/XMLWriter.java @@ -166,6 +166,28 @@ void startTag(String tag, String name, boolean closeTag) throws IOException { @Override + public void writeStartDocumentList(String name, + long start, int size, long numFound, Float maxScore, Boolean numFoundExact) throws IOException + { + if (doIndent) indent(); + + writer.write(""); + + incLevel(); + } + + @Override + @Deprecated public void writeStartDocumentList(String name, long start, int size, long numFound, Float maxScore) throws IOException { @@ -232,7 +254,7 @@ public void writeEndDocumentList() throws IOException // @Override - public void writeNamedList(String name, NamedList val) throws IOException { + public void writeNamedList(String name, @SuppressWarnings({"rawtypes"})NamedList val) throws IOException { int sz = val.size(); startTag("lst", name, sz<=0); @@ -249,7 +271,8 @@ public void writeNamedList(String name, NamedList val) throws IOException { } @Override - public void writeMap(String name, Map map, boolean excludeOuter, boolean isFirstVal) throws IOException { + @SuppressWarnings({"unchecked", "rawtypes"}) + public void writeMap(String name, @SuppressWarnings({"rawtypes"})Map 
map, boolean excludeOuter, boolean isFirstVal) throws IOException { int sz = map.size(); if (!excludeOuter) { @@ -279,7 +302,7 @@ public void writeArray(String name, Object[] val) throws IOException { } @Override - public void writeArray(String name, Iterator iter) throws IOException { + public void writeArray(String name, @SuppressWarnings({"rawtypes"})Iterator iter) throws IOException { if( iter.hasNext() ) { startTag("arr", name, false ); incLevel(); diff --git a/solr/core/src/java/org/apache/solr/response/XSLTResponseWriter.java b/solr/core/src/java/org/apache/solr/response/XSLTResponseWriter.java index 2b8de16cbbd4..8bc91523b03f 100644 --- a/solr/core/src/java/org/apache/solr/response/XSLTResponseWriter.java +++ b/solr/core/src/java/org/apache/solr/response/XSLTResponseWriter.java @@ -57,7 +57,7 @@ public class XSLTResponseWriter implements QueryResponseWriter { private static final XMLErrorLogger xmllog = new XMLErrorLogger(log); @Override - public void init(NamedList n) { + public void init(@SuppressWarnings({"rawtypes"})NamedList n) { final SolrParams p = n.toSolrParams(); xsltCacheLifetimeSeconds = p.getInt(XSLT_CACHE_PARAM,XSLT_CACHE_DEFAULT); log.info("xsltCacheLifetimeSeconds={}", xsltCacheLifetimeSeconds); diff --git a/solr/core/src/java/org/apache/solr/response/transform/DocIdAugmenterFactory.java b/solr/core/src/java/org/apache/solr/response/transform/DocIdAugmenterFactory.java index 219ca5d83e7c..7e0fed577b75 100644 --- a/solr/core/src/java/org/apache/solr/response/transform/DocIdAugmenterFactory.java +++ b/solr/core/src/java/org/apache/solr/response/transform/DocIdAugmenterFactory.java @@ -33,27 +33,24 @@ public class DocIdAugmenterFactory extends TransformerFactory public DocTransformer create(String field, SolrParams params, SolrQueryRequest req) { return new DocIdAugmenter( field ); } -} -class DocIdAugmenter extends DocTransformer -{ - final String name; + private static class DocIdAugmenter extends DocTransformer { + final String name; - public DocIdAugmenter( String display ) - { - this.name = display; - } + public DocIdAugmenter( String display ) { + this.name = display; + } - @Override - public String getName() - { - return name; - } + @Override + public String getName() { + return name; + } - @Override - public void transform(SolrDocument doc, int docid) { - assert -1 <= docid; - doc.setField( name, docid ); + @Override + public void transform(SolrDocument doc, int docid) { + assert -1 <= docid; + doc.setField( name, docid ); + } } } diff --git a/solr/core/src/java/org/apache/solr/response/transform/ExplainAugmenterFactory.java b/solr/core/src/java/org/apache/solr/response/transform/ExplainAugmenterFactory.java index bab9f032be4b..5dfaff2858af 100644 --- a/solr/core/src/java/org/apache/solr/response/transform/ExplainAugmenterFactory.java +++ b/solr/core/src/java/org/apache/solr/response/transform/ExplainAugmenterFactory.java @@ -42,7 +42,7 @@ public enum Style { protected Style defaultStyle = Style.text; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init(args); if( defaultUserArgs != null ) { defaultStyle = getStyle( defaultUserArgs ); diff --git a/solr/core/src/java/org/apache/solr/response/transform/GeoTransformerFactory.java b/solr/core/src/java/org/apache/solr/response/transform/GeoTransformerFactory.java index 0b50254e52f2..74d960503c1b 100644 --- a/solr/core/src/java/org/apache/solr/response/transform/GeoTransformerFactory.java +++ 
b/solr/core/src/java/org/apache/solr/response/transform/GeoTransformerFactory.java @@ -191,6 +191,7 @@ void setValue(SolrDocument doc, Object val) { doc.remove(display); if(val != null) { if(val instanceof Iterable) { + @SuppressWarnings({"rawtypes"}) Iterator iter = ((Iterable)val).iterator(); while(iter.hasNext()) { addValue(doc, iter.next()); diff --git a/solr/core/src/java/org/apache/solr/response/transform/RawValueTransformerFactory.java b/solr/core/src/java/org/apache/solr/response/transform/RawValueTransformerFactory.java index 55216e5b8ecb..baa277bfb25a 100644 --- a/solr/core/src/java/org/apache/solr/response/transform/RawValueTransformerFactory.java +++ b/solr/core/src/java/org/apache/solr/response/transform/RawValueTransformerFactory.java @@ -49,7 +49,7 @@ public RawValueTransformerFactory(String wt) { } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init(args); if(defaultUserArgs!=null&&defaultUserArgs.startsWith("wt=")) { applyToWT = defaultUserArgs.substring(3); @@ -113,6 +113,7 @@ public void transform(SolrDocument doc, int docid) { return; } if(val instanceof Collection) { + @SuppressWarnings({"rawtypes"}) Collection current = (Collection)val; ArrayList vals = new ArrayList(); for(Object v : current) { diff --git a/solr/core/src/java/org/apache/solr/response/transform/ShardAugmenterFactory.java b/solr/core/src/java/org/apache/solr/response/transform/ShardAugmenterFactory.java index e65bb93fd626..e01ba283323d 100644 --- a/solr/core/src/java/org/apache/solr/response/transform/ShardAugmenterFactory.java +++ b/solr/core/src/java/org/apache/solr/response/transform/ShardAugmenterFactory.java @@ -38,7 +38,7 @@ public DocTransformer create(String field, SolrParams params, SolrQueryRequest r v = "[not a shard request]"; } } - return new ValueAugmenter( field, v ); + return new ValueAugmenterFactory.ValueAugmenter( field, v ); } } diff --git a/solr/core/src/java/org/apache/solr/response/transform/SubQueryAugmenterFactory.java b/solr/core/src/java/org/apache/solr/response/transform/SubQueryAugmenterFactory.java index 8784a5578d51..d6d7bf01900a 100644 --- a/solr/core/src/java/org/apache/solr/response/transform/SubQueryAugmenterFactory.java +++ b/solr/core/src/java/org/apache/solr/response/transform/SubQueryAugmenterFactory.java @@ -20,13 +20,12 @@ import java.util.HashMap; import java.util.Iterator; import java.util.Map; -import java.util.concurrent.Callable; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; +import org.apache.lucene.search.TotalHits; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer; -import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; @@ -35,9 +34,7 @@ import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.request.SolrQueryRequest; -import org.apache.solr.request.SolrRequestInfo; import org.apache.solr.response.ResultContext; -import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.search.DocList; import org.apache.solr.search.DocSlice; import org.apache.solr.search.JoinQParserPlugin; @@ -115,6 +112,7 @@ public DocTransformer create(String field, SolrParams params, SolrQueryRequest r @SuppressWarnings("unchecked") private void checkThereIsNoDupe(String 
field, Map context) { // find a map + @SuppressWarnings({"rawtypes"}) final Map conflictMap; final String conflictMapKey = getClass().getSimpleName(); if (context.containsKey(conflictMapKey)) { @@ -177,7 +175,8 @@ public DocList getDocList() { return new DocSlice((int)docList.getStart(), docList.size(), new int[0], new float[docList.size()], (int) docList.getNumFound(), - docList.getMaxScore() == null ? Float.NaN : docList.getMaxScore()); + docList.getMaxScore() == null ? Float.NaN : docList.getMaxScore(), + docList.getNumFoundExact() ? TotalHits.Relation.EQUAL_TO : TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); } @Override @@ -217,8 +216,8 @@ public String[] getParams(String param) { if (vals != null) { StringBuilder rez = new StringBuilder(); - for (Iterator iterator = vals.iterator(); iterator.hasNext();) { - Object object = (Object) iterator.next(); + for (@SuppressWarnings({"rawtypes"})Iterator iterator = vals.iterator(); iterator.hasNext();) { + Object object = iterator.next(); rez.append(convertFieldValue(object)); if (iterator.hasNext()) { rez.append(separator); @@ -326,51 +325,14 @@ public void transform(SolrDocument doc, int docid) { final SolrParams docWithDeprefixed = SolrParams.wrapDefaults( new DocRowParams(doc, prefix, separator), baseSubParams); try { - Callable subQuery = new Callable() { - @Override - public QueryResponse call() throws Exception { - try { - return new QueryResponse( - server.request( - new QueryRequest(docWithDeprefixed), coreName) - , server); - } finally { - } - } - }; - QueryResponse response = - SolrRequestInfoSuspender.doInSuspension(subQuery); - - final SolrDocumentList docList = (SolrDocumentList) response.getResults(); - + QueryResponse rsp = server.query(coreName, docWithDeprefixed); + SolrDocumentList docList = rsp.getResults(); doc.setField(getName(), new Result(docList)); - } catch (Exception e) { String docString = doc.toString(); throw new SolrException(ErrorCode.BAD_REQUEST, "while invoking " + name + ":[subquery"+ (coreName!=null ? "fromIndex="+coreName : "") +"] on doc=" + docString.substring(0, Math.min(100, docString.length())), e.getCause()); - } finally {} - } - - // look ma!! no hands.. 
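Both directions of the conversion appear in this hunk: DocSlice is built from SolrDocumentList.getNumFoundExact(), while the writers earlier derived the flag from DocList.hitCountRelation(). A small sketch of the assumed two-way mapping, requiring only lucene-core on the classpath:

    import org.apache.lucene.search.TotalHits;

    // Assumed semantics, mirroring the hunks above: an exact hit count maps
    // to EQUAL_TO, a lower bound to GREATER_THAN_OR_EQUAL_TO.
    public class RelationMappingSketch {
        static TotalHits.Relation toRelation(boolean numFoundExact) {
            return numFoundExact ? TotalHits.Relation.EQUAL_TO
                                 : TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
        }

        static boolean toNumFoundExact(TotalHits.Relation rel) {
            return rel == TotalHits.Relation.EQUAL_TO;
        }

        public static void main(String[] args) {
            System.out.println(toRelation(true));                   // EQUAL_TO
            System.out.println(toNumFoundExact(toRelation(false))); // false
        }
    }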
- final static class SolrRequestInfoSuspender extends SolrRequestInfo { - - private SolrRequestInfoSuspender(SolrQueryRequest req, SolrQueryResponse rsp) { - super(req, rsp); - } - - /** Suspends current SolrRequestInfo invoke the given action, and resumes then */ - static T doInSuspension(Callable action) throws Exception { - - final SolrRequestInfo info = threadLocal.get(); - try { - threadLocal.remove(); - return action.call(); - } finally { - setRequestInfo(info); - } } } - } diff --git a/solr/core/src/java/org/apache/solr/response/transform/TransformerFactory.java b/solr/core/src/java/org/apache/solr/response/transform/TransformerFactory.java index acdb96a808cc..ed8a9302b989 100644 --- a/solr/core/src/java/org/apache/solr/response/transform/TransformerFactory.java +++ b/solr/core/src/java/org/apache/solr/response/transform/TransformerFactory.java @@ -34,7 +34,7 @@ public abstract class TransformerFactory implements NamedListInitializedPlugin protected String defaultUserArgs = null; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { defaultUserArgs = (String)args.get( "args" ); } diff --git a/solr/core/src/java/org/apache/solr/response/transform/ValueAugmenterFactory.java b/solr/core/src/java/org/apache/solr/response/transform/ValueAugmenterFactory.java index d85a302f5078..0cc247684bd0 100644 --- a/solr/core/src/java/org/apache/solr/response/transform/ValueAugmenterFactory.java +++ b/solr/core/src/java/org/apache/solr/response/transform/ValueAugmenterFactory.java @@ -28,31 +28,28 @@ * * @since solr 4.0 */ -public class ValueAugmenterFactory extends TransformerFactory -{ +public class ValueAugmenterFactory extends TransformerFactory { protected Object value = null; protected Object defaultValue = null; @Override - public void init(NamedList args) { - value = args.get( "value" ); - if( value == null ) { - defaultValue = args.get( "defaultValue" ); + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { + value = args.get("value"); + if (value == null) { + defaultValue = args.get("defaultValue"); } } - public static Object getObjectFrom( String val, String type ) - { - if( type != null ) { + public static Object getObjectFrom(String val, String type) { + if (type != null) { try { - if( "int".equals( type ) ) return Integer.valueOf( val ); - if( "double".equals( type ) ) return Double.valueOf( val ); - if( "float".equals( type ) ) return Float.valueOf( val ); - if( "date".equals( type ) ) return DateMathParser.parseMath(null, val ); - } - catch( Exception ex ) { - throw new SolrException( ErrorCode.BAD_REQUEST, - "Unable to parse "+type+"="+val, ex ); + if ("int".equals(type)) return Integer.valueOf(val); + if ("double".equals(type)) return Double.valueOf(val); + if ("float".equals(type)) return Float.valueOf(val); + if ("date".equals(type)) return DateMathParser.parseMath(null, val); + } catch (Exception ex) { + throw new SolrException(ErrorCode.BAD_REQUEST, + "Unable to parse " + type + "=" + val, ex); } } return val; @@ -61,43 +58,40 @@ public static Object getObjectFrom( String val, String type ) @Override public DocTransformer create(String field, SolrParams params, SolrQueryRequest req) { Object val = value; - if( val == null ) { + if (val == null) { String v = params.get("v"); - if( v == null ) { + if (v == null) { val = defaultValue; - } - else { + } else { val = getObjectFrom(v, params.get("t")); } - if( val == null ) { - throw new SolrException( ErrorCode.BAD_REQUEST, - "ValueAugmenter is missing a value 
-- should be defined in solrconfig or inline" ); + if (val == null) { + throw new SolrException(ErrorCode.BAD_REQUEST, + "ValueAugmenter is missing a value -- should be defined in solrconfig or inline"); } } - return new ValueAugmenter( field, val ); + return new ValueAugmenter(field, val); } -} -class ValueAugmenter extends DocTransformer -{ - final String name; - final Object value; - public ValueAugmenter( String name, Object value ) - { - this.name = name; - this.value = value; - } + static class ValueAugmenter extends DocTransformer { + final String name; + final Object value; - @Override - public String getName() - { - return name; - } + public ValueAugmenter(String name, Object value) { + this.name = name; + this.value = value; + } - @Override - public void transform(SolrDocument doc, int docid) { - doc.setField( name, value ); + @Override + public String getName() { + return name; + } + + @Override + public void transform(SolrDocument doc, int docid) { + doc.setField(name, value); + } } } diff --git a/solr/core/src/java/org/apache/solr/response/transform/ValueSourceAugmenter.java b/solr/core/src/java/org/apache/solr/response/transform/ValueSourceAugmenter.java index 8fcf56fc5116..372d11180dbc 100644 --- a/solr/core/src/java/org/apache/solr/response/transform/ValueSourceAugmenter.java +++ b/solr/core/src/java/org/apache/solr/response/transform/ValueSourceAugmenter.java @@ -60,6 +60,7 @@ public String getName() } @Override + @SuppressWarnings({"unchecked"}) public void setContext( ResultContext context ) { super.setContext(context); try { @@ -72,6 +73,7 @@ public void setContext( ResultContext context ) { } } + @SuppressWarnings({"rawtypes"}) Map fcontext; SolrIndexSearcher searcher; List readerContexts; @@ -85,6 +87,7 @@ public void transform(SolrDocument doc, int docid) { // TODO: calculate this stuff just once across diff functions int idx = ReaderUtil.subIndex(docid, readerContexts); LeafReaderContext rcontext = readerContexts.get(idx); + @SuppressWarnings({"unchecked"}) FunctionValues values = valueSource.getValues(fcontext, rcontext); int localId = docid - rcontext.docBase; setValue(doc,values.objectVal(localId)); diff --git a/solr/core/src/java/org/apache/solr/rest/BaseSolrResource.java b/solr/core/src/java/org/apache/solr/rest/BaseSolrResource.java index 5a9310d84e9b..01db581dc67a 100644 --- a/solr/core/src/java/org/apache/solr/rest/BaseSolrResource.java +++ b/solr/core/src/java/org/apache/solr/rest/BaseSolrResource.java @@ -125,12 +125,9 @@ public void doInit() throws ResourceException { SolrCore.preDecorateResponse(solrRequest, solrResponse); // client application can set a timeout for update requests - Object updateTimeoutSecsParam = getSolrRequest().getParams().get(UPDATE_TIMEOUT_SECS); + String updateTimeoutSecsParam = getSolrRequest().getParams().get(UPDATE_TIMEOUT_SECS); if (updateTimeoutSecsParam != null) - updateTimeoutSecs = (updateTimeoutSecsParam instanceof Number) - ? 
((Number) updateTimeoutSecsParam).intValue() - : Integer.parseInt(updateTimeoutSecsParam.toString()); - + updateTimeoutSecs = Integer.parseInt(updateTimeoutSecsParam); } } } catch (Throwable t) { @@ -198,6 +195,7 @@ protected void addDeprecatedWarning(){ protected void handleException(Logger log) { Exception exception = getSolrResponse().getException(); if (null != exception) { + @SuppressWarnings({"rawtypes"}) NamedList info = new SimpleOrderedMap(); int code = ResponseUtils.getErrorInfo(exception, info, log); setStatus(Status.valueOf(code)); diff --git a/solr/core/src/java/org/apache/solr/rest/ManagedResource.java b/solr/core/src/java/org/apache/solr/rest/ManagedResource.java index 5a933d34fb20..f9da549cd162 100644 --- a/solr/core/src/java/org/apache/solr/rest/ManagedResource.java +++ b/solr/core/src/java/org/apache/solr/rest/ManagedResource.java @@ -205,7 +205,9 @@ protected Object processStoredData(Object data) throws SolrException { "Stored data for "+resourceId+" is not a valid JSON object!"); } + @SuppressWarnings({"unchecked"}) Map jsonMap = (Map)data; + @SuppressWarnings({"unchecked"}) Map initArgsMap = (Map)jsonMap.get(INIT_ARGS_JSON_FIELD); managedInitArgs = new NamedList<>(initArgsMap); log.info("Loaded initArgs {} for {}", managedInitArgs, resourceId); diff --git a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java index b62dd10ff2a2..c9b4a7a959cf 100644 --- a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java +++ b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java @@ -235,6 +235,7 @@ protected boolean applyMapUpdates(Map jsonMap, boolean ignoreCase madeChanges = true; } } else if (val instanceof List) { + @SuppressWarnings({"unchecked"}) List vals = (List)val; if (output == null) { diff --git a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java index d96c320808ac..78d875737817 100644 --- a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java +++ b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java @@ -230,6 +230,7 @@ protected boolean applyMapUpdates(Map jsonMap, boolean ignoreCase madeChanges = true; } } else if (val instanceof List) { + @SuppressWarnings({"unchecked"}) List vals = (List)val; if (output == null) { diff --git a/solr/core/src/java/org/apache/solr/schema/BoolField.java b/solr/core/src/java/org/apache/solr/schema/BoolField.java index 5fb2d85fbc22..83b4395e0202 100644 --- a/solr/core/src/java/org/apache/solr/schema/BoolField.java +++ b/solr/core/src/java/org/apache/solr/schema/BoolField.java @@ -215,94 +215,95 @@ public Object toNativeType(Object val) { } return super.toNativeType(val); } -} -// TODO - this can be much more efficient - use FixedBitSet or Bits -class BoolFieldSource extends ValueSource { - protected String field; + // TODO - this can be much more efficient - use FixedBitSet or Bits + private static class BoolFieldSource extends ValueSource { + protected String field; - public BoolFieldSource(String field) { - this.field = field; - } + public BoolFieldSource(String field) { + this.field = field; + } - @Override - public String description() { - return "bool(" + field + ')'; - } + @Override + public String description() { + 
return "bool(" + field + ')'; + } - @Override - public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { - final SortedDocValues sindex = DocValues.getSorted(readerContext.reader(), field); - - // figure out what ord maps to true - int nord = sindex.getValueCount(); - // if no values in the segment, default trueOrd to something other then -1 (missing) - int tord = -2; - for (int i=0; i sindex.docID()) { - sindex.advance(doc); - } - if (doc == sindex.docID()) { - return sindex.ordValue(); - } else { - return -1; + private int getOrdForDoc(int doc) throws IOException { + if (doc > sindex.docID()) { + sindex.advance(doc); + } + if (doc == sindex.docID()) { + return sindex.ordValue(); + } else { + return -1; + } } - } - @Override - public boolean boolVal(int doc) throws IOException { - return getOrdForDoc(doc) == trueOrd; - } - - @Override - public boolean exists(int doc) throws IOException { - return getOrdForDoc(doc) != -1; - } + @Override + public boolean boolVal(int doc) throws IOException { + return getOrdForDoc(doc) == trueOrd; + } - @Override - public ValueFiller getValueFiller() { - return new ValueFiller() { - private final MutableValueBool mval = new MutableValueBool(); + @Override + public boolean exists(int doc) throws IOException { + return getOrdForDoc(doc) != -1; + } - @Override - public MutableValue getValue() { - return mval; - } + @Override + public ValueFiller getValueFiller() { + return new ValueFiller() { + private final MutableValueBool mval = new MutableValueBool(); + + @Override + public MutableValue getValue() { + return mval; + } + + @Override + public void fillValue(int doc) throws IOException { + int ord = getOrdForDoc(doc); + mval.value = (ord == trueOrd); + mval.exists = (ord != -1); + } + }; + } + }; + } - @Override - public void fillValue(int doc) throws IOException { - int ord = getOrdForDoc(doc); - mval.value = (ord == trueOrd); - mval.exists = (ord != -1); - } - }; - } - }; - } + @Override + public boolean equals(Object o) { + return o.getClass() == BoolFieldSource.class && this.field.equals(((BoolFieldSource)o).field); + } - @Override - public boolean equals(Object o) { - return o.getClass() == BoolFieldSource.class && this.field.equals(((BoolFieldSource)o).field); - } + private static final int hcode = OrdFieldSource.class.hashCode(); - private static final int hcode = OrdFieldSource.class.hashCode(); + @Override + public int hashCode() { + return hcode + field.hashCode(); + } - @Override - public int hashCode() { - return hcode + field.hashCode(); } - } + diff --git a/solr/core/src/java/org/apache/solr/schema/ClassicIndexSchemaFactory.java b/solr/core/src/java/org/apache/solr/schema/ClassicIndexSchemaFactory.java index 5bca9c4c38f2..8e31fa536eef 100644 --- a/solr/core/src/java/org/apache/solr/schema/ClassicIndexSchemaFactory.java +++ b/solr/core/src/java/org/apache/solr/schema/ClassicIndexSchemaFactory.java @@ -27,7 +27,7 @@ public class ClassicIndexSchemaFactory extends IndexSchemaFactory { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { // no arguments expected if (args.size() > 0) { String msg = "Unexpected arg(s): " + args; diff --git a/solr/core/src/java/org/apache/solr/schema/CurrencyFieldType.java b/solr/core/src/java/org/apache/solr/schema/CurrencyFieldType.java index f28fb38afed5..f7f5dbf06690 100644 --- 
a/solr/core/src/java/org/apache/solr/schema/CurrencyFieldType.java +++ b/solr/core/src/java/org/apache/solr/schema/CurrencyFieldType.java @@ -402,7 +402,7 @@ public ConvertedCurrencyValueSource(String targetCurrencyCode, } @Override - public FunctionValues getValues(Map context, LeafReaderContext reader) + public FunctionValues getValues(@SuppressWarnings({"rawtypes"})Map context, LeafReaderContext reader) throws IOException { final FunctionValues amounts = source.getValues(context, reader); // the target digits & currency of our source, @@ -514,7 +514,8 @@ public RawCurrencyValueSource(SchemaField sfield, String targetCurrencyCode, QPa public Currency getTargetCurrency() { return targetCurrency; } @Override - public FunctionValues getValues(Map context, LeafReaderContext reader) throws IOException { + @SuppressWarnings({"unchecked"}) + public FunctionValues getValues(@SuppressWarnings({"rawtypes"})Map context, LeafReaderContext reader) throws IOException { final FunctionValues amounts = amountValues.getValues(context, reader); final FunctionValues currencies = currencyValues.getValues(context, reader); diff --git a/solr/core/src/java/org/apache/solr/schema/DatePointField.java b/solr/core/src/java/org/apache/solr/schema/DatePointField.java index 9360aa4534b2..184dde152b31 100644 --- a/solr/core/src/java/org/apache/solr/schema/DatePointField.java +++ b/solr/core/src/java/org/apache/solr/schema/DatePointField.java @@ -219,36 +219,37 @@ public IndexableField createField(SchemaField field, Object value) { protected StoredField getStoredField(SchemaField sf, Object value) { return new StoredField(sf.getName(), ((Date) this.toNativeType(value)).getTime()); } -} -class DatePointFieldSource extends LongFieldSource { + private static class DatePointFieldSource extends LongFieldSource { - public DatePointFieldSource(String field) { - super(field); - } + public DatePointFieldSource(String field) { + super(field); + } - @Override - public String description() { - return "date(" + field + ')'; - } + @Override + public String description() { + return "date(" + field + ')'; + } - @Override - protected MutableValueLong newMutableValueLong() { - return new MutableValueDate(); - } + @Override + protected MutableValueLong newMutableValueLong() { + return new MutableValueDate(); + } - @Override - public Date longToObject(long val) { - return new Date(val); - } + @Override + public Date longToObject(long val) { + return new Date(val); + } - @Override - public String longToString(long val) { - return longToObject(val).toInstant().toString(); - } + @Override + public String longToString(long val) { + return longToObject(val).toInstant().toString(); + } - @Override - public long externalToLong(String extVal) { - return DateMathParser.parseMath(null, extVal).getTime(); + @Override + public long externalToLong(String extVal) { + return DateMathParser.parseMath(null, extVal).getTime(); + } } } + diff --git a/solr/core/src/java/org/apache/solr/schema/ExternalFileFieldReloader.java b/solr/core/src/java/org/apache/solr/schema/ExternalFileFieldReloader.java index 26aef9c966f3..8c23135052f5 100644 --- a/solr/core/src/java/org/apache/solr/schema/ExternalFileFieldReloader.java +++ b/solr/core/src/java/org/apache/solr/schema/ExternalFileFieldReloader.java @@ -61,7 +61,7 @@ public ExternalFileFieldReloader(SolrCore core) { } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { cacheFieldSources(getCore().getLatestSchema()); } diff --git 
a/solr/core/src/java/org/apache/solr/schema/FieldTypePluginLoader.java b/solr/core/src/java/org/apache/solr/schema/FieldTypePluginLoader.java index fcaf00145078..f4b0b50e3531 100644 --- a/solr/core/src/java/org/apache/solr/schema/FieldTypePluginLoader.java +++ b/solr/core/src/java/org/apache/solr/schema/FieldTypePluginLoader.java @@ -254,6 +254,7 @@ private Analyzer readAnalyzer(Node node) throws XPathExpressionException { ("[schema.xml] analyzer/charFilter", CharFilterFactory.class, false, false) { @Override + @SuppressWarnings({"rawtypes"}) protected CharFilterFactory create(SolrResourceLoader loader, String name, String className, Node node) throws Exception { final Map params = DOMUtil.toMap(node.getAttributes()); String configuredVersion = params.remove(LUCENE_MATCH_VERSION_PARAM); @@ -304,6 +305,7 @@ protected CharFilterFactory register(String name, ("[schema.xml] analyzer/tokenizer", TokenizerFactory.class, false, false) { @Override + @SuppressWarnings({"rawtypes"}) protected TokenizerFactory create(SolrResourceLoader loader, String name, String className, Node node) throws Exception { final Map params = DOMUtil.toMap(node.getAttributes()); String configuredVersion = params.remove(LUCENE_MATCH_VERSION_PARAM); @@ -358,6 +360,7 @@ protected TokenizerFactory register(String name, TokenizerFactory plugin) { new AbstractPluginLoader("[schema.xml] analyzer/filter", TokenFilterFactory.class, false, false) { @Override + @SuppressWarnings({"rawtypes"}) protected TokenFilterFactory create(SolrResourceLoader loader, String name, String className, Node node) throws Exception { final Map params = DOMUtil.toMap(node.getAttributes()); String configuredVersion = params.remove(LUCENE_MATCH_VERSION_PARAM); @@ -404,11 +407,11 @@ private Version parseConfiguredVersion(String configuredVersion, String pluginCl SolrConfig.parseLuceneVersionString(configuredVersion) : schema.getDefaultLuceneMatchVersion(); if (!version.onOrAfter(Version.LUCENE_8_0_0)) { - log.warn("{} is using deprecated {}{}{}" + log.warn("{} is using deprecated {}" + + " emulation. You should at some point declare and reindex to at least 8.0, because " + + "7.x emulation is deprecated and will be removed in 9.0" , pluginClassName - , version - , " emulation. You should at some point declare and reindex to at least 8.0, because " - , "7.x emulation is deprecated and will be removed in 9.0"); + , version); } return version; } diff --git a/solr/core/src/java/org/apache/solr/schema/IndexSchema.java b/solr/core/src/java/org/apache/solr/schema/IndexSchema.java index 7fac59b88330..bd1827d836f2 100644 --- a/solr/core/src/java/org/apache/solr/schema/IndexSchema.java +++ b/solr/core/src/java/org/apache/solr/schema/IndexSchema.java @@ -146,6 +146,7 @@ public class IndexSchema { public DynamicField[] getDynamicFields() { return dynamicFields; } + @SuppressWarnings({"unchecked", "rawtypes"}) protected Cache dynamicFieldCache = new ConcurrentLRUCache(10000, 8000, 9000,100, false,false, null); private Analyzer indexAnalyzer; @@ -1385,6 +1386,7 @@ public boolean isCopyFieldTarget( SchemaField f ) { /** * Get a map of property name -> value for the whole schema. 
*/ + @SuppressWarnings({"unchecked", "rawtypes"}) public Map getNamedPropertyValues() { return getNamedPropertyValues(null, new MapSolrParams(Collections.EMPTY_MAP)); } @@ -1413,6 +1415,7 @@ public enum Handler { .map(it -> it.getNamedPropertyValues(sp.showDefaults)) .collect(Collectors.toList())), + @SuppressWarnings({"unchecked", "rawtypes"}) FIELDS(IndexSchema.FIELDS, sp -> { List result = (sp.requestedFields != null ? sp.requestedFields : new TreeSet<>(sp.schema.fields.keySet())) .stream() @@ -1465,6 +1468,7 @@ public String getNameLower(){ requestedFields = readMultiVals(CommonParams.FL); } + @SuppressWarnings({"rawtypes"}) public Collection applyDynamic(){ return (Collection) Handler.DYNAMIC_FIELDS.fun.apply(this); } @@ -1484,6 +1488,7 @@ private Set readMultiVals(String name) { } + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap getProperties(SchemaField sf) { SimpleOrderedMap result = sf.getNamedPropertyValues(showDefaults); if (schema.isDynamicField(sf.name)) { diff --git a/solr/core/src/java/org/apache/solr/schema/LatLonPointSpatialField.java b/solr/core/src/java/org/apache/solr/schema/LatLonPointSpatialField.java index 386502740f9d..d2d1212c2738 100644 --- a/solr/core/src/java/org/apache/solr/schema/LatLonPointSpatialField.java +++ b/solr/core/src/java/org/apache/solr/schema/LatLonPointSpatialField.java @@ -58,6 +58,7 @@ * coordinates in lat/lon decimal degrees. The accuracy is about a centimeter (1.042cm). */ // TODO once LLP & LLDVF are out of Lucene Sandbox, we should be able to javadoc reference them. +@SuppressWarnings({"rawtypes"}) public class LatLonPointSpatialField extends AbstractSpatialFieldType implements SchemaAware { private IndexSchema schema; diff --git a/solr/core/src/java/org/apache/solr/schema/LatLonType.java b/solr/core/src/java/org/apache/solr/schema/LatLonType.java index ecebd13373c8..ea2711eeef2d 100644 --- a/solr/core/src/java/org/apache/solr/schema/LatLonType.java +++ b/solr/core/src/java/org/apache/solr/schema/LatLonType.java @@ -318,9 +318,12 @@ public Query rewrite(IndexReader reader) throws IOException { protected class SpatialWeight extends ConstantScoreWeight { protected IndexSearcher searcher; + @SuppressWarnings({"rawtypes"}) protected Map latContext; + @SuppressWarnings({"rawtypes"}) protected Map lonContext; + @SuppressWarnings({"unchecked"}) public SpatialWeight(IndexSearcher searcher, float boost) throws IOException { super(SpatialDistanceQuery.this, boost); this.searcher = searcher; @@ -369,6 +372,7 @@ protected class SpatialScorer extends Scorer { int lastDistDoc; double lastDist; + @SuppressWarnings({"unchecked"}) public SpatialScorer(LeafReaderContext readerContext, SpatialWeight w, float qWeight) throws IOException { super(w); this.weight = w; diff --git a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java index 35895e41634d..7f114d937a92 100644 --- a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java +++ b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java @@ -314,6 +314,7 @@ protected static List getActiveReplicaCoreUrls(ZkController zkController return activeReplicaCoreUrls; } + @SuppressWarnings({"rawtypes"}) private static class GetZkSchemaVersionCallable extends SolrRequest implements Callable { private String coreUrl; @@ -485,6 +486,7 @@ public ManagedIndexSchema deleteFields(Collection names) { } @Override + @SuppressWarnings({"unchecked"}) public ManagedIndexSchema replaceField (String fieldName, FieldType 
replacementFieldType, Map replacementArgs) { ManagedIndexSchema newSchema; @@ -690,6 +692,7 @@ public ManagedIndexSchema deleteDynamicFields(Collection fieldNamePatter } @Override + @SuppressWarnings({"unchecked"}) public ManagedIndexSchema replaceDynamicField (String fieldNamePattern, FieldType replacementFieldType, Map replacementArgs) { ManagedIndexSchema newSchema; @@ -809,6 +812,7 @@ public ManagedIndexSchema addCopyFields(String source, Collection destin } @Override + @SuppressWarnings({"unchecked"}) public ManagedIndexSchema deleteCopyFields(Map> copyFields) { ManagedIndexSchema newSchema; if (isMutable) { @@ -956,6 +960,7 @@ public ManagedIndexSchema addFieldTypes(List fieldTypeList, boolean p // we shallow copied fieldTypes, but since we're changing them, we need to do a true // deep copy before adding the new field types + @SuppressWarnings({"unchecked"}) HashMap clone = (HashMap)((HashMap)newSchema.fieldTypes).clone(); newSchema.fieldTypes = clone; @@ -1044,6 +1049,7 @@ private Map> cloneCopyFieldsMap(Map replacementArgs) { ManagedIndexSchema newSchema; if (isMutable) { diff --git a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchemaFactory.java b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchemaFactory.java index 300bd97791ac..ef5df423b876 100644 --- a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchemaFactory.java +++ b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchemaFactory.java @@ -67,7 +67,7 @@ public class ManagedIndexSchemaFactory extends IndexSchemaFactory implements Sol private boolean shouldUpgrade = false; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { SolrParams params = args.toSolrParams(); isMutable = params.getBool("mutable", true); args.remove("mutable"); diff --git a/solr/core/src/java/org/apache/solr/schema/OpenExchangeRatesOrgProvider.java b/solr/core/src/java/org/apache/solr/schema/OpenExchangeRatesOrgProvider.java index 8492b7afc2fa..d0a204be0c54 100644 --- a/solr/core/src/java/org/apache/solr/schema/OpenExchangeRatesOrgProvider.java +++ b/solr/core/src/java/org/apache/solr/schema/OpenExchangeRatesOrgProvider.java @@ -88,8 +88,8 @@ public double getExchangeRate(String sourceCurrencyCode, String targetCurrencyCo reloadIfExpired(); - Double source = (Double) rates.getRates().get(sourceCurrencyCode); - Double target = (Double) rates.getRates().get(targetCurrencyCode); + Double source = rates.getRates().get(sourceCurrencyCode); + Double target = rates.getRates().get(targetCurrencyCode); if (source == null || target == null) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, diff --git a/solr/core/src/java/org/apache/solr/schema/RandomSortField.java b/solr/core/src/java/org/apache/solr/schema/RandomSortField.java index 44bb420947fb..1a8ec24410af 100644 --- a/solr/core/src/java/org/apache/solr/schema/RandomSortField.java +++ b/solr/core/src/java/org/apache/solr/schema/RandomSortField.java @@ -173,7 +173,7 @@ public String description() { } @Override - public FunctionValues getValues(Map context, final LeafReaderContext readerContext) throws IOException { + public FunctionValues getValues(@SuppressWarnings({"rawtypes"})Map context, final LeafReaderContext readerContext) throws IOException { return new IntDocValues(this) { private final int seed = getSeed(field, readerContext); @Override diff --git a/solr/core/src/java/org/apache/solr/schema/RptWithGeometrySpatialField.java 
b/solr/core/src/java/org/apache/solr/schema/RptWithGeometrySpatialField.java index abb3f0dbbee4..5ad0b5d137be 100644 --- a/solr/core/src/java/org/apache/solr/schema/RptWithGeometrySpatialField.java +++ b/solr/core/src/java/org/apache/solr/schema/RptWithGeometrySpatialField.java @@ -145,6 +145,7 @@ public int hashCode() { public ShapeValues getValues(LeafReaderContext readerContext) throws IOException { final ShapeValues targetFuncValues = targetValueSource.getValues(readerContext); // The key is a pair of leaf reader with a docId relative to that reader. The value is a Map from field to Shape. + @SuppressWarnings({"unchecked"}) final SolrCache cache = SolrRequestInfo.getRequestInfo().getReq().getSearcher().getCache(CACHE_KEY_PREFIX + fieldName); if (cache == null) { diff --git a/solr/core/src/java/org/apache/solr/schema/SchemaManager.java b/solr/core/src/java/org/apache/solr/schema/SchemaManager.java index 617a8adaf6b3..2402e1587dd3 100644 --- a/solr/core/src/java/org/apache/solr/schema/SchemaManager.java +++ b/solr/core/src/java/org/apache/solr/schema/SchemaManager.java @@ -82,6 +82,7 @@ public SchemaManager(SolrQueryRequest req){ * as possible instead of failing at the first error it encounters * @return List of errors. If the List is empty then the operation was successful. */ + @SuppressWarnings({"rawtypes"}) public List performOperations() throws Exception { List ops = req.getCommands(false); List errs = CommandOperation.captureErrors(ops); @@ -95,6 +96,7 @@ public List performOperations() throws Exception { } } + @SuppressWarnings({"rawtypes"}) private List doOperations(List operations) throws InterruptedException, IOException, KeeperException { TimeOut timeOut = new TimeOut(timeout, TimeUnit.SECONDS, TimeSource.NANO_TIME); SolrCore core = req.getCore(); diff --git a/solr/core/src/java/org/apache/solr/schema/StrFieldSource.java b/solr/core/src/java/org/apache/solr/schema/StrFieldSource.java index 5326a76bba6d..002ac3aaa256 100644 --- a/solr/core/src/java/org/apache/solr/schema/StrFieldSource.java +++ b/solr/core/src/java/org/apache/solr/schema/StrFieldSource.java @@ -36,7 +36,7 @@ public String description() { } @Override - public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { + public FunctionValues getValues(@SuppressWarnings({"rawtypes"})Map context, LeafReaderContext readerContext) throws IOException { return new DocTermsIndexDocValues(this, readerContext, field) { @Override diff --git a/solr/core/src/java/org/apache/solr/schema/TrieDoubleField.java b/solr/core/src/java/org/apache/solr/schema/TrieDoubleField.java index 8e622c6d286f..aa6b9361bb2f 100644 --- a/solr/core/src/java/org/apache/solr/schema/TrieDoubleField.java +++ b/solr/core/src/java/org/apache/solr/schema/TrieDoubleField.java @@ -70,7 +70,7 @@ protected ValueSource getSingleValueSource(SortedSetSelector.Type choice, Schema return new SortedSetFieldSource(f.getName(), choice) { @Override - public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { + public FunctionValues getValues(@SuppressWarnings({"rawtypes"})Map context, LeafReaderContext readerContext) throws IOException { SortedSetFieldSource thisAsSortedSetFieldSource = this; // needed for nested anon class ref SortedSetDocValues sortedSet = DocValues.getSortedSet(readerContext.reader(), field); diff --git a/solr/core/src/java/org/apache/solr/schema/TrieFloatField.java b/solr/core/src/java/org/apache/solr/schema/TrieFloatField.java index b7895649073a..00f9378a44a8 100644 --- 
a/solr/core/src/java/org/apache/solr/schema/TrieFloatField.java +++ b/solr/core/src/java/org/apache/solr/schema/TrieFloatField.java @@ -70,7 +70,7 @@ protected ValueSource getSingleValueSource(SortedSetSelector.Type choice, Schema return new SortedSetFieldSource(f.getName(), choice) { @Override - public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { + public FunctionValues getValues(@SuppressWarnings({"rawtypes"})Map context, LeafReaderContext readerContext) throws IOException { SortedSetFieldSource thisAsSortedSetFieldSource = this; // needed for nested anon class ref SortedSetDocValues sortedSet = DocValues.getSortedSet(readerContext.reader(), field); diff --git a/solr/core/src/java/org/apache/solr/schema/TrieIntField.java b/solr/core/src/java/org/apache/solr/schema/TrieIntField.java index 8acb66d59e0e..c4692e30aa17 100644 --- a/solr/core/src/java/org/apache/solr/schema/TrieIntField.java +++ b/solr/core/src/java/org/apache/solr/schema/TrieIntField.java @@ -68,7 +68,7 @@ protected ValueSource getSingleValueSource(SortedSetSelector.Type choice, Schema return new SortedSetFieldSource(f.getName(), choice) { @Override - public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { + public FunctionValues getValues(@SuppressWarnings({"rawtypes"})Map context, LeafReaderContext readerContext) throws IOException { SortedSetFieldSource thisAsSortedSetFieldSource = this; // needed for nested anon class ref SortedSetDocValues sortedSet = DocValues.getSortedSet(readerContext.reader(), field); diff --git a/solr/core/src/java/org/apache/solr/schema/TrieLongField.java b/solr/core/src/java/org/apache/solr/schema/TrieLongField.java index bf6d39320cc2..2d60cae0aac5 100644 --- a/solr/core/src/java/org/apache/solr/schema/TrieLongField.java +++ b/solr/core/src/java/org/apache/solr/schema/TrieLongField.java @@ -68,7 +68,7 @@ protected ValueSource getSingleValueSource(SortedSetSelector.Type choice, Schema return new SortedSetFieldSource(f.getName(), choice) { @Override - public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { + public FunctionValues getValues(@SuppressWarnings({"rawtypes"})Map context, LeafReaderContext readerContext) throws IOException { SortedSetFieldSource thisAsSortedSetFieldSource = this; // needed for nested anon class ref SortedSetDocValues sortedSet = DocValues.getSortedSet(readerContext.reader(), field); diff --git a/solr/core/src/java/org/apache/solr/search/AbstractReRankQuery.java b/solr/core/src/java/org/apache/solr/search/AbstractReRankQuery.java index c87565813e7a..f0e670258864 100644 --- a/solr/core/src/java/org/apache/solr/search/AbstractReRankQuery.java +++ b/solr/core/src/java/org/apache/solr/search/AbstractReRankQuery.java @@ -56,7 +56,7 @@ public MergeStrategy getMergeStrategy() { return null; } - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) public TopDocsCollector getTopDocsCollector(int len, QueryCommand cmd, IndexSearcher searcher) throws IOException { if(this.boostedPriority == null) { SolrRequestInfo info = SolrRequestInfo.getRequestInfo(); diff --git a/solr/core/src/java/org/apache/solr/search/CacheConfig.java b/solr/core/src/java/org/apache/solr/search/CacheConfig.java index 864054fd1c78..647091540c34 100644 --- a/solr/core/src/java/org/apache/solr/search/CacheConfig.java +++ b/solr/core/src/java/org/apache/solr/search/CacheConfig.java @@ -49,6 +49,7 @@ public class CacheConfig implements MapSerializable{ private 
String nodeName; + @SuppressWarnings({"rawtypes"}) private Class clazz; private Map args; private CacheRegenerator regenerator; @@ -61,6 +62,7 @@ public class CacheConfig implements MapSerializable{ public CacheConfig() {} + @SuppressWarnings({"rawtypes"}) public CacheConfig(Class clazz, Map args, CacheRegenerator regenerator) { this.clazz = clazz; this.args = args; @@ -91,6 +93,7 @@ public static Map getMultipleConfigs(SolrConfig solrConfig, } + @SuppressWarnings({"unchecked"}) public static CacheConfig getConfig(SolrConfig solrConfig, String xpath) { Node node = solrConfig.getNode(xpath, false); if(node == null || !"true".equals(DOMUtil.getAttrOrDefault(node, "enabled", "true"))) { @@ -103,9 +106,11 @@ public static CacheConfig getConfig(SolrConfig solrConfig, String xpath) { } + @SuppressWarnings({"unchecked"}) public static CacheConfig getConfig(SolrConfig solrConfig, String nodeName, Map attrs, String xpath) { CacheConfig config = new CacheConfig(); config.nodeName = nodeName; + @SuppressWarnings({"rawtypes"}) Map attrsCopy = new LinkedHashMap<>(attrs.size()); for (Map.Entry e : attrs.entrySet()) { attrsCopy.put(e.getKey(), String.valueOf(e.getValue())); @@ -138,6 +143,7 @@ public static CacheConfig getConfig(SolrConfig solrConfig, String nodeName, Map< return config; } + @SuppressWarnings({"rawtypes"}) public SolrCache newInstance() { try { SolrCache cache = clazz.getConstructor().newInstance(); @@ -152,7 +158,9 @@ public SolrCache newInstance() { } @Override + @SuppressWarnings({"unchecked"}) public Map toMap(Map map) { + @SuppressWarnings({"rawtypes"}) Map result = Collections.unmodifiableMap(args); return result; } diff --git a/solr/core/src/java/org/apache/solr/search/CacheRegenerator.java b/solr/core/src/java/org/apache/solr/search/CacheRegenerator.java index 4daaa5259095..f958bf2f4919 100644 --- a/solr/core/src/java/org/apache/solr/search/CacheRegenerator.java +++ b/solr/core/src/java/org/apache/solr/search/CacheRegenerator.java @@ -38,5 +38,7 @@ public interface CacheRegenerator { * @param oldVal the old value of the cache item * @return true to continue with autowarming, false to stop */ - public boolean regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, SolrCache oldCache, Object oldKey, Object oldVal) throws IOException; + public boolean regenerateItem(SolrIndexSearcher newSearcher, + @SuppressWarnings({"rawtypes"})SolrCache newCache, + @SuppressWarnings({"rawtypes"})SolrCache oldCache, Object oldKey, Object oldVal) throws IOException; } diff --git a/solr/core/src/java/org/apache/solr/search/CaffeineCache.java b/solr/core/src/java/org/apache/solr/search/CaffeineCache.java index 82271ade225d..8da244b972e5 100644 --- a/solr/core/src/java/org/apache/solr/search/CaffeineCache.java +++ b/solr/core/src/java/org/apache/solr/search/CaffeineCache.java @@ -16,6 +16,7 @@ */ package org.apache.solr.search; +import java.io.IOException; import java.lang.invoke.MethodHandles; import java.time.Duration; import java.util.Collections; @@ -135,7 +136,9 @@ public Object init(Map args, Object persistence, CacheRegenerator regenerator) { return persistence; } + @SuppressWarnings({"unchecked"}) private Cache buildCache(Cache prev) { + @SuppressWarnings({"rawtypes"}) Caffeine builder = Caffeine.newBuilder() .initialCapacity(initialSize) .executor(executor) @@ -228,7 +231,7 @@ public int size() { } @Override - public void close() throws Exception { + public void close() throws IOException { SolrCache.super.close(); cache.invalidateAll(); cache.cleanUp(); diff --git 
a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java index e7125772e33f..05ae26eb8163 100644 --- a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java @@ -366,6 +366,7 @@ public CollapsingPostFilter(SolrParams localParams, SolrParams params, SolrQuery } } + @SuppressWarnings({"unchecked"}) public DelegatingCollector getFilterCollector(IndexSearcher indexSearcher) { try { @@ -376,6 +377,7 @@ public DelegatingCollector getFilterCollector(IndexSearcher indexSearcher) { //because the QueryElevationComponent runs after the Queries are constructed. IntIntHashMap boostDocsMap = null; + @SuppressWarnings({"rawtypes"}) Map context = null; SolrRequestInfo info = SolrRequestInfo.getRequestInfo(); if(info != null) { @@ -1451,8 +1453,9 @@ public static final class CollapseScore { * If it is, then "this" will be added to the readerContext * using the "CSCORE" key, and true will be returned. If not returns false. */ + @SuppressWarnings({"unchecked"}) public boolean setupIfNeeded(final GroupHeadSelector groupHeadSelector, - final Map readerContext) { + @SuppressWarnings({"rawtypes"})final Map readerContext) { // HACK, but not really any better options until/unless we can recursively // ask value sources if they depend on score if (wantsCScore(groupHeadSelector.selectorText)) { @@ -1832,6 +1835,7 @@ private static class OrdValueSourceStrategy extends OrdFieldValueStrategy { private ValueSource valueSource; private FunctionValues functionValues; private IntFloatDynamicMap ordVals; + @SuppressWarnings({"rawtypes"}) private Map rcontext; private final CollapseScore collapseScore = new CollapseScore(); private boolean needsScores4Collapsing; @@ -1865,6 +1869,7 @@ public OrdValueSourceStrategy(int maxDoc, collapseScore.setupIfNeeded(groupHeadSelector, rcontext); } + @SuppressWarnings({"unchecked"}) public void setNextReader(LeafReaderContext context) throws IOException { functionValues = this.valueSource.getValues(rcontext, context); } @@ -2353,6 +2358,7 @@ private static class IntValueSourceStrategy extends IntFieldValueStrategy { private ValueSource valueSource; private FunctionValues functionValues; + @SuppressWarnings({"rawtypes"}) private Map rcontext; private final CollapseScore collapseScore = new CollapseScore(); private int index=-1; @@ -2391,6 +2397,7 @@ public IntValueSourceStrategy(int maxDoc, collapseScore.setupIfNeeded(groupHeadSelector, rcontext); } + @SuppressWarnings({"unchecked"}) public void setNextReader(LeafReaderContext context) throws IOException { functionValues = this.valueSource.getValues(rcontext, context); } @@ -2632,6 +2639,7 @@ private static class SortFieldsCompare { final private int numClauses; final private SortField[] sorts; final private int[] reverseMul; + @SuppressWarnings({"rawtypes"}) final private FieldComparator[] fieldComparators; final private LeafFieldComparator[] leafFieldComparators; @@ -2642,6 +2650,7 @@ private static class SortFieldsCompare { * Constructs an instance based on the the (raw, un-rewritten) SortFields to be used, * and an initial number of expected groups (will grow as needed). */ + @SuppressWarnings({"rawtypes"}) public SortFieldsCompare(SortField[] sorts, int initNumGroups) { this.sorts = sorts; numClauses = sorts.length; @@ -2757,6 +2766,7 @@ public boolean testAndSetNullGroupValues(int contextDoc) throws IOException { * accordance with the SortFields. 
* (otherwise returns false) */ + @SuppressWarnings({"unchecked", "rawtypes"}) private boolean testAndSetGroupValues(Object[] values, int contextDoc) throws IOException { Object[] stash = new Object[numClauses]; int lastCompare = 0; diff --git a/solr/core/src/java/org/apache/solr/search/ComplexPhraseQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/ComplexPhraseQParserPlugin.java index 85b40cdb7d05..6313e5bb2a2c 100644 --- a/solr/core/src/java/org/apache/solr/search/ComplexPhraseQParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/ComplexPhraseQParserPlugin.java @@ -39,7 +39,7 @@ public class ComplexPhraseQParserPlugin extends QParserPlugin { private boolean inOrder = true; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init(args); if (args != null) { Object val = args.get("inOrder"); diff --git a/solr/core/src/java/org/apache/solr/search/CursorMark.java b/solr/core/src/java/org/apache/solr/search/CursorMark.java index 2a63da53255e..804847141163 100644 --- a/solr/core/src/java/org/apache/solr/search/CursorMark.java +++ b/solr/core/src/java/org/apache/solr/search/CursorMark.java @@ -172,6 +172,7 @@ public SortSpec getSortSpec() { * * @see #getSerializedTotem */ + @SuppressWarnings({"unchecked"}) public void parseSerializedTotem(final String serialized) { if (CURSOR_MARK_START.equals(serialized)) { values = null; diff --git a/solr/core/src/java/org/apache/solr/search/DocList.java b/solr/core/src/java/org/apache/solr/search/DocList.java index b136d280a93c..0abc24340c5a 100644 --- a/solr/core/src/java/org/apache/solr/search/DocList.java +++ b/solr/core/src/java/org/apache/solr/search/DocList.java @@ -16,6 +16,7 @@ */ package org.apache.solr.search; +import org.apache.lucene.search.TotalHits; /** * DocList represents the result of a query: an ordered list of document ids with optional score. 
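
The DocList hunk just below adds a hitCountRelation() accessor alongside matches(), using Lucene's TotalHits.Relation to say whether the match count is exact or only a lower bound (it stops being exact once a collector is allowed to terminate counting early). A minimal sketch of consuming the pair, using only the two interface methods visible in this patch:

    import org.apache.lucene.search.TotalHits;
    import org.apache.solr.search.DocList;

    class HitCountExample {
      // Renders a human-readable hit count for any DocList produced by a search.
      static String describe(DocList docs) {
        long matches = docs.matches();
        if (docs.hitCountRelation() == TotalHits.Relation.EQUAL_TO) {
          return matches + " hits (exact)";
        }
        // GREATER_THAN_OR_EQUAL_TO: counting stopped early, so matches() is a floor.
        return "at least " + matches + " hits";
      }
    }
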
@@ -45,6 +46,8 @@ public interface DocList { * @return number of matches for the search(query & any filters) */ public long matches(); + + public TotalHits.Relation hitCountRelation(); /*** diff --git a/solr/core/src/java/org/apache/solr/search/DocSlice.java b/solr/core/src/java/org/apache/solr/search/DocSlice.java index ba8fb833167a..489f950c5303 100644 --- a/solr/core/src/java/org/apache/solr/search/DocSlice.java +++ b/solr/core/src/java/org/apache/solr/search/DocSlice.java @@ -19,6 +19,7 @@ import java.util.Collection; import java.util.Collections; +import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.RamUsageEstimator; @@ -37,6 +38,7 @@ public class DocSlice implements DocList, Accountable { final float[] scores; // optional score list final long matches; + final TotalHits.Relation matchesRelation; final float maxScore; final long ramBytesUsed; // cached value @@ -48,8 +50,9 @@ public class DocSlice implements DocList, Accountable { * @param docs array of docids starting at position 0 * @param scores array of scores that corresponds to docs, may be null * @param matches total number of matches for the query + * @param matchesRelation Indicates if {@code matches} is exact or an approximation */ - public DocSlice(int offset, int len, int[] docs, float[] scores, long matches, float maxScore) { + public DocSlice(int offset, int len, int[] docs, float[] scores, long matches, float maxScore, TotalHits.Relation matchesRelation) { this.offset=offset; this.len=len; this.docs=docs; @@ -57,6 +60,7 @@ public DocSlice(int offset, int len, int[] docs, float[] scores, long matches, f this.matches=matches; this.maxScore=maxScore; this.ramBytesUsed = BASE_RAM_BYTES_USED + (docs == null ? 0 : ((long)docs.length << 2)) + (scores == null ? 
0 : ((long)scores.length<<2)+RamUsageEstimator.NUM_BYTES_ARRAY_HEADER); + this.matchesRelation = matchesRelation; } @Override @@ -70,7 +74,7 @@ public DocList subset(int offset, int len) { int realEndDoc = Math.min(requestedEnd, docs.length); int realLen = Math.max(realEndDoc-offset,0); if (this.offset == offset && this.len == realLen) return this; - return new DocSlice(offset, realLen, docs, scores, matches, maxScore); + return new DocSlice(offset, realLen, docs, scores, matches, maxScore, matchesRelation); } @Override @@ -139,4 +143,9 @@ public long ramBytesUsed() { public Collection getChildResources() { return Collections.emptyList(); } + + @Override + public TotalHits.Relation hitCountRelation() { + return matchesRelation; + } } diff --git a/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java index bdf943e05514..097f1fcf0783 100644 --- a/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java @@ -95,6 +95,7 @@ public Query rewrite(IndexReader reader) throws IOException { } } + @SuppressWarnings({"rawtypes"}) public TopDocsCollector getTopDocsCollector(int len, QueryCommand cmd, IndexSearcher searcher) throws IOException { @@ -137,10 +138,12 @@ public ExportQuery(SolrParams localParams, SolrParams params, SolrQueryRequest r } } + @SuppressWarnings({"rawtypes"}) private static class ExportCollector extends TopDocsCollector { private FixedBitSet[] sets; + @SuppressWarnings({"unchecked"}) public ExportCollector(FixedBitSet[] sets) { super(null); this.sets = sets; @@ -172,6 +175,7 @@ private ScoreDoc[] getScoreDocs(int howMany) { return docs; } + @SuppressWarnings({"unchecked"}) public TopDocs topDocs(int start, int howMany) { assert(sets != null); @@ -180,6 +184,7 @@ public TopDocs topDocs(int start, int howMany) { SolrQueryRequest req = null; if(info != null && ((req = info.getReq()) != null)) { + @SuppressWarnings({"rawtypes"}) Map context = req.getContext(); context.put("export", sets); context.put("totalHits", totalHits); diff --git a/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java b/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java index 93aaf28f1dd2..fce9416ad59a 100644 --- a/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java +++ b/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java @@ -51,6 +51,7 @@ import org.apache.lucene.search.spans.SpanQuery; import org.apache.solr.analysis.TokenizerChain; import org.apache.solr.common.SolrException; +import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.params.DisMaxParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; @@ -412,7 +413,7 @@ protected Query parseOriginalQuery(ExtendedSolrQueryParser up, String mmSpec = config.minShouldMatch; if (foundOperators(clauses, config.lowercaseOperators)) { - mmSpec = params.get(DisMaxParams.MM, "0%"); // Use provided mm spec if present, otherwise turn off mm processing + mmSpec = config.solrParams.get(DisMaxParams.MM, "0%"); // Use provided mm spec if present, otherwise turn off mm processing } query = SolrPluginUtils.setMinShouldMatch((BooleanQuery)query, mmSpec, config.mmAutoRelax); } @@ -1609,7 +1610,7 @@ else if (wildcard.endsWith("*")) { str=wildcard.substring(0,wildcard.length()-1); } else { - throw new RuntimeException("dynamic field name must start or end with *"); + throw new 
SolrException(ErrorCode.BAD_REQUEST, "dynamic field name must start or end with *"); } } diff --git a/solr/core/src/java/org/apache/solr/search/FloatPayloadValueSource.java b/solr/core/src/java/org/apache/solr/search/FloatPayloadValueSource.java index c5484185ce37..130c1efeea0c 100644 --- a/solr/core/src/java/org/apache/solr/search/FloatPayloadValueSource.java +++ b/solr/core/src/java/org/apache/solr/search/FloatPayloadValueSource.java @@ -53,10 +53,12 @@ public FloatPayloadValueSource(String field, String val, String indexedField, By } @Override - public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { + public FunctionValues getValues(@SuppressWarnings({"rawtypes"})Map context + , LeafReaderContext readerContext) throws IOException { final Terms terms = readerContext.reader().terms(indexedField); + @SuppressWarnings({"unchecked"}) FunctionValues defaultValues = defaultValueSource.getValues(context, readerContext); // copied the bulk of this from TFValueSource - TODO: this is a very repeated pattern - base-class this advance logic stuff? diff --git a/solr/core/src/java/org/apache/solr/search/FunctionQParser.java b/solr/core/src/java/org/apache/solr/search/FunctionQParser.java index a212f050729a..7743640ea3f4 100644 --- a/solr/core/src/java/org/apache/solr/search/FunctionQParser.java +++ b/solr/core/src/java/org/apache/solr/search/FunctionQParser.java @@ -327,7 +327,7 @@ protected ValueSource parseValueSource(int flags) throws SyntaxError { if (ch>='0' && ch<='9' || ch=='.' || ch=='+' || ch=='-') { Number num = sp.getNumber(); if (num instanceof Long) { - valueSource = new LongConstValueSource(num.longValue()); + valueSource = new ValueSourceParser.LongConstValueSource(num.longValue()); } else if (num instanceof Double) { valueSource = new DoubleConstValueSource(num.doubleValue()); } else { @@ -399,9 +399,9 @@ protected ValueSource parseValueSource(int flags) throws SyntaxError { sp.expect(")"); } else { if ("true".equals(id)) { - valueSource = new BoolConstValueSource(true); + valueSource = ValueSourceParser.BoolConstValueSource.TRUE; } else if ("false".equals(id)) { - valueSource = new BoolConstValueSource(false); + valueSource = ValueSourceParser.BoolConstValueSource.FALSE; } else { if ((flags & FLAG_USE_FIELDNAME_SOURCE) != 0) { // Don't try to create a ValueSource for the field, just use a placeholder. 
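
Two user-visible fixes sit in the ExtendedDismaxQParser hunks above: the mm override is now read from the parser's own config.solrParams rather than the raw request params, and a bad dynamic-field wildcard now surfaces as a 400 SolrException(ErrorCode.BAD_REQUEST) instead of an opaque RuntimeException. The FunctionQParser hunk, meanwhile, stops allocating a fresh BoolConstValueSource for every true/false literal and reuses canonical constants from ValueSourceParser. The pattern is roughly the following sketch; the real inner class lives in ValueSourceParser and its exact shape may differ:

    // Shared-singleton constants: one canonical instance per boolean literal
    // means no per-parse allocation, and the two values compare by reference.
    // Class and field names here are illustrative, not the actual Solr code.
    final class BoolConst {
      static final BoolConst TRUE = new BoolConst(true);
      static final BoolConst FALSE = new BoolConst(false);

      final boolean value;

      private BoolConst(boolean value) {
        this.value = value;
      }

      static BoolConst of(boolean value) {
        return value ? TRUE : FALSE;
      }
    }
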
diff --git a/solr/core/src/java/org/apache/solr/search/FunctionRangeQuery.java b/solr/core/src/java/org/apache/solr/search/FunctionRangeQuery.java index fdcdfc3eba8a..ff293223ea9f 100644 --- a/solr/core/src/java/org/apache/solr/search/FunctionRangeQuery.java +++ b/solr/core/src/java/org/apache/solr/search/FunctionRangeQuery.java @@ -41,17 +41,20 @@ public FunctionRangeQuery(ValueSourceRangeFilter filter) { @Override public DelegatingCollector getFilterCollector(IndexSearcher searcher) { + @SuppressWarnings({"rawtypes"}) Map fcontext = ValueSource.newContext(searcher); Weight weight = rangeFilt.createWeight(searcher, ScoreMode.COMPLETE, 1); return new FunctionRangeCollector(fcontext, weight); } class FunctionRangeCollector extends DelegatingCollector { + @SuppressWarnings({"rawtypes"}) final Map fcontext; final Weight weight; ValueSourceScorer scorer; int maxdoc; + @SuppressWarnings({"rawtypes"}) public FunctionRangeCollector(Map fcontext, Weight weight) { this.fcontext = fcontext; this.weight = weight; @@ -69,6 +72,7 @@ public void collect(int doc) throws IOException { protected void doSetNextReader(LeafReaderContext context) throws IOException { super.doSetNextReader(context); maxdoc = context.reader().maxDoc(); + @SuppressWarnings({"unchecked"}) FunctionValues dv = rangeFilt.getValueSource().getValues(fcontext, context); scorer = dv.getRangeScorer(weight, context, rangeFilt.getLowerVal(), rangeFilt.getUpperVal(), rangeFilt.isIncludeLower(), rangeFilt.isIncludeUpper()); } diff --git a/solr/core/src/java/org/apache/solr/search/Grouping.java b/solr/core/src/java/org/apache/solr/search/Grouping.java index 11cbf303fdd6..60c166de1df2 100644 --- a/solr/core/src/java/org/apache/solr/search/Grouping.java +++ b/solr/core/src/java/org/apache/solr/search/Grouping.java @@ -82,6 +82,7 @@ public class Grouping { private final SolrIndexSearcher searcher; private final QueryResult qr; private final QueryCommand cmd; + @SuppressWarnings({"rawtypes"}) private final List commands = new ArrayList<>(); private final boolean main; private final boolean cacheSecondPassSearch; @@ -103,6 +104,7 @@ public class Grouping { private Query query; private DocSet filter; private Filter luceneFilter; + @SuppressWarnings({"rawtypes"}) private NamedList grouped = new SimpleOrderedMap(); private Set idSet = new LinkedHashSet<>(); // used for tracking unique docs when we need a doclist private int maxMatches; // max number of matches from any grouping command @@ -134,7 +136,7 @@ public Grouping(SolrIndexSearcher searcher, this.main = main; } - public void add(Grouping.Command groupingCommand) { + public void add(@SuppressWarnings({"rawtypes"})Grouping.Command groupingCommand) { commands.add(groupingCommand); } @@ -180,6 +182,7 @@ public void addFieldCommand(String field, SolrQueryRequest request) throws Synta public void addFunctionCommand(String groupByStr, SolrQueryRequest request) throws SyntaxError { QParser parser = QParser.getParser(groupByStr, FunctionQParserPlugin.NAME, request); Query q = parser.getQuery(); + @SuppressWarnings({"rawtypes"}) final Grouping.Command gc; if (q instanceof FunctionQuery) { ValueSource valueSource = ((FunctionQuery) q).getValueSource(); @@ -288,6 +291,7 @@ public Grouping setGetGroupedDocSet(boolean getGroupedDocSet) { return this; } + @SuppressWarnings({"rawtypes"}) public List getCommands() { return commands; } @@ -317,13 +321,13 @@ public void execute() throws IOException { getDocList = (cmd.getFlags() & SolrIndexSearcher.GET_DOCLIST) != 0; query = 
QueryUtils.makeQueryable(cmd.getQuery()); - for (Command cmd : commands) { + for (@SuppressWarnings({"rawtypes"})Command cmd : commands) { cmd.prepare(); } AllGroupHeadsCollector allGroupHeadsCollector = null; List collectors = new ArrayList<>(commands.size()); - for (Command cmd : commands) { + for (@SuppressWarnings({"rawtypes"})Command cmd : commands) { Collector collector = cmd.createFirstPassCollector(); if (collector != null) { collectors.add(collector); @@ -370,7 +374,7 @@ public void execute() throws IOException { } collectors.clear(); - for (Command cmd : commands) { + for (@SuppressWarnings({"rawtypes"})Command cmd : commands) { Collector collector = cmd.createSecondPassCollector(); if (collector != null) collectors.add(collector); @@ -401,7 +405,7 @@ public void execute() throws IOException { } } - for (Command cmd : commands) { + for (@SuppressWarnings({"rawtypes"})Command cmd : commands) { cmd.finish(); } @@ -414,7 +418,7 @@ public void execute() throws IOException { for (int val : idSet) { ids[idx++] = val; } - qr.setDocList(new DocSlice(0, sz, ids, null, maxMatches, maxScore)); + qr.setDocList(new DocSlice(0, sz, ids, null, maxMatches, maxScore, TotalHits.Relation.EQUAL_TO)); } } @@ -592,6 +596,7 @@ protected void populateScoresIfNecessary() throws IOException { } } + @SuppressWarnings({"unchecked", "rawtypes"}) protected NamedList commonResponse() { NamedList groupResult = new SimpleOrderedMap(); grouped.add(key, groupResult); // grouped={ key={ @@ -606,7 +611,7 @@ protected NamedList commonResponse() { return groupResult; } - protected DocList getDocList(GroupDocs groups) { + protected DocList getDocList(@SuppressWarnings({"rawtypes"})GroupDocs groups) { assert groups.totalHits.relation == TotalHits.Relation.EQUAL_TO; int max = Math.toIntExact(groups.totalHits.value); int off = groupOffset; @@ -630,7 +635,7 @@ protected DocList getDocList(GroupDocs groups) { float score = groups.maxScore; maxScore = maxAvoidNaN(score, maxScore); - DocSlice docs = new DocSlice(off, Math.max(0, ids.length - off), ids, scores, groups.totalHits.value, score); + DocSlice docs = new DocSlice(off, Math.max(0, ids.length - off), ids, scores, groups.totalHits.value, score, TotalHits.Relation.EQUAL_TO); if (getDocList) { DocIterator iter = docs.iterator(); @@ -640,12 +645,15 @@ protected DocList getDocList(GroupDocs groups) { return docs; } - protected void addDocList(NamedList rsp, GroupDocs groups) { + @SuppressWarnings({"unchecked"}) + protected void addDocList(@SuppressWarnings({"rawtypes"})NamedList rsp + , @SuppressWarnings({"rawtypes"})GroupDocs groups) { rsp.add("doclist", getDocList(groups)); } // Flatten the groups and get up offset + rows documents protected DocList createSimpleResponse() { + @SuppressWarnings({"rawtypes"}) GroupDocs[] groups = result != null ? result.groups : new GroupDocs[0]; List ids = new ArrayList<>(); @@ -655,7 +663,7 @@ protected DocList createSimpleResponse() { float maxScore = Float.NaN; outer: - for (GroupDocs group : groups) { + for (@SuppressWarnings({"rawtypes"})GroupDocs group : groups) { maxScore = maxAvoidNaN(maxScore, group.maxScore); for (ScoreDoc scoreDoc : group.scoreDocs) { @@ -672,7 +680,7 @@ protected DocList createSimpleResponse() { int len = docsGathered > offset ? 
docsGathered - offset : 0; int[] docs = ArrayUtils.toPrimitive(ids.toArray(new Integer[ids.size()])); float[] docScores = ArrayUtils.toPrimitive(scores.toArray(new Float[scores.size()])); - DocSlice docSlice = new DocSlice(offset, len, docs, docScores, getMatches(), maxScore); + DocSlice docSlice = new DocSlice(offset, len, docs, docScores, getMatches(), maxScore, TotalHits.Relation.EQUAL_TO); if (getDocList) { for (int i = offset; i < docs.length; i++) { @@ -768,6 +776,7 @@ public AllGroupHeadsCollector createAllGroupCollector() throws IOException { } @Override + @SuppressWarnings({"unchecked"}) protected void finish() throws IOException { if (secondPass != null) { result = secondPass.getTopGroups(0); @@ -778,6 +787,7 @@ protected void finish() throws IOException { return; } + @SuppressWarnings({"rawtypes"}) NamedList groupResult = commonResponse(); if (format == Format.simple) { @@ -785,6 +795,7 @@ protected void finish() throws IOException { return; } + @SuppressWarnings({"rawtypes"}) List groupList = new ArrayList(); groupResult.add("groups", groupList); // grouped={ key={ groups=[ @@ -796,6 +807,7 @@ protected void finish() throws IOException { if (numGroups == 0) return; for (GroupDocs group : result.groups) { + @SuppressWarnings({"rawtypes"}) NamedList nl = new SimpleOrderedMap(); groupList.add(nl); // grouped={ key={ groups=[ { @@ -844,6 +856,7 @@ protected Integer getNumberOfGroups() { * A group command for grouping on a query. */ //NOTE: doesn't need to be generic. Maybe Command interface --> First / Second pass abstract impl. + @SuppressWarnings({"rawtypes"}) public class CommandQuery extends Command { public Query query; @@ -911,8 +924,10 @@ public int getMatches() { public class CommandFunc extends Command { public ValueSource groupBy; + @SuppressWarnings({"rawtypes"}) Map context; + @SuppressWarnings({"unchecked"}) private ValueSourceGroupSelector newSelector() { return new ValueSourceGroupSelector(groupBy, context); } @@ -925,6 +940,7 @@ private ValueSourceGroupSelector newSelector() { Collection> topGroups; @Override + @SuppressWarnings({"unchecked"}) protected void prepare() throws IOException { context = ValueSource.newContext(searcher); groupBy.createWeight(context, searcher); @@ -985,6 +1001,7 @@ public AllGroupHeadsCollector createAllGroupCollector() throws IOException { } @Override + @SuppressWarnings({"unchecked"}) protected void finish() throws IOException { if (secondPass != null) { result = secondPass.getTopGroups(0); @@ -995,6 +1012,7 @@ protected void finish() throws IOException { return; } + @SuppressWarnings({"rawtypes"}) NamedList groupResult = commonResponse(); if (format == Format.simple) { @@ -1002,6 +1020,7 @@ protected void finish() throws IOException { return; } + @SuppressWarnings({"rawtypes"}) List groupList = new ArrayList(); groupResult.add("groups", groupList); // grouped={ key={ groups=[ @@ -1013,6 +1032,7 @@ protected void finish() throws IOException { if (numGroups == 0) return; for (GroupDocs group : result.groups) { + @SuppressWarnings({"rawtypes"}) NamedList nl = new SimpleOrderedMap(); groupList.add(nl); // grouped={ key={ groups=[ { nl.add("groupValue", group.groupValue.toObject()); diff --git a/solr/core/src/java/org/apache/solr/search/IGainTermsQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/IGainTermsQParserPlugin.java index c9e143446325..5364b1ba22a8 100644 --- a/solr/core/src/java/org/apache/solr/search/IGainTermsQParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/IGainTermsQParserPlugin.java @@ -143,8 
+143,10 @@ public void collect(int doc) throws IOException { @Override public void finish() throws IOException { NamedList analytics = new NamedList(); + @SuppressWarnings({"unchecked", "rawtypes"}) NamedList topFreq = new NamedList(); + @SuppressWarnings({"unchecked", "rawtypes"}) NamedList allFreq = new NamedList(); rb.rsp.add("featuredTerms", analytics); diff --git a/solr/core/src/java/org/apache/solr/search/MaxScoreCollector.java b/solr/core/src/java/org/apache/solr/search/MaxScoreCollector.java index abb2243fae28..1b8987dbfcc9 100644 --- a/solr/core/src/java/org/apache/solr/search/MaxScoreCollector.java +++ b/solr/core/src/java/org/apache/solr/search/MaxScoreCollector.java @@ -37,9 +37,7 @@ public float getMaxScore() { @Override public ScoreMode scoreMode() { - // Should be TOP_SCORES but this would wrap the scorer unnecessarily since - // this collector is only used in a MultiCollector. - return ScoreMode.COMPLETE; + return ScoreMode.TOP_SCORES; } @Override diff --git a/solr/core/src/java/org/apache/solr/search/QParserPlugin.java b/solr/core/src/java/org/apache/solr/search/QParserPlugin.java index 1fab7e66d6d5..43076822dc3a 100644 --- a/solr/core/src/java/org/apache/solr/search/QParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/QParserPlugin.java @@ -97,7 +97,7 @@ public abstract class QParserPlugin implements NamedListInitializedPlugin, SolrI public abstract QParser createParser(String qstr, SolrParams localParams, SolrParams params, SolrQueryRequest req); @Override - public void init( NamedList args ) { + public void init( @SuppressWarnings({"rawtypes"})NamedList args ) { } @Override diff --git a/solr/core/src/java/org/apache/solr/search/QueryCommand.java b/solr/core/src/java/org/apache/solr/search/QueryCommand.java index 553e022fdbf2..e0b42569df01 100755 --- a/solr/core/src/java/org/apache/solr/search/QueryCommand.java +++ b/solr/core/src/java/org/apache/solr/search/QueryCommand.java @@ -37,6 +37,7 @@ public class QueryCommand { private int supersetMaxDoc; private int flags; private long timeAllowed = -1; + private int minExactCount = Integer.MAX_VALUE; private CursorMark cursorMark; public CursorMark getCursorMark() { @@ -182,6 +183,15 @@ public QueryCommand setTimeAllowed(long timeAllowed) { this.timeAllowed = timeAllowed; return this; } + + public int getMinExactCount() { + return minExactCount; + } + + public QueryCommand setMinExactCount(int count) { + this.minExactCount = count; + return this; + } public boolean isNeedDocSet() { return (flags & SolrIndexSearcher.GET_DOCSET) != 0; diff --git a/solr/core/src/java/org/apache/solr/search/QueryContext.java b/solr/core/src/java/org/apache/solr/search/QueryContext.java index 487feb4689cc..48c7ace0e0a2 100644 --- a/solr/core/src/java/org/apache/solr/search/QueryContext.java +++ b/solr/core/src/java/org/apache/solr/search/QueryContext.java @@ -33,6 +33,7 @@ * instantiate it on demand (and the need to put "searcher" in the map) * @lucene.experimental */ +@SuppressWarnings("rawtypes") public class QueryContext extends IdentityHashMap implements Closeable { // private IdentityHashMap map; // we are the map for now (for compat w/ ValueSource) private final SolrIndexSearcher searcher; @@ -47,6 +48,7 @@ public static QueryContext newContext(IndexSearcher searcher) { return context; } + @SuppressWarnings({"unchecked"}) public QueryContext(IndexSearcher searcher) { this.searcher = searcher instanceof SolrIndexSearcher ? 
(SolrIndexSearcher)searcher : null; indexSearcher = searcher; diff --git a/solr/core/src/java/org/apache/solr/search/QueryParsing.java b/solr/core/src/java/org/apache/solr/search/QueryParsing.java index cc35c7811a06..22ad4213f0b6 100644 --- a/solr/core/src/java/org/apache/solr/search/QueryParsing.java +++ b/solr/core/src/java/org/apache/solr/search/QueryParsing.java @@ -251,6 +251,7 @@ public static void toString(Query query, IndexSchema schema, Appendable out, int out.append(q.includesUpper() ? ']' : '}'); } else if (query instanceof LegacyNumericRangeQuery) { + @SuppressWarnings({"rawtypes"}) LegacyNumericRangeQuery q = (LegacyNumericRangeQuery) query; String fname = q.getField(); FieldType ft = writeFieldName(fname, schema, out, flags); diff --git a/solr/core/src/java/org/apache/solr/search/QueryResultKey.java b/solr/core/src/java/org/apache/solr/search/QueryResultKey.java index 2db1f9c57f71..eba36ae3be51 100644 --- a/solr/core/src/java/org/apache/solr/search/QueryResultKey.java +++ b/solr/core/src/java/org/apache/solr/search/QueryResultKey.java @@ -16,15 +16,15 @@ */ package org.apache.solr.search; +import java.util.ArrayList; +import java.util.List; + import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.RamUsageEstimator; -import java.util.List; -import java.util.ArrayList; - /** A hash key encapsulating a query, a list of filters, and a sort * */ @@ -37,18 +37,23 @@ public final class QueryResultKey implements Accountable { final SortField[] sfields; final List filters; final int nc_flags; // non-comparable flags... ignored by hashCode and equals + final int minExactCount; private final int hc; // cached hashCode private final long ramBytesUsed; // cached private static SortField[] defaultSort = new SortField[0]; - public QueryResultKey(Query query, List filters, Sort sort, int nc_flags) { + this(query, filters, sort, nc_flags, Integer.MAX_VALUE); + } + + public QueryResultKey(Query query, List filters, Sort sort, int nc_flags, int minExactCount) { this.query = query; this.sort = sort; this.filters = filters; this.nc_flags = nc_flags; + this.minExactCount = minExactCount; int h = query.hashCode(); @@ -65,6 +70,7 @@ public QueryResultKey(Query query, List filters, Sort sort, int nc_flags) h = h*29 + sf.hashCode(); ramSfields += BASE_SF_RAM_BYTES_USED + RamUsageEstimator.sizeOfObject(sf.getField()); } + h = h*31 + minExactCount; hc = h; @@ -96,6 +102,7 @@ public boolean equals(Object o) { if (this.sfields.length != other.sfields.length) return false; if (!this.query.equals(other.query)) return false; if (!unorderedCompare(this.filters, other.filters)) return false; + if (this.minExactCount != other.minExactCount) return false; for (int i=0; i mainCollector; final private IndexSearcher searcher; final private int reRankDocs; final private int length; @@ -54,6 +55,7 @@ public class ReRankCollector extends TopDocsCollector { final private Query query; + @SuppressWarnings({"unchecked"}) public ReRankCollector(int reRankDocs, int length, Rescorer reRankQueryRescorer, @@ -68,11 +70,11 @@ public ReRankCollector(int reRankDocs, Sort sort = cmd.getSort(); if(sort == null) { this.sort = null; - this.mainCollector = TopScoreDocCollector.create(Math.max(this.reRankDocs, length), Integer.MAX_VALUE); + this.mainCollector = TopScoreDocCollector.create(Math.max(this.reRankDocs, length), cmd.getMinExactCount()); } else { this.sort = sort = sort.rewrite(searcher); 
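
In this constructor ReRankCollector now hands cmd.getMinExactCount() to Lucene as the totalHitsThreshold, for TopScoreDocCollector above and TopFieldCollector just below, where it previously pinned the threshold to Integer.MAX_VALUE; the scoreMode() change further down delegates to the wrapped collector so the threshold can actually take effect. A self-contained sketch of what the threshold buys, assuming an IndexSearcher and Query are already in hand:

    import java.io.IOException;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TopScoreDocCollector;
    import org.apache.lucene.search.TotalHits;

    class ThresholdExample {
      static String countHits(IndexSearcher searcher, Query query) throws IOException {
        // Top 10 hits; count exactly only up to 1,000 hits. Past the threshold
        // Lucene may skip blocks of non-competitive documents entirely.
        TopScoreDocCollector collector = TopScoreDocCollector.create(10, 1000);
        searcher.search(query, collector);
        TotalHits hits = collector.topDocs().totalHits;
        return (hits.relation == TotalHits.Relation.EQUAL_TO ? "" : ">= ") + hits.value;
      }
    }
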
//scores are needed for Rescorer (regardless of whether sort needs it) - this.mainCollector = TopFieldCollector.create(sort, Math.max(this.reRankDocs, length), Integer.MAX_VALUE); + this.mainCollector = TopFieldCollector.create(sort, Math.max(this.reRankDocs, length), cmd.getMinExactCount()); } this.searcher = searcher; this.reRankQueryRescorer = reRankQueryRescorer; @@ -89,9 +91,10 @@ public LeafCollector getLeafCollector(LeafReaderContext context) throws IOExcept @Override public ScoreMode scoreMode() { - return sort == null || sort.needsScores() ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES; + return this.mainCollector.scoreMode(); } + @SuppressWarnings({"unchecked"}) public TopDocs topDocs(int start, int howMany) { try { @@ -152,6 +155,7 @@ public TopDocs topDocs(int start, int howMany) { } } + @SuppressWarnings({"rawtypes"}) public static class BoostedComp implements Comparator { IntFloatHashMap boostedMap; diff --git a/solr/core/src/java/org/apache/solr/search/SignificantTermsQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/SignificantTermsQParserPlugin.java index d5776a849cf9..130490fe9b85 100644 --- a/solr/core/src/java/org/apache/solr/search/SignificantTermsQParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/SignificantTermsQParserPlugin.java @@ -134,12 +134,18 @@ public void collect(int doc) throws IOException { @Override public void finish() throws IOException { + @SuppressWarnings({"unchecked", "rawtypes"}) List outTerms = new ArrayList(); + @SuppressWarnings({"unchecked", "rawtypes"}) List outFreq = new ArrayList(); + @SuppressWarnings({"unchecked", "rawtypes"}) List outQueryFreq = new ArrayList(); + @SuppressWarnings({"unchecked", "rawtypes"}) List scores = new ArrayList(); + @SuppressWarnings({"unchecked", "rawtypes"}) NamedList allFreq = new NamedList(); + @SuppressWarnings({"unchecked", "rawtypes"}) NamedList allQueryFreq = new NamedList(); LinkedHashMap response = new LinkedHashMap<>(); diff --git a/solr/core/src/java/org/apache/solr/search/SolrCache.java b/solr/core/src/java/org/apache/solr/search/SolrCache.java index c37cf9ea26e1..47a45e389dfa 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrCache.java +++ b/solr/core/src/java/org/apache/solr/search/SolrCache.java @@ -18,6 +18,7 @@ import org.apache.solr.core.SolrInfoBean; +import java.io.IOException; import java.util.Map; import java.util.function.Function; @@ -63,7 +64,7 @@ public interface SolrCache extends SolrInfoBean { * regenerate an item in the new cache from an entry in the old cache. * */ - public Object init(Map args, Object persistence, CacheRegenerator regenerator); + public Object init(@SuppressWarnings({"rawtypes"})Map args, Object persistence, CacheRegenerator regenerator); // I don't think we need a factory for faster creation given that these // will be associated with slow-to-create SolrIndexSearchers. // change to NamedList when other plugins do? 
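
The init signature above shows the annotation pattern used throughout this patch: @SuppressWarnings moved onto the narrowest element that triggers the warning, here a single raw Map parameter, rather than blanketing the whole method. A compilable sketch of the two scopes, with illustrative names:

    import java.util.Map;

    interface NarrowSuppression {
      // Applies to this one parameter only; unchecked or raw-type problems
      // anywhere else in an implementation still get flagged.
      void init(@SuppressWarnings({"rawtypes"}) Map args);
    }

    interface BroadSuppression {
      // Method-level suppression would also hide unrelated warnings in the
      // body of any default implementation.
      @SuppressWarnings({"rawtypes"})
      void init(Map args);
    }

The next hunk applies the same narrowing to exceptions: SolrCache#close() now declares IOException instead of Exception, so callers, including try-with-resources blocks, no longer need to catch a bare Exception.
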
@@ -150,7 +151,7 @@ public enum State { /** Frees any non-memory resources */ - default void close() throws Exception { + default void close() throws IOException { SolrInfoBean.super.close(); } diff --git a/solr/core/src/java/org/apache/solr/search/SolrConstantScoreQuery.java b/solr/core/src/java/org/apache/solr/search/SolrConstantScoreQuery.java index 0d37abb95b28..82beecbad4e4 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrConstantScoreQuery.java +++ b/solr/core/src/java/org/apache/solr/search/SolrConstantScoreQuery.java @@ -84,6 +84,7 @@ public int getCost() { } protected class ConstantWeight extends ConstantScoreWeight { + @SuppressWarnings({"rawtypes"}) private Map context; private ScoreMode scoreMode; diff --git a/solr/core/src/java/org/apache/solr/search/SolrCoreParser.java b/solr/core/src/java/org/apache/solr/search/SolrCoreParser.java index def2919c04c4..23d3a9345cc8 100755 --- a/solr/core/src/java/org/apache/solr/search/SolrCoreParser.java +++ b/solr/core/src/java/org/apache/solr/search/SolrCoreParser.java @@ -52,6 +52,7 @@ public SolrCoreParser(String defaultField, Analyzer analyzer, } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void init(NamedList initArgs) { if (initArgs == null || initArgs.size() == 0) { return; diff --git a/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java b/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java index c2a80c68be78..443e1438e89c 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java +++ b/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java @@ -108,6 +108,7 @@ public class SolrDocumentFetcher { private Collection storedHighlightFieldNames; // lazy populated; use getter + @SuppressWarnings({"unchecked"}) SolrDocumentFetcher(SolrIndexSearcher searcher, SolrConfig solrConfig, boolean cachingEnabled) { this.searcher = searcher; this.enableLazyFieldLoading = solrConfig.enableLazyFieldLoading; diff --git a/solr/core/src/java/org/apache/solr/search/SolrFilter.java b/solr/core/src/java/org/apache/solr/search/SolrFilter.java index f1bf5054c19d..0dc255a6043f 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrFilter.java +++ b/solr/core/src/java/org/apache/solr/search/SolrFilter.java @@ -34,9 +34,11 @@ public abstract class SolrFilter extends Filter { /** Implementations should propagate createWeight to sub-ValueSources which can store weight info in the context. * The context object will be passed to getDocIdSet() where this info can be retrieved. 
*/ - public abstract void createWeight(Map context, IndexSearcher searcher) throws IOException; + public abstract void createWeight(@SuppressWarnings({"rawtypes"})Map context + , IndexSearcher searcher) throws IOException; - public abstract DocIdSet getDocIdSet(Map context, LeafReaderContext readerContext, Bits acceptDocs) throws IOException; + public abstract DocIdSet getDocIdSet(@SuppressWarnings({"rawtypes"})Map context + , LeafReaderContext readerContext, Bits acceptDocs) throws IOException; @Override public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException { diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java index e53cf0c0100f..a6b4ed91305a 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java +++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java @@ -50,6 +50,7 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.*; +import org.apache.lucene.search.TotalHits.Relation; import org.apache.lucene.store.Directory; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; @@ -96,7 +97,9 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI // These should *only* be used for debugging or monitoring purposes public static final AtomicLong numOpens = new AtomicLong(); public static final AtomicLong numCloses = new AtomicLong(); + @SuppressWarnings({"rawtypes"}) private static final Map NO_GENERIC_CACHES = Collections.emptyMap(); + @SuppressWarnings({"rawtypes"}) private static final SolrCache[] NO_CACHES = new SolrCache[0]; private final SolrCore core; @@ -121,9 +124,11 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI private final SolrCache fieldValueCache; // map of generic caches - not synchronized since it's read-only after the constructor. + @SuppressWarnings({"rawtypes"}) private final Map cacheMap; // list of all caches associated with this searcher. + @SuppressWarnings({"rawtypes"}) private final SolrCache[] cacheList; private DirectoryFactory directoryFactory; @@ -159,13 +164,14 @@ private static DirectoryReader wrapReader(SolrCore core, DirectoryReader reader) UninvertingReader.wrap(reader, core.getLatestSchema().getUninversionMapper()), SolrQueryTimeoutImpl.getInstance()); } - + /** * Builds the necessary collector chain (via delegate wrapping) and executes the query against it. This method takes * into consideration both the explicitly provided collector and postFilter as well as any needed collector wrappers * for dealing with options specified in the QueryCommand. 
+ * @return The collector used for search */ - private void buildAndRunCollectorChain(QueryResult qr, Query query, Collector collector, QueryCommand cmd, + private Collector buildAndRunCollectorChain(QueryResult qr, Query query, Collector collector, QueryCommand cmd, DelegatingCollector postFilter) throws IOException { EarlyTerminatingSortingCollector earlyTerminatingSortingCollector = null; @@ -215,6 +221,7 @@ private void buildAndRunCollectorChain(QueryResult qr, Query query, Collector co if (collector instanceof DelegatingCollector) { ((DelegatingCollector) collector).finish(); } + return collector; } public SolrIndexSearcher(SolrCore core, String path, IndexSchema schema, SolrIndexConfig config, String name, @@ -226,6 +233,7 @@ public SolrIndexSearcher(SolrCore core, String path, IndexSchema schema, SolrInd this.releaseDirectory = true; } + @SuppressWarnings({"unchecked", "rawtypes"}) public SolrIndexSearcher(SolrCore core, String path, IndexSchema schema, String name, DirectoryReader r, boolean closeReader, boolean enableCache, boolean reserveDirectory, DirectoryFactory directoryFactory) throws IOException { @@ -241,7 +249,7 @@ public SolrIndexSearcher(SolrCore core, String path, IndexSchema schema, String this.schema = schema; this.name = "Searcher@" + Integer.toHexString(hashCode()) + "[" + core.getName() + "]" + (name != null ? " " + name : ""); - log.info("Opening [{}]", this.name); + log.debug("Opening [{}]", this.name); if (directoryFactory.searchersReserveCommitPoints()) { // reserve commit point for life of searcher @@ -425,12 +433,12 @@ public void register() { // register self infoRegistry.put(STATISTICS_KEY, this); infoRegistry.put(name, this); - for (SolrCache cache : cacheList) { + for (@SuppressWarnings({"rawtypes"})SolrCache cache : cacheList) { cache.setState(SolrCache.State.LIVE); infoRegistry.put(cache.name(), cache); } this.solrMetricsContext = core.getSolrMetricsContext().getChildContext(this); - for (SolrCache cache : cacheList) { + for (@SuppressWarnings({"rawtypes"})SolrCache cache : cacheList) { cache.initializeMetrics(solrMetricsContext, SolrMetricManager.mkName(cache.name(), STATISTICS_KEY)); } initializeMetrics(solrMetricsContext, STATISTICS_KEY); @@ -448,7 +456,7 @@ public void close() throws IOException { if (cachingEnabled) { final StringBuilder sb = new StringBuilder(); sb.append("Closing ").append(name); - for (SolrCache cache : cacheList) { + for (@SuppressWarnings({"rawtypes"})SolrCache cache : cacheList) { sb.append("\n\t"); sb.append(cache); } @@ -475,7 +483,7 @@ public void close() throws IOException { core.getDeletionPolicy().releaseCommitPoint(cpg); } - for (SolrCache cache : cacheList) { + for (@SuppressWarnings({"rawtypes"})SolrCache cache : cacheList) { try { cache.close(); } catch (Exception e) { @@ -521,7 +529,9 @@ public static void initRegenerators(SolrConfig solrConfig) { if (solrConfig.fieldValueCacheConfig != null && solrConfig.fieldValueCacheConfig.getRegenerator() == null) { solrConfig.fieldValueCacheConfig.setRegenerator(new CacheRegenerator() { @Override - public boolean regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, SolrCache oldCache, + public boolean regenerateItem(SolrIndexSearcher newSearcher, + @SuppressWarnings({"rawtypes"})SolrCache newCache, + @SuppressWarnings({"rawtypes"})SolrCache oldCache, Object oldKey, Object oldVal) throws IOException { if (oldVal instanceof UnInvertedField) { UnInvertedField.getUnInvertedField((String) oldKey, newSearcher); @@ -534,7 +544,9 @@ public boolean 
regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, if (solrConfig.filterCacheConfig != null && solrConfig.filterCacheConfig.getRegenerator() == null) { solrConfig.filterCacheConfig.setRegenerator(new CacheRegenerator() { @Override - public boolean regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, SolrCache oldCache, + @SuppressWarnings({"rawtypes"})public boolean regenerateItem(SolrIndexSearcher newSearcher + , @SuppressWarnings({"rawtypes"})SolrCache newCache + , @SuppressWarnings({"rawtypes"})SolrCache oldCache, Object oldKey, Object oldVal) throws IOException { newSearcher.cacheDocSet((Query) oldKey, null, false); return true; @@ -546,6 +558,7 @@ public boolean regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, final int queryResultWindowSize = solrConfig.queryResultWindowSize; solrConfig.queryResultCacheConfig.setRegenerator(new CacheRegenerator() { @Override + @SuppressWarnings({"rawtypes"}) public boolean regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, SolrCache oldCache, Object oldKey, Object oldVal) throws IOException { QueryResultKey key = (QueryResultKey) oldKey; @@ -1305,7 +1318,7 @@ private void getDocListC(QueryResult qr, QueryCommand cmd) throws IOException { && (flags & (NO_CHECK_QCACHE | NO_SET_QCACHE)) != ((NO_CHECK_QCACHE | NO_SET_QCACHE))) { // all of the current flags can be reused during warming, // so set all of them on the cache key. - key = new QueryResultKey(q, cmd.getFilterList(), cmd.getSort(), flags); + key = new QueryResultKey(q, cmd.getFilterList(), cmd.getSort(), flags, cmd.getMinExactCount()); if ((flags & NO_CHECK_QCACHE) == 0) { superset = queryResultCache.get(key); @@ -1481,8 +1494,9 @@ private void populateNextCursorMarkFromTopDocs(QueryResult qr, QueryCommand qc, * @param cmd * The Command whose properties should determine the type of TopDocsCollector to use. */ + @SuppressWarnings({"rawtypes"}) private TopDocsCollector buildTopDocsCollector(int len, QueryCommand cmd) throws IOException { - + int minNumFound = cmd.getMinExactCount(); Query q = cmd.getQuery(); if (q instanceof RankQuery) { RankQuery rq = (RankQuery) q; @@ -1491,14 +1505,14 @@ private TopDocsCollector buildTopDocsCollector(int len, QueryCommand cmd) throws if (null == cmd.getSort()) { assert null == cmd.getCursorMark() : "have cursor but no sort"; - return TopScoreDocCollector.create(len, Integer.MAX_VALUE); + return TopScoreDocCollector.create(len, minNumFound); } else { // we have a sort final Sort weightedSort = weightSort(cmd.getSort()); final CursorMark cursor = cmd.getCursorMark(); final FieldDoc searchAfter = (null != cursor ? cursor.getSearchAfterFieldDoc() : null); - return TopFieldCollector.create(weightedSort, len, searchAfter, Integer.MAX_VALUE); + return TopFieldCollector.create(weightedSort, len, searchAfter, minNumFound); } } @@ -1517,6 +1531,7 @@ private void getDocListNC(QueryResult qr, QueryCommand cmd) throws IOException { ProcessedFilter pf = getProcessedFilter(cmd.getFilter(), cmd.getFilterList()); final Query query = QueryUtils.combineQueryAndFilter(QueryUtils.makeQueryable(cmd.getQuery()), pf.filter); + Relation hitsRelation; // handle zero case... if (lastDocRequested <= 0) { @@ -1569,18 +1584,24 @@ public ScoreMode scoreMode() { maxScore = totalHits > 0 ? 
topscore[0] : 0.0f; // no docs on this page, so cursor doesn't change qr.setNextCursorMark(cmd.getCursorMark()); + hitsRelation = Relation.EQUAL_TO; } else { - final TopDocsCollector topCollector = buildTopDocsCollector(len, cmd); + final TopDocsCollector topCollector = buildTopDocsCollector(len, cmd); MaxScoreCollector maxScoreCollector = null; Collector collector = topCollector; if ((cmd.getFlags() & GET_SCORES) != 0) { maxScoreCollector = new MaxScoreCollector(); collector = MultiCollector.wrap(topCollector, maxScoreCollector); } - buildAndRunCollectorChain(qr, query, collector, cmd, pf.postFilter); + ScoreMode scoreModeUsed = buildAndRunCollectorChain(qr, query, collector, cmd, pf.postFilter).scoreMode(); totalHits = topCollector.getTotalHits(); TopDocs topDocs = topCollector.topDocs(0, len); + if (scoreModeUsed == ScoreMode.COMPLETE || scoreModeUsed == ScoreMode.COMPLETE_NO_SCORES) { + hitsRelation = TotalHits.Relation.EQUAL_TO; + } else { + hitsRelation = topDocs.totalHits.relation; + } if (cmd.getSort() != null && query instanceof RankQuery == false && (cmd.getFlags() & GET_SCORES) != 0) { TopFieldCollector.populateScores(topDocs.scoreDocs, this, query); } @@ -1599,7 +1620,7 @@ public ScoreMode scoreMode() { int sliceLen = Math.min(lastDocRequested, nDocsReturned); if (sliceLen < 0) sliceLen = 0; - qr.setDocList(new DocSlice(0, sliceLen, ids, scores, totalHits, maxScore)); + qr.setDocList(new DocSlice(0, sliceLen, ids, scores, totalHits, maxScore, hitsRelation)); } // any DocSet returned is for the query only, without any filtering... that way it may @@ -1618,6 +1639,7 @@ private DocSet getDocListAndSetNC(QueryResult qr, QueryCommand cmd) throws IOExc boolean needScores = (cmd.getFlags() & GET_SCORES) != 0; int maxDoc = maxDoc(); + cmd.setMinExactCount(Integer.MAX_VALUE);// We need the full DocSet ProcessedFilter pf = getProcessedFilter(cmd.getFilter(), cmd.getFilterList()); final Query query = QueryUtils.combineQueryAndFilter(QueryUtils.makeQueryable(cmd.getQuery()), pf.filter); @@ -1668,7 +1690,7 @@ public ScoreMode scoreMode() { // no docs on this page, so cursor doesn't change qr.setNextCursorMark(cmd.getCursorMark()); } else { - + @SuppressWarnings({"rawtypes"}) final TopDocsCollector topCollector = buildTopDocsCollector(len, cmd); DocSetCollector setCollector = new DocSetCollector(maxDoc); MaxScoreCollector maxScoreCollector = null; @@ -1708,7 +1730,7 @@ public ScoreMode scoreMode() { int sliceLen = Math.min(lastDocRequested, nDocsReturned); if (sliceLen < 0) sliceLen = 0; - qr.setDocList(new DocSlice(0, sliceLen, ids, scores, totalHits, maxScore)); + qr.setDocList(new DocSlice(0, sliceLen, ids, scores, totalHits, maxScore, TotalHits.Relation.EQUAL_TO)); // TODO: if we collect results before the filter, we just need to intersect with // that filter to generate the DocSet for qr.setDocSet() qr.setDocSet(set); @@ -1981,7 +2003,7 @@ protected void sortDocSet(QueryResult qr, QueryCommand cmd) throws IOException { int nDocs = cmd.getSupersetMaxDoc(); if (nDocs == 0) { // SOLR-2923 - qr.getDocListAndSet().docList = new DocSlice(0, 0, new int[0], null, set.size(), 0f); + qr.getDocListAndSet().docList = new DocSlice(0, 0, new int[0], null, set.size(), 0f, TotalHits.Relation.EQUAL_TO); qr.setNextCursorMark(cmd.getCursorMark()); return; } @@ -1989,6 +2011,7 @@ protected void sortDocSet(QueryResult qr, QueryCommand cmd) throws IOException { // bit of a hack to tell if a set is sorted - do it better in the future. 
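
[Editorial aside, not part of the patch.] The hunks above are the heart of this change: buildTopDocsCollector() now passes cmd.getMinExactCount() to TopScoreDocCollector.create() and TopFieldCollector.create() instead of Integer.MAX_VALUE, and getDocListNC() records whether the resulting hit count is exact (TotalHits.Relation.EQUAL_TO) or only a lower bound, threading that relation into the new DocSlice constructor. A minimal, self-contained sketch of the underlying Lucene contract follows; the index contents are hypothetical, and whether the collector actually stops counting depends on the query supporting impact-based skipping:

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.TextField;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.TermQuery;
    import org.apache.lucene.search.TopDocs;
    import org.apache.lucene.search.TopScoreDocCollector;
    import org.apache.lucene.store.ByteBuffersDirectory;
    import org.apache.lucene.store.Directory;

    public class MinExactCountSketch {
      public static void main(String[] args) throws Exception {
        Directory dir = new ByteBuffersDirectory();
        try (IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
          for (int i = 0; i < 1000; i++) {
            Document doc = new Document();
            doc.add(new TextField("body", "common", Field.Store.NO));
            w.addDocument(doc);
          }
        }
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
          IndexSearcher searcher = new IndexSearcher(reader);
          // The second argument is the totalHitsThreshold. With
          // Integer.MAX_VALUE (the old behavior) the count is always exact;
          // with a small value the collector is free to stop counting once
          // that many hits have been seen.
          TopScoreDocCollector collector = TopScoreDocCollector.create(10, 10);
          searcher.search(new TermQuery(new Term("body", "common")), collector);
          TopDocs td = collector.topDocs();
          // Prints either "1000 EQUAL_TO" or "<lowerBound> GREATER_THAN_OR_EQUAL_TO",
          // depending on whether the scorer could skip non-competitive documents.
          System.out.println(td.totalHits.value + " " + td.totalHits.relation);
        }
      }
    }

This also explains the ScoreMode check above: if the collector chain ran with COMPLETE or COMPLETE_NO_SCORES (for example because a post-filter forced full evaluation), every hit really was visited, so the relation is overridden back to EQUAL_TO regardless of what topDocs reports.
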
boolean inOrder = set instanceof BitDocSet || set instanceof SortedIntDocSet; + @SuppressWarnings({"rawtypes"}) TopDocsCollector topCollector = buildTopDocsCollector(nDocs, cmd); DocIterator iter = set.iterator(); @@ -2020,7 +2043,7 @@ protected void sortDocSet(QueryResult qr, QueryCommand cmd) throws IOException { } assert topDocs.totalHits.relation == TotalHits.Relation.EQUAL_TO; - qr.getDocListAndSet().docList = new DocSlice(0, nDocsReturned, ids, null, topDocs.totalHits.value, 0.0f); + qr.getDocListAndSet().docList = new DocSlice(0, nDocsReturned, ids, null, topDocs.totalHits.value, 0.0f, topDocs.totalHits.relation); populateNextCursorMarkFromTopDocs(qr, cmd, topDocs); } @@ -2111,6 +2134,7 @@ public boolean intersects(DocSet a, DocsEnumState deState) throws IOException { /** * Warm this searcher based on an old one (primarily for auto-cache warming). */ + @SuppressWarnings({"unchecked"}) public void warm(SolrIndexSearcher old) { // Make sure this is first! filters can help queryResults execute! long warmingStartTime = System.nanoTime(); @@ -2133,7 +2157,6 @@ public void close() {} }; final SolrQueryResponse rsp = new SolrQueryResponse(); - SolrRequestInfo.clearRequestInfo(); SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp)); try { cacheList[i].warm(this, old.cacheList[i]); @@ -2155,6 +2178,7 @@ public void close() {} /** * return the named generic cache */ + @SuppressWarnings({"rawtypes"}) public SolrCache getCache(String cacheName) { return cacheMap.get(cacheName); } @@ -2162,7 +2186,9 @@ public SolrCache getCache(String cacheName) { /** * lookup an entry in a generic cache */ + @SuppressWarnings({"unchecked"}) public Object cacheLookup(String cacheName, Object key) { + @SuppressWarnings({"rawtypes"}) SolrCache cache = cacheMap.get(cacheName); return cache == null ? null : cache.get(key); } @@ -2170,7 +2196,9 @@ public Object cacheLookup(String cacheName, Object key) { /** * insert an entry in a generic cache */ + @SuppressWarnings({"unchecked"}) public Object cacheInsert(String cacheName, Object key, Object val) { + @SuppressWarnings({"rawtypes"}) SolrCache cache = cacheMap.get(cacheName); return cache == null ? 
null : cache.put(key, val); } @@ -2444,5 +2472,8 @@ public int hashCode() { + 31 * Objects.hashCode(weights); } } + public long getWarmupTime() { + return warmupTime; + } } diff --git a/solr/core/src/java/org/apache/solr/search/TermsQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/TermsQParserPlugin.java index 1d92b7caa606..9a8a12ad564e 100644 --- a/solr/core/src/java/org/apache/solr/search/TermsQParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/TermsQParserPlugin.java @@ -123,6 +123,9 @@ public QParser createParser(String qstr, SolrParams localParams, SolrParams para @Override public Query parse() throws SyntaxError { String fname = localParams.get(QueryParsing.F); + if (fname == null || fname.isEmpty()) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Missing field to query"); + } FieldType ft = req.getSchema().getFieldType(fname); String separator = localParams.get(SEPARATOR, ","); String qstr = localParams.get(QueryParsing.V);//never null diff --git a/solr/core/src/java/org/apache/solr/search/TextLogisticRegressionQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/TextLogisticRegressionQParserPlugin.java index 5d3bb46d4f2e..0fd0f6ab0442 100644 --- a/solr/core/src/java/org/apache/solr/search/TextLogisticRegressionQParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/TextLogisticRegressionQParserPlugin.java @@ -53,7 +53,7 @@ public class TextLogisticRegressionQParserPlugin extends QParserPlugin { public static final String NAME = "tlogit"; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { } @Override @@ -165,6 +165,7 @@ public void collect(int doc) throws IOException{ } + @SuppressWarnings({"unchecked"}) public void finish() throws IOException { Map docVectors = new HashMap<>(); @@ -211,6 +212,7 @@ public void finish() throws IOException { } } + @SuppressWarnings({"rawtypes"}) NamedList analytics = new NamedList(); rbsp.rsp.add("logit", analytics); diff --git a/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java b/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java index d054bc8d8868..b1f78301ae2e 100644 --- a/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java +++ b/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java @@ -98,7 +98,7 @@ public abstract class ValueSourceParser implements NamedListInitializedPlugin { * Initialize the plugin. */ @Override - public void init(NamedList args) {} + public void init(@SuppressWarnings({"rawtypes"})NamedList args) {} /** * Parse the user input into a ValueSource. 
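
[Editorial aside, not part of the patch.] The TermsQParserPlugin hunk above is a small robustness fix: with no "f" local parameter, req.getSchema().getFieldType(null) previously surfaced as an opaque NullPointerException; now the request is rejected up front as a 400. A sketch of the guard in isolation (SolrException and ErrorCode are the real Solr classes; the surrounding parser is elided):

    import org.apache.solr.common.SolrException;

    public class RequiredParamGuard {
      // Mirrors the guard added to TermsQParserPlugin.parse(): a missing or
      // empty 'f' local param becomes an explicit 400 BAD_REQUEST instead of
      // an NPE when the schema lookup later dereferences the field name.
      static String requireField(String fname) {
        if (fname == null || fname.isEmpty()) {
          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Missing field to query");
        }
        return fname;
      }

      public static void main(String[] args) {
        System.out.println(requireField("id")); // ok
        requireField("");                       // throws 400 "Missing field to query"
      }
    }

A request such as {!terms f=id}doc1,doc2 behaves exactly as before; {!terms}doc1 now fails fast with "Missing field to query" rather than a 500.
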
@@ -764,14 +764,14 @@ public ValueSource parse(FunctionQParser fp) throws SyntaxError { addParser("true", new ValueSourceParser() { @Override public ValueSource parse(FunctionQParser fp) { - return new BoolConstValueSource(true); + return BoolConstValueSource.TRUE; } }); addParser("false", new ValueSourceParser() { @Override public ValueSource parse(FunctionQParser fp) { - return new BoolConstValueSource(false); + return BoolConstValueSource.FALSE; } }); @@ -1184,430 +1184,435 @@ private static class TInfo { BytesRefBuilder indexedBytes; } -} - + static class DateValueSourceParser extends ValueSourceParser { + @Override + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { + } -class DateValueSourceParser extends ValueSourceParser { - @Override - public void init(NamedList args) { - } + public Date getDate(FunctionQParser fp, String arg) { + if (arg == null) return null; + // check character index 1 to be a digit. Index 0 might be a +/-. + if (arg.startsWith("NOW") || (arg.length() > 1 && Character.isDigit(arg.charAt(1)))) { + Date now = null;//TODO pull from params? + return DateMathParser.parseMath(now, arg); + } + return null; + } - public Date getDate(FunctionQParser fp, String arg) { - if (arg == null) return null; - // check character index 1 to be a digit. Index 0 might be a +/-. - if (arg.startsWith("NOW") || (arg.length() > 1 && Character.isDigit(arg.charAt(1)))) { - Date now = null;//TODO pull from params? - return DateMathParser.parseMath(now, arg); + public ValueSource getValueSource(FunctionQParser fp, String arg) { + if (arg == null) return null; + SchemaField f = fp.req.getSchema().getField(arg); + return f.getType().getValueSource(f, fp); } - return null; - } - public ValueSource getValueSource(FunctionQParser fp, String arg) { - if (arg == null) return null; - SchemaField f = fp.req.getSchema().getField(arg); - return f.getType().getValueSource(f, fp); - } + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + String first = fp.parseArg(); + String second = fp.parseArg(); + if (first == null) first = "NOW"; - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - String first = fp.parseArg(); - String second = fp.parseArg(); - if (first == null) first = "NOW"; + Date d1 = getDate(fp, first); + ValueSource v1 = d1 == null ? getValueSource(fp, first) : null; + + Date d2 = getDate(fp, second); + ValueSource v2 = d2 == null ? getValueSource(fp, second) : null; - Date d1 = getDate(fp, first); - ValueSource v1 = d1 == null ? getValueSource(fp, first) : null; + // d constant + // v field + // dd constant + // dv subtract field from constant + // vd subtract constant from field + // vv subtract fields - Date d2 = getDate(fp, second); - ValueSource v2 = d2 == null ? getValueSource(fp, second) : null; + final long ms1 = (d1 == null) ? 0 : d1.getTime(); + final long ms2 = (d2 == null) ? 0 : d2.getTime(); - // d constant - // v field - // dd constant - // dv subtract field from constant - // vd subtract constant from field - // vv subtract fields + // "d,dd" handle both constant cases - final long ms1 = (d1 == null) ? 0 : d1.getTime(); - final long ms2 = (d2 == null) ? 
0 : d2.getTime(); + if (d1 != null && v2 == null) { + return new LongConstValueSource(ms1 - ms2); + } + + // "v" just the date field + if (v1 != null && v2 == null && d2 == null) { + return v1; + } - // "d,dd" handle both constant cases - if (d1 != null && v2 == null) { - return new LongConstValueSource(ms1 - ms2); - } + // "dv" + if (d1 != null && v2 != null) + return new DualFloatFunction(new LongConstValueSource(ms1), v2) { + @Override + protected String name() { + return "ms"; + } + + @Override + protected float func(int doc, FunctionValues aVals, FunctionValues bVals) throws IOException { + return ms1 - bVals.longVal(doc); + } + }; + + // "vd" + if (v1 != null && d2 != null) + return new DualFloatFunction(v1, new LongConstValueSource(ms2)) { + @Override + protected String name() { + return "ms"; + } + + @Override + protected float func(int doc, FunctionValues aVals, FunctionValues bVals) throws IOException { + return aVals.longVal(doc) - ms2; + } + }; + + // "vv" + if (v1 != null && v2 != null) + return new DualFloatFunction(v1, v2) { + @Override + protected String name() { + return "ms"; + } + + @Override + protected float func(int doc, FunctionValues aVals, FunctionValues bVals) throws IOException { + return aVals.longVal(doc) - bVals.longVal(doc); + } + }; - // "v" just the date field - if (v1 != null && v2 == null && d2 == null) { - return v1; + return null; // shouldn't happen } + } - // "dv" - if (d1 != null && v2 != null) - return new DualFloatFunction(new LongConstValueSource(ms1), v2) { - @Override - protected String name() { - return "ms"; - } + // Private for now - we need to revisit how to handle typing in function queries + static class LongConstValueSource extends ConstNumberSource { + final long constant; + final double dv; + final float fv; + + public LongConstValueSource(long constant) { + this.constant = constant; + this.dv = constant; + this.fv = constant; + } + @Override + public String description() { + return "const(" + constant + ")"; + } + + @Override + public FunctionValues getValues(@SuppressWarnings({"rawtypes"})Map context + , LeafReaderContext readerContext) throws IOException { + return new LongDocValues(this) { @Override - protected float func(int doc, FunctionValues aVals, FunctionValues bVals) throws IOException { - return ms1 - bVals.longVal(doc); + public float floatVal(int doc) { + return fv; } - }; - // "vd" - if (v1 != null && d2 != null) - return new DualFloatFunction(v1, new LongConstValueSource(ms2)) { @Override - protected String name() { - return "ms"; + public int intVal(int doc) { + return (int) constant; } @Override - protected float func(int doc, FunctionValues aVals, FunctionValues bVals) throws IOException { - return aVals.longVal(doc) - ms2; + public long longVal(int doc) { + return constant; } - }; - // "vv" - if (v1 != null && v2 != null) - return new DualFloatFunction(v1, v2) { @Override - protected String name() { - return "ms"; + public double doubleVal(int doc) { + return dv; } @Override - protected float func(int doc, FunctionValues aVals, FunctionValues bVals) throws IOException { - return aVals.longVal(doc) - bVals.longVal(doc); + public String toString(int doc) { + return description(); } }; + } - return null; // shouldn't happen - } + @Override + public int hashCode() { + return (int) constant + (int) (constant >>> 32); + } -} + @Override + public boolean equals(Object o) { + if (LongConstValueSource.class != o.getClass()) return false; + LongConstValueSource other = (LongConstValueSource) o; + return this.constant == 
other.constant; + } + + @Override + public int getInt() { + return (int)constant; + } + + @Override + public long getLong() { + return constant; + } + + @Override + public float getFloat() { + return fv; + } + @Override + public double getDouble() { + return dv; + } -// Private for now - we need to revisit how to handle typing in function queries -class LongConstValueSource extends ConstNumberSource { - final long constant; - final double dv; - final float fv; + @Override + public Number getNumber() { + return constant; + } - public LongConstValueSource(long constant) { - this.constant = constant; - this.dv = constant; - this.fv = constant; + @Override + public boolean getBool() { + return constant != 0; + } } - @Override - public String description() { - return "const(" + constant + ")"; + abstract static class NamedParser extends ValueSourceParser { + private final String name; + public NamedParser(String name) { + this.name = name; + } + public String name() { + return name; + } } - @Override - public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { - return new LongDocValues(this) { - @Override - public float floatVal(int doc) { - return fv; - } + abstract static class DoubleParser extends NamedParser { + public DoubleParser(String name) { + super(name); + } - @Override - public int intVal(int doc) { - return (int) constant; - } + public abstract double func(int doc, FunctionValues vals) throws IOException; - @Override - public long longVal(int doc) { - return constant; + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new Function(fp.parseValueSource()); + } + + class Function extends SingleFunction { + public Function(ValueSource source) { + super(source); } @Override - public double doubleVal(int doc) { - return dv; + public String name() { + return DoubleParser.this.name(); } @Override - public String toString(int doc) { - return description(); + public FunctionValues getValues(@SuppressWarnings({"rawtypes"})Map context, LeafReaderContext readerContext) throws IOException { + @SuppressWarnings({"unchecked"}) + final FunctionValues vals = source.getValues(context, readerContext); + return new DoubleDocValues(this) { + @Override + public double doubleVal(int doc) throws IOException { + return func(doc, vals); + } + @Override + public String toString(int doc) throws IOException { + return name() + '(' + vals.toString(doc) + ')'; + } + }; } - }; - } - - @Override - public int hashCode() { - return (int) constant + (int) (constant >>> 32); + } } - @Override - public boolean equals(Object o) { - if (LongConstValueSource.class != o.getClass()) return false; - LongConstValueSource other = (LongConstValueSource) o; - return this.constant == other.constant; - } + abstract static class Double2Parser extends NamedParser { + public Double2Parser(String name) { + super(name); + } - @Override - public int getInt() { - return (int)constant; - } + public abstract double func(int doc, FunctionValues a, FunctionValues b) throws IOException; - @Override - public long getLong() { - return constant; - } + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new Function(fp.parseValueSource(), fp.parseValueSource()); + } - @Override - public float getFloat() { - return fv; - } + class Function extends ValueSource { + private final ValueSource a; + private final ValueSource b; - @Override - public double getDouble() { - return dv; - } + /** + * @param a the base. + * @param b the exponent. 
+ */ + public Function(ValueSource a, ValueSource b) { + this.a = a; + this.b = b; + } - @Override - public Number getNumber() { - return constant; - } + @Override + public String description() { + return name() + "(" + a.description() + "," + b.description() + ")"; + } - @Override - public boolean getBool() { - return constant != 0; - } -} + @Override + public FunctionValues getValues(@SuppressWarnings({"rawtypes"})Map context, LeafReaderContext readerContext) throws IOException { + @SuppressWarnings({"unchecked"}) + final FunctionValues aVals = a.getValues(context, readerContext); + @SuppressWarnings({"unchecked"}) + final FunctionValues bVals = b.getValues(context, readerContext); + return new DoubleDocValues(this) { + @Override + public double doubleVal(int doc) throws IOException { + return func(doc, aVals, bVals); + } + @Override + public String toString(int doc) throws IOException { + return name() + '(' + aVals.toString(doc) + ',' + bVals.toString(doc) + ')'; + } + }; + } + @Override + public void createWeight(@SuppressWarnings({"rawtypes"})Map context, IndexSearcher searcher) throws IOException { + } -abstract class NamedParser extends ValueSourceParser { - private final String name; - public NamedParser(String name) { - this.name = name; - } - public String name() { - return name; - } -} + @Override + public int hashCode() { + int h = a.hashCode(); + h ^= (h << 13) | (h >>> 20); + h += b.hashCode(); + h ^= (h << 23) | (h >>> 10); + h += name().hashCode(); + return h; + } + @Override + public boolean equals(Object o) { + if (this.getClass() != o.getClass()) return false; + Function other = (Function)o; + return this.a.equals(other.a) + && this.b.equals(other.b); + } + } -abstract class DoubleParser extends NamedParser { - public DoubleParser(String name) { - super(name); } - public abstract double func(int doc, FunctionValues vals) throws IOException; + static class BoolConstValueSource extends ConstNumberSource { + public static final BoolConstValueSource TRUE = new BoolConstValueSource(true); + public static final BoolConstValueSource FALSE = new BoolConstValueSource(false); - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return new Function(fp.parseValueSource()); - } + final boolean constant; - class Function extends SingleFunction { - public Function(ValueSource source) { - super(source); + private BoolConstValueSource(boolean constant) { + this.constant = constant; } @Override - public String name() { - return DoubleParser.this.name(); + public String description() { + return "const(" + constant + ")"; } @Override - public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { - final FunctionValues vals = source.getValues(context, readerContext); - return new DoubleDocValues(this) { - @Override - public double doubleVal(int doc) throws IOException { - return func(doc, vals); - } + public FunctionValues getValues(@SuppressWarnings({"rawtypes"})Map context, + LeafReaderContext readerContext) throws IOException { + return new BoolDocValues(this) { @Override - public String toString(int doc) throws IOException { - return name() + '(' + vals.toString(doc) + ')'; + public boolean boolVal(int doc) { + return constant; } }; } - } -} - - -abstract class Double2Parser extends NamedParser { - public Double2Parser(String name) { - super(name); - } - - public abstract double func(int doc, FunctionValues a, FunctionValues b) throws IOException; - @Override - public ValueSource parse(FunctionQParser fp) throws 
SyntaxError { - return new Function(fp.parseValueSource(), fp.parseValueSource()); - } - - class Function extends ValueSource { - private final ValueSource a; - private final ValueSource b; - - /** - * @param a the base. - * @param b the exponent. - */ - public Function(ValueSource a, ValueSource b) { - this.a = a; - this.b = b; + @Override + public int hashCode() { + return constant ? 0x12345678 : 0x87654321; } @Override - public String description() { - return name() + "(" + a.description() + "," + b.description() + ")"; + public boolean equals(Object o) { + if (BoolConstValueSource.class != o.getClass()) return false; + BoolConstValueSource other = (BoolConstValueSource) o; + return this.constant == other.constant; } @Override - public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { - final FunctionValues aVals = a.getValues(context, readerContext); - final FunctionValues bVals = b.getValues(context, readerContext); - return new DoubleDocValues(this) { - @Override - public double doubleVal(int doc) throws IOException { - return func(doc, aVals, bVals); - } - @Override - public String toString(int doc) throws IOException { - return name() + '(' + aVals.toString(doc) + ',' + bVals.toString(doc) + ')'; - } - }; + public int getInt() { + return constant ? 1 : 0; } @Override - public void createWeight(Map context, IndexSearcher searcher) throws IOException { + public long getLong() { + return constant ? 1 : 0; } @Override - public int hashCode() { - int h = a.hashCode(); - h ^= (h << 13) | (h >>> 20); - h += b.hashCode(); - h ^= (h << 23) | (h >>> 10); - h += name().hashCode(); - return h; + public float getFloat() { + return constant ? 1 : 0; } @Override - public boolean equals(Object o) { - if (this.getClass() != o.getClass()) return false; - Function other = (Function)o; - return this.a.equals(other.a) - && this.b.equals(other.b); + public double getDouble() { + return constant ? 1 : 0; } - } - -} - - -class BoolConstValueSource extends ConstNumberSource { - final boolean constant; - - public BoolConstValueSource(boolean constant) { - this.constant = constant; - } - - @Override - public String description() { - return "const(" + constant + ")"; - } - @Override - public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { - return new BoolDocValues(this) { - @Override - public boolean boolVal(int doc) { - return constant; - } - }; - } - - @Override - public int hashCode() { - return constant ? 0x12345678 : 0x87654321; - } - - @Override - public boolean equals(Object o) { - if (BoolConstValueSource.class != o.getClass()) return false; - BoolConstValueSource other = (BoolConstValueSource) o; - return this.constant == other.constant; - } - - @Override - public int getInt() { - return constant ? 1 : 0; - } - - @Override - public long getLong() { - return constant ? 1 : 0; - } + @Override + public Number getNumber() { + return constant ? 1 : 0; + } - @Override - public float getFloat() { - return constant ? 1 : 0; + @Override + public boolean getBool() { + return constant; + } } - @Override - public double getDouble() { - return constant ? 1 : 0; - } + static class TestValueSource extends ValueSource { + ValueSource source; - @Override - public Number getNumber() { - return constant ? 
1 : 0; - } + public TestValueSource(ValueSource source) { + this.source = source; + } - @Override - public boolean getBool() { - return constant; - } -} + @Override + @SuppressWarnings({"unchecked"}) + public FunctionValues getValues(@SuppressWarnings({"rawtypes"})Map context + , LeafReaderContext readerContext) throws IOException { + if (context.get(this) == null) { + SolrRequestInfo requestInfo = SolrRequestInfo.getRequestInfo(); + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "testfunc: unweighted value source detected. delegate="+source + " request=" + (requestInfo==null ? "null" : requestInfo.getReq())); + } + return source.getValues(context, readerContext); + } + @Override + public boolean equals(Object o) { + return o instanceof TestValueSource && source.equals(((TestValueSource)o).source); + } -class TestValueSource extends ValueSource { - ValueSource source; - - public TestValueSource(ValueSource source) { - this.source = source; - } - - @Override - public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { - if (context.get(this) == null) { - SolrRequestInfo requestInfo = SolrRequestInfo.getRequestInfo(); - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "testfunc: unweighted value source detected. delegate="+source + " request=" + (requestInfo==null ? "null" : requestInfo.getReq())); + @Override + public int hashCode() { + return source.hashCode() + TestValueSource.class.hashCode(); } - return source.getValues(context, readerContext); - } - @Override - public boolean equals(Object o) { - return o instanceof TestValueSource && source.equals(((TestValueSource)o).source); - } + @Override + public String description() { + return "testfunc(" + source.description() + ')'; + } - @Override - public int hashCode() { - return source.hashCode() + TestValueSource.class.hashCode(); - } + @Override + @SuppressWarnings({"unchecked"}) + public void createWeight(@SuppressWarnings({"rawtypes"})Map context, IndexSearcher searcher) throws IOException { + context.put(this, this); + } - @Override - public String description() { - return "testfunc(" + source.description() + ')'; + @Override + public SortField getSortField(boolean reverse) { + return super.getSortField(reverse); + } } +} - @Override - public void createWeight(Map context, IndexSearcher searcher) throws IOException { - context.put(this, this); - } - @Override - public SortField getSortField(boolean reverse) { - return super.getSortField(reverse); - } -} diff --git a/solr/core/src/java/org/apache/solr/search/XmlQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/XmlQParserPlugin.java index 26ee523f3660..a09f621ca4f6 100755 --- a/solr/core/src/java/org/apache/solr/search/XmlQParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/XmlQParserPlugin.java @@ -66,10 +66,11 @@ public class XmlQParserPlugin extends QParserPlugin { public static final String NAME = "xmlparser"; + @SuppressWarnings({"rawtypes"}) private NamedList args; @Override - public void init( NamedList args ) { + public void init( @SuppressWarnings({"rawtypes"})NamedList args ) { super.init(args); this.args = args; } diff --git a/solr/core/src/java/org/apache/solr/search/facet/AggValueSource.java b/solr/core/src/java/org/apache/solr/search/facet/AggValueSource.java index 1678c6fa0970..da83b9125b7e 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/AggValueSource.java +++ b/solr/core/src/java/org/apache/solr/search/facet/AggValueSource.java @@ -44,6 +44,7 @@ public boolean 
equals(Object o) { } @Override + @SuppressWarnings({"rawtypes"}) public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { // FUTURE throw new UnsupportedOperationException("NOT IMPLEMENTED " + name + " " + this); diff --git a/solr/core/src/java/org/apache/solr/search/facet/AvgAgg.java b/solr/core/src/java/org/apache/solr/search/facet/AvgAgg.java index 3b6cee03d56c..7036c3027719 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/AvgAgg.java +++ b/solr/core/src/java/org/apache/solr/search/facet/AvgAgg.java @@ -62,7 +62,7 @@ public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) } vs = sf.getType().getValueSource(sf, null); } - return new AvgSlotAcc(vs, fcontext, numSlots); + return new SlotAcc.AvgSlotAcc(vs, fcontext, numSlots); } @Override @@ -70,12 +70,13 @@ public FacetMerger createFacetMerger(Object prototype) { return new Merger(); } - private static class Merger extends FacetDoubleMerger { + private static class Merger extends FacetModule.FacetDoubleMerger { long num; double sum; @Override public void merge(Object facetResult, Context mcontext1) { + @SuppressWarnings({"unchecked"}) List numberList = (List) facetResult; num += numberList.get(0).longValue(); sum += numberList.get(1).doubleValue(); @@ -88,7 +89,7 @@ protected double getDouble() { } } - class AvgSortedNumericAcc extends DoubleSortedNumericDVAcc { + class AvgSortedNumericAcc extends DocValuesAcc.DoubleSortedNumericDVAcc { int[] counts; public AvgSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { @@ -114,6 +115,7 @@ public int compare(int slotA, int slotB) { } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public Object getValue(int slot) { if (fcontext.isShard()) { ArrayList lst = new ArrayList(2); @@ -134,11 +136,11 @@ public void reset() throws IOException { @Override public void resize(Resizer resizer) { super.resize(resizer); - resizer.resize(counts, 0); + this.counts = resizer.resize(counts, 0); } } - class AvgSortedSetAcc extends DoubleSortedSetDVAcc { + class AvgSortedSetAcc extends DocValuesAcc.DoubleSortedSetDVAcc { int[] counts; public AvgSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { @@ -168,6 +170,7 @@ public int compare(int slotA, int slotB) { } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public Object getValue(int slot) { if (fcontext.isShard()) { ArrayList lst = new ArrayList(2); @@ -188,11 +191,11 @@ public void reset() throws IOException { @Override public void resize(Resizer resizer) { super.resize(resizer); - resizer.resize(counts, 0); + this.counts = resizer.resize(counts, 0); } } - class AvgUnInvertedFieldAcc extends DoubleUnInvertedFieldAcc { + class AvgUnInvertedFieldAcc extends UnInvertedFieldAcc.DoubleUnInvertedFieldAcc { int[] counts; public AvgUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { @@ -224,6 +227,7 @@ public int compare(int slotA, int slotB) { } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public Object getValue(int slot) { if (fcontext.isShard()) { ArrayList lst = new ArrayList(2); @@ -244,7 +248,7 @@ public void reset() throws IOException { @Override public void resize(Resizer resizer) { super.resize(resizer); - resizer.resize(counts, 0); + this.counts = resizer.resize(counts, 0); } } } diff --git a/solr/core/src/java/org/apache/solr/search/facet/CountAgg.java b/solr/core/src/java/org/apache/solr/search/facet/CountAgg.java index 
491622d2afea..e2f4e9105a13 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/CountAgg.java +++ b/solr/core/src/java/org/apache/solr/search/facet/CountAgg.java @@ -25,11 +25,11 @@ public CountAgg() { @Override public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException { - return new CountSlotArrAcc(fcontext, numSlots); + return new SlotAcc.CountSlotArrAcc(fcontext, numSlots); } @Override public FacetMerger createFacetMerger(Object prototype) { - return new FacetLongMerger(); + return new FacetModule.FacetLongMerger(); } } diff --git a/solr/core/src/java/org/apache/solr/search/facet/CountValsAgg.java b/solr/core/src/java/org/apache/solr/search/facet/CountValsAgg.java index 81fa983b926c..6415ff635291 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/CountValsAgg.java +++ b/solr/core/src/java/org/apache/solr/search/facet/CountValsAgg.java @@ -64,10 +64,10 @@ public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) @Override public FacetMerger createFacetMerger(Object prototype) { - return new FacetLongMerger(); + return new FacetModule.FacetLongMerger(); } - class CountValSlotAcc extends LongFuncSlotAcc { + class CountValSlotAcc extends SlotAcc.LongFuncSlotAcc { public CountValSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) { super(values, fcontext, numSlots, 0); @@ -81,7 +81,7 @@ public void collect(int doc, int slot, IntFunction slotContext) thr } } - class CountSortedNumericDVAcc extends LongSortedNumericDVAcc { + class CountSortedNumericDVAcc extends DocValuesAcc.LongSortedNumericDVAcc { public CountSortedNumericDVAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { super(fcontext, sf, numSlots, 0); @@ -93,7 +93,7 @@ protected void collectValues(int doc, int slot) throws IOException { } } - class CountSortedSetDVAcc extends LongSortedSetDVAcc { + class CountSortedSetDVAcc extends DocValuesAcc.LongSortedSetDVAcc { public CountSortedSetDVAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { super(fcontext, sf, numSlots, 0); @@ -140,7 +140,7 @@ public void reset() throws IOException { @Override public void resize(Resizer resizer) { - resizer.resize(result, 0); + this.result = resizer.resize(result, 0); } @Override diff --git a/solr/core/src/java/org/apache/solr/search/facet/DocValuesAcc.java b/solr/core/src/java/org/apache/solr/search/facet/DocValuesAcc.java index bd52c42c50b7..547040e01124 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/DocValuesAcc.java +++ b/solr/core/src/java/org/apache/solr/search/facet/DocValuesAcc.java @@ -58,368 +58,371 @@ public void collect(int doc, int slot, IntFunction slotContext) thr * returns whether or not given {@code doc} has value */ protected abstract boolean advanceExact(int doc) throws IOException; -} -/** - * Accumulator for {@link NumericDocValues} - */ -abstract class NumericDVAcc extends DocValuesAcc { - NumericDocValues values; - public NumericDVAcc(FacetContext fcontext, SchemaField sf) throws IOException { - super(fcontext, sf); - } + /** + * Accumulator for {@link NumericDocValues} + */ + abstract class NumericDVAcc extends DocValuesAcc { + NumericDocValues values; - @Override - public void setNextReader(LeafReaderContext readerContext) throws IOException { - super.setNextReader(readerContext); - values = DocValues.getNumeric(readerContext.reader(), sf.getName()); - } + public NumericDVAcc(FacetContext fcontext, SchemaField sf) throws IOException { + super(fcontext, sf); + } - 
@Override - protected boolean advanceExact(int doc) throws IOException { - return values.advanceExact(doc); + @Override + public void setNextReader(LeafReaderContext readerContext) throws IOException { + super.setNextReader(readerContext); + values = DocValues.getNumeric(readerContext.reader(), sf.getName()); + } + + @Override + protected boolean advanceExact(int doc) throws IOException { + return values.advanceExact(doc); + } } -} -/** - * Accumulator for {@link SortedNumericDocValues} - */ -abstract class SortedNumericDVAcc extends DocValuesAcc { - SortedNumericDocValues values; + /** + * Accumulator for {@link SortedNumericDocValues} + */ + abstract static class SortedNumericDVAcc extends DocValuesAcc { + SortedNumericDocValues values; - public SortedNumericDVAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { - super(fcontext, sf); - } + public SortedNumericDVAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { + super(fcontext, sf); + } - @Override - public void setNextReader(LeafReaderContext readerContext) throws IOException { - super.setNextReader(readerContext); - values = DocValues.getSortedNumeric(readerContext.reader(), sf.getName()); - } + @Override + public void setNextReader(LeafReaderContext readerContext) throws IOException { + super.setNextReader(readerContext); + values = DocValues.getSortedNumeric(readerContext.reader(), sf.getName()); + } - @Override - protected boolean advanceExact(int doc) throws IOException { - return values.advanceExact(doc); + @Override + protected boolean advanceExact(int doc) throws IOException { + return values.advanceExact(doc); + } } -} -abstract class LongSortedNumericDVAcc extends SortedNumericDVAcc { - long[] result; - long initialValue; + abstract static class LongSortedNumericDVAcc extends SortedNumericDVAcc { + long[] result; + long initialValue; + + public LongSortedNumericDVAcc(FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException { + super(fcontext, sf, numSlots); + this.result = new long[numSlots]; + this.initialValue = initialValue; + if (initialValue != 0) { + Arrays.fill(result, initialValue); + } + } + + @Override + public int compare(int slotA, int slotB) { + return Long.compare(result[slotA], result[slotB]); + } - public LongSortedNumericDVAcc(FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException { - super(fcontext, sf, numSlots); - this.result = new long[numSlots]; - this.initialValue = initialValue; - if (initialValue != 0) { + @Override + public Object getValue(int slotNum) throws IOException { + return result[slotNum]; + } + + @Override + public void reset() throws IOException { Arrays.fill(result, initialValue); } - } - @Override - public int compare(int slotA, int slotB) { - return Long.compare(result[slotA], result[slotB]); - } + @Override + public void resize(Resizer resizer) { + this.result = resizer.resize(result, initialValue); + } - @Override - public Object getValue(int slotNum) throws IOException { - return result[slotNum]; } - @Override - public void reset() throws IOException { - Arrays.fill(result, initialValue); - } + abstract static class DoubleSortedNumericDVAcc extends SortedNumericDVAcc { + double[] result; + double initialValue; - @Override - public void resize(Resizer resizer) { - resizer.resize(result, initialValue); - } + public DoubleSortedNumericDVAcc(FacetContext fcontext, SchemaField sf, int numSlots, double initialValue) throws IOException { + super(fcontext, sf, numSlots); 
+ this.result = new double[numSlots]; + this.initialValue = initialValue; + if (initialValue != 0) { + Arrays.fill(result, initialValue); + } + } -} + @Override + public int compare(int slotA, int slotB) { + return Double.compare(result[slotA], result[slotB]); + } -abstract class DoubleSortedNumericDVAcc extends SortedNumericDVAcc { - double[] result; - double initialValue; + @Override + public Object getValue(int slotNum) throws IOException { + return result[slotNum]; + } - public DoubleSortedNumericDVAcc(FacetContext fcontext, SchemaField sf, int numSlots, double initialValue) throws IOException { - super(fcontext, sf, numSlots); - this.result = new double[numSlots]; - this.initialValue = initialValue; - if (initialValue != 0) { + @Override + public void reset() throws IOException { Arrays.fill(result, initialValue); } - } - @Override - public int compare(int slotA, int slotB) { - return Double.compare(result[slotA], result[slotB]); - } - - @Override - public Object getValue(int slotNum) throws IOException { - return result[slotNum]; - } + @Override + public void resize(Resizer resizer) { + this.result = resizer.resize(result, initialValue); + } - @Override - public void reset() throws IOException { - Arrays.fill(result, initialValue); - } + /** + * converts given long value to double based on field type + */ + protected double getDouble(long val) { + switch (sf.getType().getNumberType()) { + case INTEGER: + case LONG: + case DATE: + return val; + case FLOAT: + return NumericUtils.sortableIntToFloat((int) val); + case DOUBLE: + return NumericUtils.sortableLongToDouble(val); + default: + // this would never happen + return 0.0d; + } + } - @Override - public void resize(Resizer resizer) { - resizer.resize(result, initialValue); } /** - * converts given long value to double based on field type + * Base class for standard deviation and variance computation for fields with {@link SortedNumericDocValues} */ - protected double getDouble(long val) { - switch (sf.getType().getNumberType()) { - case INTEGER: - case LONG: - case DATE: - return val; - case FLOAT: - return NumericUtils.sortableIntToFloat((int) val); - case DOUBLE: - return NumericUtils.sortableLongToDouble(val); - default: - // this would never happen - return 0.0d; + abstract static class SDVSortedNumericAcc extends DoubleSortedNumericDVAcc { + int[] counts; + double[] sum; + + public SDVSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { + super(fcontext, sf, numSlots, 0); + this.counts = new int[numSlots]; + this.sum = new double[numSlots]; } - } -} + @Override + protected void collectValues(int doc, int slot) throws IOException { + for (int i = 0, count = values.docValueCount(); i < count; i++) { + double val = getDouble(values.nextValue()); + result[slot] += val * val; + sum[slot] += val; + counts[slot]++; + } + } -/** - * Base class for standard deviation and variance computation for fields with {@link SortedNumericDocValues} - */ -abstract class SDVSortedNumericAcc extends DoubleSortedNumericDVAcc { - int[] counts; - double[] sum; - - public SDVSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { - super(fcontext, sf, numSlots, 0); - this.counts = new int[numSlots]; - this.sum = new double[numSlots]; - } + protected abstract double computeVal(int slot); - @Override - protected void collectValues(int doc, int slot) throws IOException { - for (int i = 0, count = values.docValueCount(); i < count; i++) { - double val = getDouble(values.nextValue()); - 
result[slot]+= val * val; - sum[slot]+= val; - counts[slot]++; + @Override + public int compare(int slotA, int slotB) { + return Double.compare(computeVal(slotA), computeVal(slotB)); } - } - protected abstract double computeVal(int slot); + @Override + @SuppressWarnings({"unchecked", "rawtypes"}) + public Object getValue(int slot) { + if (fcontext.isShard()) { + ArrayList lst = new ArrayList(3); + lst.add(counts[slot]); + lst.add(result[slot]); + lst.add(sum[slot]); + return lst; + } else { + return computeVal(slot); + } + } - @Override - public int compare(int slotA, int slotB) { - return Double.compare(computeVal(slotA), computeVal(slotB)); - } + @Override + public void reset() throws IOException { + super.reset(); + Arrays.fill(counts, 0); + Arrays.fill(sum, 0); + } - @Override - public Object getValue(int slot) { - if (fcontext.isShard()) { - ArrayList lst = new ArrayList(3); - lst.add(counts[slot]); - lst.add(result[slot]); - lst.add(sum[slot]); - return lst; - } else { - return computeVal(slot); + @Override + public void resize(Resizer resizer) { + super.resize(resizer); + this.counts = resizer.resize(counts, 0); + this.sum = resizer.resize(sum, 0); } } - @Override - public void reset() throws IOException { - super.reset(); - Arrays.fill(counts, 0); - Arrays.fill(sum, 0); - } + /** + * Accumulator for {@link SortedDocValues} + */ + abstract class SortedDVAcc extends DocValuesAcc { + SortedDocValues values; - @Override - public void resize(Resizer resizer) { - super.resize(resizer); - resizer.resize(counts, 0); - resizer.resize(sum, 0); - } -} + public SortedDVAcc(FacetContext fcontext, SchemaField sf) throws IOException { + super(fcontext, sf); + } -/** - * Accumulator for {@link SortedDocValues} - */ -abstract class SortedDVAcc extends DocValuesAcc { - SortedDocValues values; + @Override + public void setNextReader(LeafReaderContext readerContext) throws IOException { + super.setNextReader(readerContext); + values = DocValues.getSorted(readerContext.reader(), sf.getName()); + } - public SortedDVAcc(FacetContext fcontext, SchemaField sf) throws IOException { - super(fcontext, sf); + @Override + protected boolean advanceExact(int doc) throws IOException { + return values.advanceExact(doc); + } } - @Override - public void setNextReader(LeafReaderContext readerContext) throws IOException { - super.setNextReader(readerContext); - values = DocValues.getSorted(readerContext.reader(), sf.getName()); - } + /** + * Accumulator for {@link SortedSetDocValues} + */ + abstract static class SortedSetDVAcc extends DocValuesAcc { + SortedSetDocValues values; - @Override - protected boolean advanceExact(int doc) throws IOException { - return values.advanceExact(doc); - } -} + public SortedSetDVAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { + super(fcontext, sf); + } -/** - * Accumulator for {@link SortedSetDocValues} - */ -abstract class SortedSetDVAcc extends DocValuesAcc { - SortedSetDocValues values; + @Override + public void setNextReader(LeafReaderContext readerContext) throws IOException { + super.setNextReader(readerContext); + values = DocValues.getSortedSet(readerContext.reader(), sf.getName()); + } - public SortedSetDVAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { - super(fcontext, sf); + @Override + protected boolean advanceExact(int doc) throws IOException { + return values.advanceExact(doc); + } } - @Override - public void setNextReader(LeafReaderContext readerContext) throws IOException { - super.setNextReader(readerContext); 
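
[Editorial aside, not part of the patch.] A fix repeated throughout these accumulator hunks (for counts, sum, and result) is changing resizer.resize(arr, 0) to this.arr = resizer.resize(arr, 0). The Resizer returns a freshly allocated array rather than growing its argument in place, so discarding the return value left the accumulator pointing at the old, too-small array. A toy illustration of the bug pattern, with a stand-in for the real SlotAcc.Resizer:

    import java.util.Arrays;

    public class ResizeDemo {
      // Stand-in for SlotAcc.Resizer: returns a *new*, larger array seeded
      // with a default value; it never mutates its argument.
      static int[] resize(int[] old, int defaultValue, int newSize) {
        int[] grown = new int[newSize];
        Arrays.fill(grown, defaultValue);
        System.arraycopy(old, 0, grown, 0, old.length);
        return grown;
      }

      public static void main(String[] args) {
        int[] counts = {1, 2, 3};

        resize(counts, 0, 6);                // bug: result discarded
        System.out.println(counts.length);   // still 3

        counts = resize(counts, 0, 6);       // fix: assign the returned array back
        System.out.println(counts.length);   // 6
      }
    }

Without the assignment, the next collect() into a slot beyond the old length would throw ArrayIndexOutOfBoundsException, which is why the patch touches every resize() override in these SlotAcc subclasses.
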
- values = DocValues.getSortedSet(readerContext.reader(), sf.getName()); - } + abstract static class LongSortedSetDVAcc extends SortedSetDVAcc { + long[] result; + long initialValue; - @Override - protected boolean advanceExact(int doc) throws IOException { - return values.advanceExact(doc); - } -} + public LongSortedSetDVAcc(FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException { + super(fcontext, sf, numSlots); + result = new long[numSlots]; + this.initialValue = initialValue; + if (initialValue != 0) { + Arrays.fill(result, initialValue); + } + } + + @Override + public int compare(int slotA, int slotB) { + return Long.compare(result[slotA], result[slotB]); + } -abstract class LongSortedSetDVAcc extends SortedSetDVAcc { - long[] result; - long initialValue; + @Override + public Object getValue(int slotNum) throws IOException { + return result[slotNum]; + } - public LongSortedSetDVAcc(FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException { - super(fcontext, sf, numSlots); - result = new long[numSlots]; - this.initialValue = initialValue; - if (initialValue != 0) { + @Override + public void reset() throws IOException { Arrays.fill(result, initialValue); } - } - @Override - public int compare(int slotA, int slotB) { - return Long.compare(result[slotA], result[slotB]); + @Override + public void resize(Resizer resizer) { + this.result = resizer.resize(result, initialValue); + } } - @Override - public Object getValue(int slotNum) throws IOException { - return result[slotNum]; - } + abstract static class DoubleSortedSetDVAcc extends SortedSetDVAcc { + double[] result; + double initialValue; - @Override - public void reset() throws IOException { - Arrays.fill(result, initialValue); - } + public DoubleSortedSetDVAcc(FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException { + super(fcontext, sf, numSlots); + result = new double[numSlots]; + this.initialValue = initialValue; + if (initialValue != 0) { + Arrays.fill(result, initialValue); + } + } - @Override - public void resize(Resizer resizer) { - resizer.resize(result, initialValue); - } -} + @Override + public int compare(int slotA, int slotB) { + return Double.compare(result[slotA], result[slotB]); + } -abstract class DoubleSortedSetDVAcc extends SortedSetDVAcc { - double[] result; - double initialValue; + @Override + public Object getValue(int slotNum) throws IOException { + return result[slotNum]; + } - public DoubleSortedSetDVAcc(FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException { - super(fcontext, sf, numSlots); - result = new double[numSlots]; - this.initialValue = initialValue; - if (initialValue != 0) { + @Override + public void reset() throws IOException { Arrays.fill(result, initialValue); } - } - - @Override - public int compare(int slotA, int slotB) { - return Double.compare(result[slotA], result[slotB]); - } - @Override - public Object getValue(int slotNum) throws IOException { - return result[slotNum]; - } - - @Override - public void reset() throws IOException { - Arrays.fill(result, initialValue); - } - - @Override - public void resize(Resizer resizer) { - resizer.resize(result, initialValue); + @Override + public void resize(Resizer resizer) { + this.result = resizer.resize(result, initialValue); + } } -} -/** - * Base class for standard deviation and variance computation for fields with {@link SortedSetDocValues} - */ -abstract class SDVSortedSetAcc extends DoubleSortedSetDVAcc { - 
int[] counts; - double[] sum; - - public SDVSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { - super(fcontext, sf, numSlots, 0); - this.counts = new int[numSlots]; - this.sum = new double[numSlots]; - } + /** + * Base class for standard deviation and variance computation for fields with {@link SortedSetDocValues} + */ + abstract static class SDVSortedSetAcc extends DoubleSortedSetDVAcc { + int[] counts; + double[] sum; + + public SDVSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { + super(fcontext, sf, numSlots, 0); + this.counts = new int[numSlots]; + this.sum = new double[numSlots]; + } - @Override - protected void collectValues(int doc, int slot) throws IOException { - long ord; - while ((ord = values.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { - BytesRef term = values.lookupOrd(ord); - Object obj = sf.getType().toObject(sf, term); - double val = obj instanceof Date ? ((Date)obj).getTime(): ((Number)obj).doubleValue(); - result[slot] += val * val; - sum[slot] += val; - counts[slot]++; + @Override + protected void collectValues(int doc, int slot) throws IOException { + long ord; + while ((ord = values.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { + BytesRef term = values.lookupOrd(ord); + Object obj = sf.getType().toObject(sf, term); + double val = obj instanceof Date ? ((Date) obj).getTime() : ((Number) obj).doubleValue(); + result[slot] += val * val; + sum[slot] += val; + counts[slot]++; + } } - } - protected abstract double computeVal(int slot); + protected abstract double computeVal(int slot); - @Override - public int compare(int slotA, int slotB) { - return Double.compare(computeVal(slotA), computeVal(slotB)); - } + @Override + public int compare(int slotA, int slotB) { + return Double.compare(computeVal(slotA), computeVal(slotB)); + } - @Override - public Object getValue(int slot) { - if (fcontext.isShard()) { - ArrayList lst = new ArrayList(3); - lst.add(counts[slot]); - lst.add(result[slot]); - lst.add(sum[slot]); - return lst; - } else { - return computeVal(slot); + @Override + @SuppressWarnings({"unchecked", "rawtypes"}) + public Object getValue(int slot) { + if (fcontext.isShard()) { + ArrayList lst = new ArrayList(3); + lst.add(counts[slot]); + lst.add(result[slot]); + lst.add(sum[slot]); + return lst; + } else { + return computeVal(slot); + } } - } - @Override - public void reset() throws IOException { - super.reset(); - Arrays.fill(counts, 0); - Arrays.fill(sum, 0); - } + @Override + public void reset() throws IOException { + super.reset(); + Arrays.fill(counts, 0); + Arrays.fill(sum, 0); + } - @Override - public void resize(Resizer resizer) { - super.resize(resizer); - resizer.resize(counts, 0); - resizer.resize(sum, 0); + @Override + public void resize(Resizer resizer) { + super.resize(resizer); + this.counts = resizer.resize(counts, 0); + this.sum = resizer.resize(sum, 0); + } } } diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetBucket.java b/solr/core/src/java/org/apache/solr/search/facet/FacetBucket.java index ae1eba68488d..675a141dff84 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetBucket.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetBucket.java @@ -24,14 +24,17 @@ import org.apache.solr.common.util.SimpleOrderedMap; public class FacetBucket { - final FacetBucketMerger parent; + @SuppressWarnings("rawtypes") + final FacetModule.FacetBucketMerger parent; + @SuppressWarnings({"rawtypes"}) final Comparable bucketValue; final int 
bucketNumber; // this is just for internal correlation (the first bucket created is bucket 0, the next bucket 1, across all field buckets) long count; Map subs; - public FacetBucket(FacetBucketMerger parent, Comparable bucketValue, FacetMerger.Context mcontext) { + public FacetBucket(@SuppressWarnings("rawtypes") FacetModule.FacetBucketMerger parent + , @SuppressWarnings("rawtypes") Comparable bucketValue, FacetMerger.Context mcontext) { this.parent = parent; this.bucketValue = bucketValue; this.bucketNumber = mcontext.getNewBucketNumber(); // TODO: we don't need bucket numbers for all buckets... @@ -66,7 +69,7 @@ private FacetMerger getMerger(String key, Object prototype) { return merger; } - public void mergeBucket(SimpleOrderedMap bucket, FacetMerger.Context mcontext) { + public void mergeBucket(@SuppressWarnings("rawtypes") SimpleOrderedMap bucket, FacetMerger.Context mcontext) { // todo: for refinements, we want to recurse, but not re-do stats for intermediate buckets mcontext.setShardFlag(bucketNumber); @@ -93,6 +96,7 @@ public void mergeBucket(SimpleOrderedMap bucket, FacetMerger.Context mcontext) { } + @SuppressWarnings({"rawtypes", "unchecked"}) public SimpleOrderedMap getMergedBucket() { SimpleOrderedMap out = new SimpleOrderedMap( (subs == null ? 0 : subs.size()) + 2 ); if (bucketValue != null) { @@ -102,7 +106,10 @@ public SimpleOrderedMap getMergedBucket() { if (subs != null) { for (Map.Entry mergerEntry : subs.entrySet()) { FacetMerger subMerger = mergerEntry.getValue(); - out.add(mergerEntry.getKey(), subMerger.getMergedResult()); + Object mergedResult = subMerger.getMergedResult(); + if (null != mergedResult) { + out.add(mergerEntry.getKey(), mergedResult); + } } } diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetContext.java b/solr/core/src/java/org/apache/solr/search/facet/FacetContext.java new file mode 100644 index 000000000000..86aa3add7fc2 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetContext.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.search.facet; + +import java.util.Map; + +import org.apache.lucene.search.Query; +import org.apache.solr.request.SolrQueryRequest; +import org.apache.solr.search.DocSet; +import org.apache.solr.search.QueryContext; +import org.apache.solr.search.SolrIndexSearcher; + +public class FacetContext { + // Context info for actually executing a local facet command + public static final int IS_SHARD=0x01; + public static final int IS_REFINEMENT=0x02; + public static final int SKIP_FACET=0x04; // refinement: skip calculating this immediate facet, but proceed to specific sub-facets based on facetInfo + + FacetProcessor processor; + Map facetInfo; // refinement info for this node + QueryContext qcontext; + SolrQueryRequest req; // TODO: replace with params? + SolrIndexSearcher searcher; + Query filter; // TODO: keep track of as a DocSet or as a Query? + DocSet base; + FacetContext parent; + int flags; + FacetDebugInfo debugInfo; + + public void setDebugInfo(FacetDebugInfo debugInfo) { + this.debugInfo = debugInfo; + } + + public FacetDebugInfo getDebugInfo() { + return debugInfo; + } + + public boolean isShard() { + return (flags & IS_SHARD) != 0; + } + + /** + * @param filter The filter for the bucket that resulted in this context/domain. Can be null if this is the root context. + * @param domain The resulting set of documents for this facet. + */ + public FacetContext sub(Query filter, DocSet domain) { + FacetContext ctx = new FacetContext(); + ctx.parent = this; + ctx.base = domain; + ctx.filter = filter; + + // carry over from parent + ctx.flags = flags; + ctx.qcontext = qcontext; + ctx.req = req; + ctx.searcher = searcher; + + return ctx; + } +} diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetDebugInfo.java b/solr/core/src/java/org/apache/solr/search/facet/FacetDebugInfo.java index 2be2fef2d5dc..d6a36509d29f 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetDebugInfo.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetDebugInfo.java @@ -65,7 +65,7 @@ public Map getInfo() { return info; } - public SimpleOrderedMap getFacetDebugInfo() { + public SimpleOrderedMap getFacetDebugInfo() { SimpleOrderedMap info = new SimpleOrderedMap<>(); if (filter != null) info.add("filter", filter); diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetField.java b/solr/core/src/java/org/apache/solr/search/facet/FacetField.java index f2a3c2d07d0e..728cd6ea9660 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetField.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetField.java @@ -24,50 +24,6 @@ import org.apache.solr.schema.NumberType; import org.apache.solr.schema.SchemaField; - -// Any type of facet request that generates a variable number of buckets -// and the ability to sort by those generated buckets. -abstract class FacetRequestSorted extends FacetRequest { - long offset; - long limit; - /** - * Number of buckets to request beyond the limit to do internally during initial distributed search. - * -1 means default heuristic. - */ - int overrequest = -1; - /** - * Number of buckets to fill in beyond the limit to do internally during refinement of distributed search. - * -1 means default heuristic. - */ - int overrefine = -1; - long mincount; - /** - * The basic sorting to do on buckets, defaults to {@link FacetRequest.FacetSort#COUNT_DESC} - * @see #prelim_sort - */ - FacetSort sort; - /** - * An optional "Pre-Sort" that defaults to null. 
- * If specified, then the prelim_sort is used as an optimization in place of {@link #sort} - * during collection, and the full {@link #sort} values are only computed for the top candidate buckets - * (after refinement) - */ - FacetSort prelim_sort; - RefineMethod refine; // null, NONE, or SIMPLE - - @Override - public RefineMethod getRefineMethod() { - return refine; - } - - @Override - public boolean returnsPartial() { - return super.returnsPartial() || (limit > 0); - } - -} - - public class FacetField extends FacetRequestSorted { public static final int DEFAULT_FACET_LIMIT = 10; String field; @@ -114,6 +70,7 @@ public static FacetMethod fromString(String method) { } @Override + @SuppressWarnings("rawtypes") public FacetProcessor createFacetProcessor(FacetContext fcontext) { SchemaField sf = fcontext.searcher.getSchema().getField(field); FieldType ft = sf.getType(); diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java index f6276b5fd2c5..a1c39cff3a56 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java @@ -45,6 +45,7 @@ public FacetFieldMerger(FacetField freq) { } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void merge(Object facetResult, Context mcontext) { super.merge(facetResult, mcontext); if (numReturnedPerShard == null) { @@ -53,7 +54,7 @@ public void merge(Object facetResult, Context mcontext) { merge((SimpleOrderedMap)facetResult, mcontext); } - protected void merge(SimpleOrderedMap facetResult, Context mcontext) { + protected void merge(@SuppressWarnings("rawtypes") SimpleOrderedMap facetResult, Context mcontext) { if (freq.missing) { Object o = facetResult.get("missing"); if (o != null) { @@ -74,6 +75,8 @@ protected void merge(SimpleOrderedMap facetResult, Context mcontext) { } } + + @SuppressWarnings({"unchecked", "rawtypes"}) List bucketList = (List) facetResult.get("buckets"); numReturnedPerShard[mcontext.shardNum] = bucketList.size(); numReturnedBuckets += bucketList.size(); @@ -95,6 +98,7 @@ protected void merge(SimpleOrderedMap facetResult, Context mcontext) { @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public Object getMergedResult() { SimpleOrderedMap result = new SimpleOrderedMap(); @@ -199,6 +203,7 @@ private static class FacetNumBucketsMerger extends FacetMerger { Set values; @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void merge(Object facetResult, Context mcontext) { SimpleOrderedMap map = (SimpleOrderedMap)facetResult; long numBuckets = ((Number)map.get("numBuckets")).longValue(); diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java index fe418f1b385c..e3af5b37a58c 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java @@ -115,7 +115,7 @@ protected void createAccs(long docCount, int slotCount) throws IOException { // allow a custom count acc to be used if (countAcc == null) { - countAcc = new CountSlotArrAcc(fcontext, slotCount); + countAcc = new SlotAcc.CountSlotArrAcc(fcontext, slotCount); countAcc.key = "count"; } @@ -162,7 +162,7 @@ private SlotAcc getTrivialSortingSlotAcc(FacetRequest.FacetSort fsort) { if (indexOrderAcc == null) { // This sorting accumulator just goes by the slot number, 
so does not need to be collected // and hence does not need to find it's way into the accMap or accs array. - indexOrderAcc = new SortSlotAcc(fcontext); + indexOrderAcc = new SlotAcc.SortSlotAcc(fcontext); } return indexOrderAcc; } @@ -178,7 +178,7 @@ void createCollectAcc(int numDocs, int numSlots) throws IOException { // we always count... // allow a subclass to set a custom counter. if (countAcc == null) { - countAcc = new CountSlotArrAcc(fcontext, numSlots); + countAcc = new SlotAcc.CountSlotArrAcc(fcontext, numSlots); } sortAcc = getTrivialSortingSlotAcc(this.sort); @@ -292,8 +292,8 @@ void collectFirstPhase(int segDoc, int slot, IntFunction slotContex /** Processes the collected data to finds the top slots, and composes it in the response NamedList. */ SimpleOrderedMap findTopSlots(final int numSlots, final int slotCardinality, - IntFunction bucketValFromSlotNumFunc, - Function fieldQueryValFunc) throws IOException { + @SuppressWarnings("rawtypes") IntFunction bucketValFromSlotNumFunc, + @SuppressWarnings("rawtypes") Function fieldQueryValFunc) throws IOException { assert this.sortAcc != null; long numBuckets = 0; @@ -437,6 +437,7 @@ SimpleOrderedMap findTopSlots(final int numSlots, final int slotCardinal sortedSlots = Arrays.copyOfRange(sortedSlots, off, endOffset); } } + @SuppressWarnings({"rawtypes"}) List bucketList = new ArrayList<>(sortedSlots.length); for (Slot slot : sortedSlots) { @@ -492,6 +493,7 @@ private static class Slot { int slot; /** filled in only once we know the bucket will either be involved in resorting, or returned */ + @SuppressWarnings({"rawtypes"}) Comparable bucketVal; /** Filled in if and only if needed for resorting, deferred stats, or subfacets */ @@ -741,6 +743,22 @@ public void setValues(SimpleOrderedMap bucket, int slotNum) throws IOExc } } + private static final SlotContext ALL_BUCKETS_SLOT_CONTEXT = new SlotContext(null) { + @Override + public Query getSlotQuery() { + throw new IllegalStateException("getSlotQuery() is mutually exclusive with isAllBuckets==true"); + } + @Override + public boolean isAllBuckets() { + return true; + } + }; + private static final IntFunction ALL_BUCKETS_SLOT_FUNCTION = new IntFunction() { + @Override + public SlotContext apply(int value) { + return ALL_BUCKETS_SLOT_CONTEXT; + } + }; static class SpecialSlotAcc extends SlotAcc { SlotAcc collectAcc; @@ -769,11 +787,11 @@ public void collect(int doc, int slot, IntFunction slotContext) thr assert slot != collectAccSlot || slot < 0; count++; if (collectAcc != null) { - collectAcc.collect(doc, collectAccSlot, slotContext); + collectAcc.collect(doc, collectAccSlot, ALL_BUCKETS_SLOT_FUNCTION); } if (otherAccs != null) { for (SlotAcc otherAcc : otherAccs) { - otherAcc.collect(doc, otherAccsSlot, slotContext); + otherAcc.collect(doc, otherAccsSlot, ALL_BUCKETS_SLOT_FUNCTION); } } } @@ -839,10 +857,12 @@ public void resize(Resizer resizer) { "cat1":{"_l":["A"]}}} */ + @SuppressWarnings({"unchecked"}) static List asList(Object list) { return list != null ? 
(List)list : Collections.EMPTY_LIST; } + @SuppressWarnings({"rawtypes", "unchecked"}) protected SimpleOrderedMap refineFacets() throws IOException { boolean skipThisFacet = (fcontext.flags & SKIP_FACET) != 0; @@ -874,6 +894,7 @@ protected SimpleOrderedMap refineFacets() throws IOException { } // The only difference between skip and missing is the value of "skip" passed to refineBucket + for (List bucketAndFacetInfo : partial) { assert bucketAndFacetInfo.size() == 2; Object bucketVal = bucketAndFacetInfo.get(0); diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java index e5ee181b5cb1..dff72b474922 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java @@ -85,7 +85,11 @@ private SimpleOrderedMap calcFacets() throws IOException { if (refineResult != null) { if (freq.allBuckets) { + // count is irrelevant, but hardcoded in collect(...), so intercept/mask normal counts. + // Set here to prevent createAccs(...) from creating a 1-slot countAcc that will fail with AIOOBE + countAcc = SlotAcc.DEV_NULL_SLOT_ACC; createAccs(nDocs, 1); + otherAccs = accs; // accs is created above and set on allBucketsAcc; but during collection, setNextReader is called on otherAccs. allBucketsAcc = new SpecialSlotAcc(fcontext, null, -1, accs, 0); collectDocs(); @@ -135,7 +139,7 @@ private static String valueObjToString(Object obj) { */ public IntFunction slotContext = (slotNum) -> { try { - Object value = sf.getType().toObject(sf, lookupOrd(slotNum)); + Object value = sf.getType().toObject(sf, lookupOrd(slotNum + startTermIndex)); Query q = makeBucketQuery(valueObjToString(value)); assert null != q : "null query for: '" + value + "'"; return new SlotContext(q); diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByEnumTermsStream.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByEnumTermsStream.java index 8151aec41e7f..746915b4a3bc 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByEnumTermsStream.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByEnumTermsStream.java @@ -85,6 +85,7 @@ public void close() throws IOException { } @Override + @SuppressWarnings({"rawtypes"}) public void process() throws IOException { super.process(); diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByHashDV.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByHashDV.java index 966da872a6d4..e39055b8db67 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByHashDV.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByHashDV.java @@ -162,6 +162,7 @@ public long bitsToSortableBits(long globalOrd) { /** To be returned in "buckets"/"val" */ @Override + @SuppressWarnings({"rawtypes"}) public Comparable bitsToValue(long globalOrd) { BytesRef bytesRef = lookupOrdFunction.apply((int) globalOrd); // note FacetFieldProcessorByArray.findTopSlots also calls SchemaFieldType.toObject @@ -169,16 +170,19 @@ public Comparable bitsToValue(long globalOrd) { } @Override + @SuppressWarnings({"rawtypes"}) public String formatValue(Comparable val) { return (String) val; } @Override + @SuppressWarnings({"rawtypes"}) protected Comparable parseStr(String rawval) throws ParseException { throw new 
UnsupportedOperationException(); } @Override + @SuppressWarnings({"rawtypes"}) protected Comparable parseAndAddGap(Comparable value, String gap) throws ParseException { throw new UnsupportedOperationException(); } @@ -285,7 +289,7 @@ public void resize(Resizer resizer) { } }; - countAcc = new CountSlotAcc(fcontext) { + countAcc = new SlotAcc.CountSlotAcc(fcontext) { @Override public void incrementCount(int slot, long count) { throw new UnsupportedOperationException(); @@ -437,6 +441,7 @@ private void collectValFirstPhase(int segDoc, long val) throws IOException { */ private IntFunction slotContext = (slotNum) -> { long val = table.vals[slotNum]; + @SuppressWarnings({"rawtypes"}) Comparable value = calc.bitsToValue(val); return new SlotContext(sf.getType().getFieldQuery(null, sf, calc.formatValue(value))); }; diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetHeatmap.java b/solr/core/src/java/org/apache/solr/search/facet/FacetHeatmap.java index a87e9f2fe19c..4d17df528ada 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetHeatmap.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetHeatmap.java @@ -94,6 +94,7 @@ public class FacetHeatmap extends FacetRequest { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); static class Parser extends FacetParser { + @SuppressWarnings({"rawtypes"}) Parser(FacetParser parent, String key) { super(parent, key); } @@ -117,6 +118,7 @@ public FacetHeatmap parse(Object argsObj) { final DistanceUnits distanceUnits; // note: the two instanceof conditions is not ideal, versus one. If we start needing to add more then refactor. if ((type instanceof AbstractSpatialPrefixTreeFieldType)) { + @SuppressWarnings({"rawtypes"}) AbstractSpatialPrefixTreeFieldType rptType = (AbstractSpatialPrefixTreeFieldType) type; strategy = (PrefixTreeStrategy) rptType.getStrategy(fieldName); distanceUnits = rptType.getDistanceUnits(); @@ -204,17 +206,21 @@ public Map getFacetDescription() { } @Override + @SuppressWarnings({"rawtypes"}) public FacetProcessor createFacetProcessor(FacetContext fcontext) { return new FacetHeatmapProcessor(fcontext); } // don't use an anonymous class since the getSimpleName() isn't friendly in debug output + @SuppressWarnings({"rawtypes"}) private class FacetHeatmapProcessor extends FacetProcessor { + @SuppressWarnings({"unchecked"}) public FacetHeatmapProcessor(FacetContext fcontext) { super(fcontext, FacetHeatmap.this); } @Override + @SuppressWarnings({"unchecked"}) public void process() throws IOException { super.process(); // handles domain changes @@ -233,7 +239,7 @@ public void process() throws IOException { } //Populate response - response = new SimpleOrderedMap(); + response = new SimpleOrderedMap<>(); response.add("gridLevel", gridLevel); response.add("columns", heatmap.columns); response.add("rows", heatmap.rows); diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java b/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java index 847e651f98da..0bd6651de1ed 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java @@ -51,8 +51,8 @@ public class FacetModule extends SearchComponent { // The largest current flag in ShardRequest is 0x00002000 // We'll put our bits in the middle to avoid future ones in ShardRequest and // custom ones that may start at the top. 
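// [Editor's note] A quick check of the arithmetic the comment above relies on
// (flag values from this file; the composite is hypothetical): the largest
// ShardRequest flag is 0x00002000, so 0x00002000 | 0x00200000 = 0x00202000,
// and (0x00202000 & 0x00100000) == 0 -- setting the REFINE bit never implies
// the GET_JSON_FACETS bit, and no ShardRequest bit can mask either of them.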
- public final static int PURPOSE_GET_JSON_FACETS = 0x00100000; - public final static int PURPOSE_REFINE_JSON_FACETS = 0x00200000; + public final static int PURPOSE_GET_JSON_FACETS = 0x00100000; + public final static int PURPOSE_REFINE_JSON_FACETS = 0x00200000; // Internal information passed down from the top level to shards for distributed faceting. private final static String FACET_INFO = "_facet_"; @@ -67,11 +67,12 @@ public FacetComponentState getFacetComponentState(ResponseBuilder rb) { @Override + @SuppressWarnings({"unchecked"}) public void prepare(ResponseBuilder rb) throws IOException { - Map json = rb.req.getJSON(); - Map jsonFacet = null; + Map json = rb.req.getJSON(); + Map jsonFacet = null; if (json == null) { - int version = rb.req.getParams().getInt("facet.version",1); + int version = rb.req.getParams().getInt("facet.version", 1); if (version <= 1) return; boolean facetsEnabled = rb.req.getParams().getBool(FacetParams.FACET, false); if (!facetsEnabled) return; @@ -90,14 +91,15 @@ public void prepare(ResponseBuilder rb) throws IOException { SolrParams params = rb.req.getParams(); boolean isShard = params.getBool(ShardParams.IS_SHARD, false); - Map facetInfo = null; + @SuppressWarnings({"unchecked"}) + Map facetInfo = null; if (isShard) { String jfacet = params.get(FACET_INFO); if (jfacet == null) { // if this is a shard request, but there is no _facet_ info, then don't do anything. return; } - facetInfo = (Map) fromJSONString(jfacet); + facetInfo = (Map) fromJSONString(jfacet); } // At this point, we know we need to do something. Create and save the state. @@ -118,6 +120,7 @@ public void prepare(ResponseBuilder rb) throws IOException { @Override + @SuppressWarnings({"unchecked"}) public void process(ResponseBuilder rb) throws IOException { // if this is null, faceting is not enabled FacetComponentState facetState = getFacetComponentState(rb); @@ -132,7 +135,7 @@ public void process(ResponseBuilder rb) throws IOException { fcontext.qcontext = QueryContext.newContext(fcontext.searcher); if (isShard) { fcontext.flags |= FacetContext.IS_SHARD; - fcontext.facetInfo = facetState.facetInfo.isEmpty() ? null : (Map)facetState.facetInfo.get(FACET_REFINE); + fcontext.facetInfo = facetState.facetInfo.isEmpty() ? null : (Map) facetState.facetInfo.get(FACET_REFINE); if (fcontext.facetInfo != null) { fcontext.flags |= FacetContext.IS_REFINEMENT; fcontext.flags |= FacetContext.SKIP_FACET; // the root bucket should have been received from all shards previously @@ -170,7 +173,7 @@ public int distributedProcess(ResponseBuilder rb) throws IOException { } // Check if there are any refinements possible - if ((facetState.mcontext==null) ||facetState.mcontext.getSubsWithRefinement(facetState.facetRequest).isEmpty()) { + if ((facetState.mcontext == null) || facetState.mcontext.getSubsWithRefinement(facetState.facetRequest).isEmpty()) { clearFaceting(rb.outgoing); return ResponseBuilder.STAGE_DONE; } @@ -187,7 +190,7 @@ public int distributedProcess(ResponseBuilder rb) throws IOException { facetState.mcontext.setShard(shard); // shard-specific refinement - Map refinement = facetState.merger.getRefinement(facetState.mcontext); + Map refinement = facetState.merger.getRefinement(facetState.mcontext); if (refinement == null) continue; boolean newRequest = false; @@ -197,11 +200,10 @@ public int distributedProcess(ResponseBuilder rb) throws IOException { // If nshards becomes too great, we may want to move to hashing for // better scalability. 
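// [Editor's note] The scan below is linear in rb.outgoing for every shard, so
// roughly O(nshards^2) when most shards need refinement; the "hashing" idea in
// the comment above would key outgoing single-shard requests by sreq.shards[0]
// to make each lookup O(1). Sketch of the idea only, not part of this patch.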
for (ShardRequest sreq : rb.outgoing) { - if ( (sreq.purpose & (ShardRequest.PURPOSE_GET_FIELDS|ShardRequest.PURPOSE_REFINE_FACETS|ShardRequest.PURPOSE_REFINE_PIVOT_FACETS)) != 0 + if ((sreq.purpose & (ShardRequest.PURPOSE_GET_FIELDS | ShardRequest.PURPOSE_REFINE_FACETS | ShardRequest.PURPOSE_REFINE_PIVOT_FACETS)) != 0 && sreq.shards != null && sreq.shards.length == 1 - && sreq.shards[0].equals(shard)) - { + && sreq.shards[0].equals(shard)) { shardsRefineRequest = sreq; break; } @@ -212,7 +214,7 @@ public int distributedProcess(ResponseBuilder rb) throws IOException { // so create one ourselves. newRequest = true; shardsRefineRequest = new ShardRequest(); - shardsRefineRequest.shards = new String[] { shard }; + shardsRefineRequest.shards = new String[]{shard}; shardsRefineRequest.params = new ModifiableSolrParams(rb.req.getParams()); // don't request any documents shardsRefineRequest.params.remove(CommonParams.START); @@ -222,7 +224,7 @@ public int distributedProcess(ResponseBuilder rb) throws IOException { shardsRefineRequest.purpose |= PURPOSE_REFINE_JSON_FACETS; - Map finfo = new HashMap<>(1); + Map finfo = new HashMap<>(1); finfo.put(FACET_REFINE, refinement); // String finfoStr = JSONUtil.toJSON(finfo, -1); // this doesn't handle formatting of Date objects the way we want @@ -232,7 +234,7 @@ public int distributedProcess(ResponseBuilder rb) throws IOException { public void handleUnknownClass(Object o) { // handle date formatting correctly if (o instanceof Date) { - String s = ((Date)o).toInstant().toString(); + String s = ((Date) o).toInstant().toString(); writeString(s); return; } @@ -254,7 +256,7 @@ public void handleUnknownClass(Object o) { } @Override - public void modifyRequest(ResponseBuilder rb, SearchComponent who,ShardRequest sreq) { + public void modifyRequest(ResponseBuilder rb, SearchComponent who, ShardRequest sreq) { FacetComponentState facetState = getFacetComponentState(rb); if (facetState == null) return; @@ -264,8 +266,8 @@ public void modifyRequest(ResponseBuilder rb, SearchComponent who,ShardRequest s } else { // turn off faceting on other requests /*** distributedProcess will need to use other requests for refinement - sreq.params.remove("json.facet"); // this just saves space... the presence of FACET_INFO really control the faceting - sreq.params.remove(FACET_INFO); + sreq.params.remove("json.facet"); // this just saves space... 
the presence of FACET_INFO really control the faceting + sreq.params.remove(FACET_INFO); **/ } } @@ -281,15 +283,15 @@ public void handleResponses(ResponseBuilder rb, ShardRequest sreq) { if (top == null) continue; // shards.tolerant=true will cause this to happen on exceptions/errors Object facet = top.get("facets"); if (facet == null) { - SimpleOrderedMap shardResponseHeader = (SimpleOrderedMap)rsp.getResponse().get("responseHeader"); - if(Boolean.TRUE.equals(shardResponseHeader.getBooleanArg(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))) { + @SuppressWarnings("rawtypes") SimpleOrderedMap shardResponseHeader = (SimpleOrderedMap) rsp.getResponse().get("responseHeader"); + if (Boolean.TRUE.equals(shardResponseHeader.getBooleanArg(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))) { rb.rsp.getResponseHeader().asShallowMap().put(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY, Boolean.TRUE); } continue; } if (facetState.merger == null) { facetState.merger = facetState.facetRequest.createFacetMerger(facet); - facetState.mcontext = new FacetMerger.Context( sreq.responses.size() ); + facetState.mcontext = new FacetMerger.Context(sreq.responses.size()); } if ((sreq.purpose & PURPOSE_REFINE_JSON_FACETS) != 0) { @@ -297,14 +299,14 @@ public void handleResponses(ResponseBuilder rb, ShardRequest sreq) { // call merge again with a diff flag set on the context??? facetState.mcontext.root = facet; facetState.mcontext.setShard(shardRsp.getShard()); // TODO: roll newShard into setShard? - facetState.merger.merge(facet , facetState.mcontext); + facetState.merger.merge(facet, facetState.mcontext); return; } // System.err.println("MERGING FACET RESULT FROM SHARD = " + facet); facetState.mcontext.root = facet; facetState.mcontext.newShard(shardRsp.getShard()); - facetState.merger.merge(facet , facetState.mcontext); + facetState.merger.merge(facet, facetState.mcontext); } } @@ -330,182 +332,181 @@ public String getDescription() { public Category getCategory() { return Category.QUERY; } -} -// TODO: perhaps factor out some sort of root/parent facet object that doesn't depend + // TODO: perhaps factor out some sort of root/parent facet object that doesn't depend // on stuff like ResponseBuilder, but contains request parameters, // root filter lists (for filter exclusions), etc? -class FacetComponentState { - ResponseBuilder rb; - Map facetCommands; - FacetRequest facetRequest; - boolean isShard; - Map facetInfo; // _facet_ param: contains out-of-band facet info, mainly for refinement requests - - // - // Only used for distributed search - // - FacetMerger merger; - FacetMerger.Context mcontext; -} - -// base class for facet functions that can be used in a sort -abstract class FacetSortableMerger extends FacetMerger { - public void prepareSort() { + class FacetComponentState { + ResponseBuilder rb; + Map facetCommands; + FacetRequest facetRequest; + boolean isShard; + Map facetInfo; // _facet_ param: contains out-of-band facet info, mainly for refinement requests + + // + // Only used for distributed search + // + FacetMerger merger; + FacetMerger.Context mcontext; } - @Override - public void finish(Context mcontext) { - // nothing to do for simple stats... - } - - /** Return the normal comparison sort order. The sort direction is only to be used in special circumstances (such as making NaN sort - * last regardless of sort order.) Normal sorters do not need to pay attention to direction. 
- */ - public abstract int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction); -} - -abstract class FacetDoubleMerger extends FacetSortableMerger { - @Override - public abstract void merge(Object facetResult, Context mcontext); + // base class for facet functions that can be used in a sort + abstract static class FacetSortableMerger extends FacetMerger { + public void prepareSort() { + } - protected abstract double getDouble(); + @Override + public void finish(Context mcontext) { + // nothing to do for simple stats... + } - @Override - public Object getMergedResult() { - return getDouble(); + /** + * Return the normal comparison sort order. The sort direction is only to be used in special circumstances (such as making NaN sort + * last regardless of sort order.) Normal sorters do not need to pay attention to direction. + */ + public abstract int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction); } + abstract static class FacetDoubleMerger extends FacetSortableMerger { + @Override + public abstract void merge(Object facetResult, Context mcontext); - @Override - public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) { - return compare(getDouble(), ((FacetDoubleMerger)other).getDouble(), direction); - } - + protected abstract double getDouble(); - public static int compare(double a, double b, FacetRequest.SortDirection direction) { - if (a < b) return -1; - if (a > b) return 1; - - if (a != a) { // a==NaN - if (b != b) { - return 0; // both NaN - } - return -1 * direction.getMultiplier(); // asc==-1, so this will put NaN at end of sort - } - - if (b != b) { // b is NaN so a is greater - return 1 * direction.getMultiplier(); // if sorting asc, make a less so NaN is at end + @Override + public Object getMergedResult() { + return getDouble(); } - // consider +-0 to be equal - return 0; - } -} - + @Override + public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) { + return compare(getDouble(), ((FacetDoubleMerger) other).getDouble(), direction); + } + public static int compare(double a, double b, FacetRequest.SortDirection direction) { + if (a < b) return -1; + if (a > b) return 1; -class FacetLongMerger extends FacetSortableMerger { - long val; + if (a != a) { // a==NaN + if (b != b) { + return 0; // both NaN + } + return -1 * direction.getMultiplier(); // asc==-1, so this will put NaN at end of sort + } - @Override - public void merge(Object facetResult, Context mcontext) { - val += ((Number)facetResult).longValue(); - } + if (b != b) { // b is NaN so a is greater + return 1 * direction.getMultiplier(); // if sorting asc, make a less so NaN is at end + } - @Override - public Object getMergedResult() { - return val; + // consider +-0 to be equal + return 0; + } } - @Override - public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) { - return Long.compare(val, ((FacetLongMerger)other).val); - } -} + static class FacetLongMerger extends FacetSortableMerger { + long val; + @Override + public void merge(Object facetResult, Context mcontext) { + val += ((Number) facetResult).longValue(); + } -// base class for facets that create buckets (and can hence have sub-facets) -abstract class FacetBucketMerger extends FacetMerger { - FacetRequestT freq; + @Override + public Object getMergedResult() { + return val; + } - public FacetBucketMerger(FacetRequestT freq) { - this.freq = freq; + @Override + public int compareTo(FacetSortableMerger other, 
FacetRequest.SortDirection direction) { + return Long.compare(val, ((FacetLongMerger) other).val); + } } - /** Bucketval is the representative value for the bucket. Only applicable to terms and range queries to distinguish buckets. */ - FacetBucket newBucket(Comparable bucketVal, Context mcontext) { - return new FacetBucket(this, bucketVal, mcontext); - } - @Override - public Map getRefinement(Context mcontext) { - Collection refineTags = mcontext.getSubsWithRefinement(freq); - return null; // FIXME - } + // base class for facets that create buckets (and can hence have sub-facets) + abstract static class FacetBucketMerger extends FacetMerger { + FacetRequestT freq; - // do subs... + public FacetBucketMerger(FacetRequestT freq) { + this.freq = freq; + } - // callback stuff for buckets? - // passing object gives us a chance to specialize based on value - FacetMerger createFacetMerger(String key, Object val) { - FacetRequest sub = freq.getSubFacets().get(key); - if (sub != null) { - return sub.createFacetMerger(val); + /** + * Bucketval is the representative value for the bucket. Only applicable to terms and range queries to distinguish buckets. + */ + FacetBucket newBucket(@SuppressWarnings("rawtypes") Comparable bucketVal, Context mcontext) { + return new FacetBucket(this, bucketVal, mcontext); } - AggValueSource subStat = freq.getFacetStats().get(key); - if (subStat != null) { - return subStat.createFacetMerger(val); + @Override + public Map getRefinement(Context mcontext) { + Collection refineTags = mcontext.getSubsWithRefinement(freq); + return null; // FIXME } - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "no merger for key=" + key + " , val=" + val); - } -} + // do subs... + // callback stuff for buckets? + // passing object gives us a chance to specialize based on value + FacetMerger createFacetMerger(String key, Object val) { + FacetRequest sub = freq.getSubFacets().get(key); + if (sub != null) { + return sub.createFacetMerger(val); + } -class FacetQueryMerger extends FacetBucketMerger { - FacetBucket bucket; + AggValueSource subStat = freq.getFacetStats().get(key); + if (subStat != null) { + return subStat.createFacetMerger(val); + } - public FacetQueryMerger(FacetQuery freq) { - super(freq); + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "no merger for key=" + key + " , val=" + val); + } } - @Override - public void merge(Object facet, Context mcontext) { - if (bucket == null) { - bucket = newBucket(null, mcontext); + + static class FacetQueryMerger extends FacetBucketMerger { + FacetBucket bucket; + + public FacetQueryMerger(FacetQuery freq) { + super(freq); } - bucket.mergeBucket((SimpleOrderedMap) facet, mcontext); - } - @Override - public Map getRefinement(Context mcontext) { - Collection tags; - if (mcontext.bucketWasMissing()) { - // if this bucket was missing, we need to get all subfacets that have partials (that need to list values for refinement) - tags = mcontext.getSubsWithPartial(freq); - } else { - tags = mcontext.getSubsWithRefinement(freq); + @Override + public void merge(Object facet, Context mcontext) { + if (bucket == null) { + bucket = newBucket(null, mcontext); + } + bucket.mergeBucket((SimpleOrderedMap) facet, mcontext); } - Map refinement = bucket.getRefinement(mcontext, tags); + @Override + public Map getRefinement(Context mcontext) { + Collection tags; + if (mcontext.bucketWasMissing()) { + // if this bucket was missing, we need to get all subfacets that have partials (that need to list values for refinement) + tags = 
mcontext.getSubsWithPartial(freq); + } else { + tags = mcontext.getSubsWithRefinement(freq); + } - return refinement; - } + Map refinement = bucket.getRefinement(mcontext, tags); + return refinement; + } - @Override - public void finish(Context mcontext) { - // FIXME we need to propagate!!! - } - @Override - public Object getMergedResult() { - return bucket.getMergedBucket(); + @Override + public void finish(Context mcontext) { + // FIXME we need to propagate!!! + } + + @Override + public Object getMergedResult() { + return bucket.getMergedBucket(); + } } } diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetParser.java b/solr/core/src/java/org/apache/solr/search/facet/FacetParser.java new file mode 100644 index 000000000000..d8bb697503ac --- /dev/null +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetParser.java @@ -0,0 +1,637 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.search.facet; + +import java.util.List; +import java.util.ArrayList; +import java.util.Map; +import java.util.Optional; + +import org.apache.solr.common.SolrException; +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.util.NamedList; +import org.apache.solr.common.util.StrUtils; +import org.apache.solr.search.FunctionQParser; +import org.apache.solr.request.SolrQueryRequest; +import org.apache.solr.schema.IndexSchema; +import org.apache.solr.search.QParser; +import org.apache.solr.search.SyntaxError; + +import static org.apache.solr.common.params.CommonParams.SORT; + +abstract class FacetParser { + protected FacetRequestT facet; + protected FacetParser parent; + protected String key; + + public FacetParser(FacetParser parent, String key) { + this.parent = parent; + this.key = key; + } + + public String getKey() { + return key; + } + + public String getPathStr() { + if (parent == null) { + return "/" + key; + } + return parent.getKey() + "/" + key; + } + + protected RuntimeException err(String msg) { + return new SolrException(SolrException.ErrorCode.BAD_REQUEST, msg + " , path="+getPathStr()); + } + + public abstract FacetRequest parse(Object o) throws SyntaxError; + + // TODO: put the FacetRequest on the parser object? + public void parseSubs(Object o) throws SyntaxError { + if (o==null) return; + if (o instanceof Map) { + @SuppressWarnings({"unchecked"}) + Map m = (Map) o; + for (Map.Entry entry : m.entrySet()) { + String key = entry.getKey(); + Object value = entry.getValue(); + + if ("processEmpty".equals(key)) { + facet.processEmpty = getBoolean(m, "processEmpty", false); + continue; + } + + // "my_prices" : { "range" : { "field":... + // key="my_prices", value={"range":.. 
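// [Editor's note] Spelling the comment above out a bit (hypothetical request):
//   json.facet = { my_prices : { range : { field : price, start : 0, end : 100, gap : 20,
//                                           facet : { avg_size : "avg(size)" } } } }
// Each map entry is handed to parseFacetOrStat below; Map values come back as
// FacetRequest sub-facets, String values like "avg(size)" as AggValueSource stats.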
+ + Object parsedValue = parseFacetOrStat(key, value); + + // TODO: have parseFacetOrStat directly add instead of return? + if (parsedValue instanceof FacetRequest) { + facet.addSubFacet(key, (FacetRequest)parsedValue); + } else if (parsedValue instanceof AggValueSource) { + facet.addStat(key, (AggValueSource)parsedValue); + } else { + throw err("Unknown facet type key=" + key + " class=" + (parsedValue == null ? "null" : parsedValue.getClass().getName())); + } + } + } else { + // facet : my_field? + throw err("Expected map for facet/stat"); + } + } + + public Object parseFacetOrStat(String key, Object o) throws SyntaxError { + + if (o instanceof String) { + return parseStringFacetOrStat(key, (String)o); + } + + if (!(o instanceof Map)) { + throw err("expected Map but got " + o); + } + + // The type can be in a one element map, or inside the args as the "type" field + // { "query" : "foo:bar" } + // { "range" : { "field":... } } + // { "type" : range, field : myfield, ... } + @SuppressWarnings({"unchecked"}) + Map m = (Map)o; + String type; + Object args; + + if (m.size() == 1) { + Map.Entry entry = m.entrySet().iterator().next(); + type = entry.getKey(); + args = entry.getValue(); + // throw err("expected facet/stat type name, like {range:{... but got " + m); + } else { + // type should be inside the map as a parameter + Object typeObj = m.get("type"); + if (!(typeObj instanceof String)) { + throw err("expected facet/stat type name, like {type:range, field:price, ...} but got " + typeObj); + } + type = (String)typeObj; + args = m; + } + + return parseFacetOrStat(key, type, args); + } + + public Object parseFacetOrStat(String key, String type, Object args) throws SyntaxError { + // TODO: a place to register all these facet types? + + switch (type) { + case "field": + case "terms": + return new FacetFieldParser(this, key).parse(args); + case "query": + return new FacetQueryParser(this, key).parse(args); + case "range": + return new FacetRangeParser(this, key).parse(args); + case "heatmap": + return new FacetHeatmap.Parser(this, key).parse(args); + case "func": + return parseStat(key, args); + } + + throw err("Unknown facet or stat. key=" + key + " type=" + type + " args=" + args); + } + + public Object parseStringFacetOrStat(String key, String s) throws SyntaxError { + // "avg(myfield)" + return parseStat(key, s); + // TODO - simple string representation of facets + } + + /** Parses simple strings like "avg(x)" in the context of optional local params (may be null) */ + private AggValueSource parseStatWithParams(String key, SolrParams localparams, String stat) throws SyntaxError { + SolrQueryRequest req = getSolrRequest(); + FunctionQParser parser = new FunctionQParser(stat, localparams, req.getParams(), req); + AggValueSource agg = parser.parseAgg(FunctionQParser.FLAG_DEFAULT); + return agg; + } + + /** Parses simple strings like "avg(x)" or robust Maps that may contain local params */ + private AggValueSource parseStat(String key, Object args) throws SyntaxError { + assert null != args; + + if (args instanceof CharSequence) { + // Both of these variants are already unpacked for us in this case, and use no local params... 
+ // 1) x:{func:'min(foo)'} + // 2) x:'min(foo)' + return parseStatWithParams(key, null, args.toString()); + } + + if (args instanceof Map) { + @SuppressWarnings({"unchecked"}) + final Map statMap = (Map)args; + return parseStatWithParams(key, jsonToSolrParams(statMap), statMap.get("func").toString()); + } + + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Stats must be specified as either a simple string, or a json Map"); + + } + + + private FacetRequest.Domain getDomain() { + if (facet.domain == null) { + facet.domain = new FacetRequest.Domain(); + } + return facet.domain; + } + + protected void parseCommonParams(Object o) { + if (o instanceof Map) { + @SuppressWarnings({"unchecked"}) + Map m = (Map)o; + List excludeTags = getStringList(m, "excludeTags"); + if (excludeTags != null) { + getDomain().excludeTags = excludeTags; + } + + Object domainObj = m.get("domain"); + if (domainObj instanceof Map) { + @SuppressWarnings({"unchecked"}) + Map domainMap = (Map)domainObj; + FacetRequest.Domain domain = getDomain(); + + excludeTags = getStringList(domainMap, "excludeTags"); + if (excludeTags != null) { + domain.excludeTags = excludeTags; + } + + if (domainMap.containsKey("query")) { + domain.explicitQueries = parseJSONQueryStruct(domainMap.get("query")); + if (null == domain.explicitQueries) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "'query' domain can not be null or empty"); + } else if (null != domain.excludeTags) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "'query' domain can not be combined with 'excludeTags'"); + } + } + + String blockParent = getString(domainMap, "blockParent", null); + String blockChildren = getString(domainMap, "blockChildren", null); + + if (blockParent != null) { + domain.toParent = true; + domain.parents = blockParent; + } else if (blockChildren != null) { + domain.toChildren = true; + domain.parents = blockChildren; + } + + FacetRequest.Domain.JoinField.createJoinField(domain, domainMap); + FacetRequest.Domain.GraphField.createGraphField(domain, domainMap); + + Object filterOrList = domainMap.get("filter"); + if (filterOrList != null) { + assert domain.filters == null; + domain.filters = parseJSONQueryStruct(filterOrList); + } + + } else if (domainObj != null) { + throw err("Expected Map for 'domain', received " + domainObj.getClass().getSimpleName() + "=" + domainObj); + } + } + } + + /** returns null on null input, otherwise returns a list of the JSON query structures -- either + * directly from the raw (list) input, or if raw input is a not a list then it encapsulates + * it in a new list. 
+ */ + @SuppressWarnings({"unchecked"}) + private List parseJSONQueryStruct(Object raw) { + List result = null; + if (null == raw) { + return result; + } else if (raw instanceof List) { + result = (List) raw; + } else { + result = new ArrayList<>(1); + result.add(raw); + } + return result; + } + + public String getField(Map args) { + Object fieldName = args.get("field"); // TODO: pull out into defined constant + if (fieldName == null) { + fieldName = args.get("f"); // short form + } + if (fieldName == null) { + throw err("Missing 'field'"); + } + + if (!(fieldName instanceof String)) { + throw err("Expected string for 'field', got" + fieldName); + } + + return (String)fieldName; + } + + + public Long getLongOrNull(Map args, String paramName, boolean required) { + Object o = args.get(paramName); + if (o == null) { + if (required) { + throw err("Missing required parameter '" + paramName + "'"); + } + return null; + } + if (!(o instanceof Long || o instanceof Integer || o instanceof Short || o instanceof Byte)) { + throw err("Expected integer type for param '"+paramName + "' but got " + o); + } + + return ((Number)o).longValue(); + } + + public long getLong(Map args, String paramName, long defVal) { + Object o = args.get(paramName); + if (o == null) { + return defVal; + } + if (!(o instanceof Long || o instanceof Integer || o instanceof Short || o instanceof Byte)) { + throw err("Expected integer type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o); + } + + return ((Number)o).longValue(); + } + + public Double getDoubleOrNull(Map args, String paramName, boolean required) { + Object o = args.get(paramName); + if (o == null) { + if (required) { + throw err("Missing required parameter '" + paramName + "'"); + } + return null; + } + if (!(o instanceof Number)) { + throw err("Expected double type for param '" + paramName + "' but got " + o); + } + + return ((Number)o).doubleValue(); + } + + public boolean getBoolean(Map args, String paramName, boolean defVal) { + Object o = args.get(paramName); + if (o == null) { + return defVal; + } + // TODO: should we be more flexible and accept things like "true" (strings)? + // Perhaps wait until the use case comes up. 
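// [Editor's note] i.e. only a real JSON boolean passes; a lenient variant
// (hypothetical, deliberately not implemented per the TODO above) would be:
//   if (o instanceof String) return Boolean.parseBoolean((String) o);
// Anything else still falls through to the type error below.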
+ if (!(o instanceof Boolean)) { + throw err("Expected boolean type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o); + } + + return (Boolean)o; + } + + public Boolean getBooleanOrNull(Map args, String paramName) { + Object o = args.get(paramName); + + if (o != null && !(o instanceof Boolean)) { + throw err("Expected boolean type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o); + } + return (Boolean) o; + } + + + public String getString(Map args, String paramName, String defVal) { + Object o = args.get(paramName); + if (o == null) { + return defVal; + } + if (!(o instanceof String)) { + throw err("Expected string type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o); + } + + return (String)o; + } + + public Object getVal(Map args, String paramName, boolean required) { + Object o = args.get(paramName); + if (o == null && required) { + throw err("Missing required parameter: '" + paramName + "'"); + } + return o; + } + + public List getStringList(Map args, String paramName) { + return getStringList(args, paramName, true); + } + +@SuppressWarnings({"unchecked"}) + public List getStringList(Map args, String paramName, boolean decode) { + Object o = args.get(paramName); + if (o == null) { + return null; + } + if (o instanceof List) { + return (List)o; + } + if (o instanceof String) { + // TODO: SOLR-12539 handle spaces in b/w comma & value ie, should the values be trimmed before returning?? + return StrUtils.splitSmart((String)o, ",", decode); + } + + throw err("Expected list of string or comma separated string values for '" + paramName + + "', received " + o.getClass().getSimpleName() + "=" + o); + } + + public IndexSchema getSchema() { + return parent.getSchema(); + } + + public SolrQueryRequest getSolrRequest() { + return parent.getSolrRequest(); + } + + /** + * Helper that handles the possibility of map values being lists + * NOTE: does *NOT* fail on map values that are sub-maps (ie: nested json objects) + */ + @SuppressWarnings({"unchecked", "rawtypes"}) + public static SolrParams jsonToSolrParams(Map jsonObject) { + // HACK, but NamedList already handles the list processing for us... + NamedList nl = new NamedList<>(); + nl.addAll(jsonObject); + return SolrParams.toSolrParams(nl); + } + + // TODO Make this private (or at least not static) and introduce + // a newInstance method on FacetParser that returns one of these? + static class FacetTopParser extends FacetParser { + private SolrQueryRequest req; + + public FacetTopParser(SolrQueryRequest req) { + super(null, "facet"); + this.facet = new FacetQuery(); + this.req = req; + } + + @Override + public FacetQuery parse(Object args) throws SyntaxError { + parseSubs(args); + return facet; + } + + @Override + public SolrQueryRequest getSolrRequest() { + return req; + } + + @Override + public IndexSchema getSchema() { + return req.getSchema(); + } + } + + static class FacetQueryParser extends FacetParser { + public FacetQueryParser(@SuppressWarnings("rawtypes") FacetParser parent, String key) { + super(parent, key); + facet = new FacetQuery(); + } + + @Override + public FacetQuery parse(Object arg) throws SyntaxError { + parseCommonParams(arg); + + String qstring = null; + if (arg instanceof String) { + // just the field name... 
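// [Editor's note] the two equivalent shapes this branch and the Map branch
// below accept (hypothetical field/values):
//   { high_pop : "popularity:[8 TO 10]" }            <- bare string, this branch
//   { high_pop : { query : "popularity:[8 TO 10]",   <- map form; may also carry
//                  facet : { ... } } }                   sub-facets via parseSubs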
+ qstring = (String)arg; + + } else if (arg instanceof Map) { + @SuppressWarnings({"unchecked"}) + Map m = (Map) arg; + qstring = getString(m, "q", null); + if (qstring == null) { + qstring = getString(m, "query", null); + } + + // OK to parse subs before we have parsed our own query? + // as long as subs don't need to know about it. + parseSubs( m.get("facet") ); + } else if (arg != null) { + // something lke json.facet.facet.query=2 + throw err("Expected string/map for facet query, received " + arg.getClass().getSimpleName() + "=" + arg); + } + + // TODO: substats that are from defaults!!! + + if (qstring != null) { + QParser parser = QParser.getParser(qstring, getSolrRequest()); + parser.setIsFilter(true); + facet.q = parser.getQuery(); + } + + return facet; + } + } + + /*** not a separate type of parser for now... + static class FacetBlockParentParser extends FacetParser { + public FacetBlockParentParser(FacetParser parent, String key) { + super(parent, key); + facet = new FacetBlockParent(); + } + + @Override + public FacetBlockParent parse(Object arg) throws SyntaxError { + parseCommonParams(arg); + + if (arg instanceof String) { + // just the field name... + facet.parents = (String)arg; + + } else if (arg instanceof Map) { + Map m = (Map) arg; + facet.parents = getString(m, "parents", null); + + parseSubs( m.get("facet") ); + } + + return facet; + } + } + ***/ + + static class FacetFieldParser extends FacetParser { + @SuppressWarnings({"rawtypes"}) + public FacetFieldParser(FacetParser parent, String key) { + super(parent, key); + facet = new FacetField(); + } + + public FacetField parse(Object arg) throws SyntaxError { + parseCommonParams(arg); + if (arg instanceof String) { + // just the field name... + facet.field = (String)arg; + + } else if (arg instanceof Map) { + @SuppressWarnings({"unchecked"}) + Map m = (Map) arg; + facet.field = getField(m); + facet.offset = getLong(m, "offset", facet.offset); + facet.limit = getLong(m, "limit", facet.limit); + facet.overrequest = (int) getLong(m, "overrequest", facet.overrequest); + facet.overrefine = (int) getLong(m, "overrefine", facet.overrefine); + if (facet.limit == 0) facet.offset = 0; // normalize. an offset with a limit of non-zero isn't useful. + facet.mincount = getLong(m, "mincount", facet.mincount); + facet.missing = getBoolean(m, "missing", facet.missing); + facet.numBuckets = getBoolean(m, "numBuckets", facet.numBuckets); + facet.prefix = getString(m, "prefix", facet.prefix); + facet.allBuckets = getBoolean(m, "allBuckets", facet.allBuckets); + facet.method = FacetField.FacetMethod.fromString(getString(m, "method", null)); + facet.cacheDf = (int)getLong(m, "cacheDf", facet.cacheDf); + + // TODO: pull up to higher level? + facet.refine = FacetRequest.RefineMethod.fromObj(m.get("refine")); + + facet.perSeg = getBooleanOrNull(m, "perSeg"); + + // facet.sort may depend on a facet stat... + // should we be parsing / validating this here, or in the execution environment? 
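// [Editor's note] ordering is significant here: parseSubs(o) below must run
// before parseAndValidateSort so that a stat-backed sort validates, e.g.
// (hypothetical request)
//   { type : terms, field : cat, facet : { x : "avg(price)" }, sort : "x desc" }
// -- the validator checks sortVariable against facet.facetStats.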
+ Object o = m.get("facet"); + parseSubs(o); + + facet.sort = parseAndValidateSort(facet, m, SORT); + facet.prelim_sort = parseAndValidateSort(facet, m, "prelim_sort"); + } else if (arg != null) { + // something like json.facet.facet.field=2 + throw err("Expected string/map for facet field, received " + arg.getClass().getSimpleName() + "=" + arg); + } + + if (null == facet.sort) { + facet.sort = FacetRequest.FacetSort.COUNT_DESC; + } + + return facet; + } + + /** + * Parses, validates and returns the {@link FacetRequest.FacetSort} for given sortParam + * and facet field + *

+ * <p>
+ * Currently, supported sort specifications are 'mystat desc' OR {mystat: 'desc'}
+ * index - This is equivalent to 'index asc'
+ * count - This is equivalent to 'count desc'
+ * </p>
    + * + * @param facet {@link FacetField} for which sort needs to be parsed and validated + * @param args map containing the sortVal for given sortParam + * @param sortParam parameter for which sort needs to parsed and validated + * @return parsed facet sort + */ + private static FacetRequest.FacetSort parseAndValidateSort(FacetField facet, Map args, String sortParam) { + Object sort = args.get(sortParam); + if (sort == null) { + return null; + } + + FacetRequest.FacetSort facetSort = null; + + if (sort instanceof String) { + String sortStr = (String)sort; + if (sortStr.endsWith(" asc")) { + facetSort = new FacetRequest.FacetSort(sortStr.substring(0, sortStr.length()-" asc".length()), + FacetRequest.SortDirection.asc); + } else if (sortStr.endsWith(" desc")) { + facetSort = new FacetRequest.FacetSort(sortStr.substring(0, sortStr.length()-" desc".length()), + FacetRequest.SortDirection.desc); + } else { + facetSort = new FacetRequest.FacetSort(sortStr, + // default direction for "index" is ascending + ("index".equals(sortStr) + ? FacetRequest.SortDirection.asc + : FacetRequest.SortDirection.desc)); + } + } else if (sort instanceof Map) { + // { myvar : 'desc' } + @SuppressWarnings("unchecked") + Optional> optional = ((Map)sort).entrySet().stream().findFirst(); + if (optional.isPresent()) { + Map.Entry entry = optional.get(); + facetSort = new FacetRequest.FacetSort(entry.getKey(), FacetRequest.SortDirection.fromObj(entry.getValue())); + } + } else { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Expected string/map for '" + sortParam +"', received "+ sort.getClass().getSimpleName() + "=" + sort); + } + + Map facetStats = facet.facetStats; + // validate facet sort + boolean isValidSort = facetSort == null || + "index".equals(facetSort.sortVariable) || + "count".equals(facetSort.sortVariable) || + (facetStats != null && facetStats.containsKey(facetSort.sortVariable)); + + if (!isValidSort) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Invalid " + sortParam + " option '" + sort + "' for field '" + facet.field + "'"); + } + return facetSort; + } + + } + +} diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java b/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java index b8271f5961ac..c3d84eb2e3d3 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java @@ -53,7 +53,7 @@ public abstract class FacetProcessor { DocSet filter; // additional filters specified by "filter" // TODO: do these need to be on the context to support recomputing during multi-select? 
LinkedHashMap accMap; SlotAcc[] accs; - CountSlotAcc countAcc; + SlotAcc.CountSlotAcc countAcc; FacetProcessor(FacetContext fcontext, FacetRequestT freq) { this.fcontext = fcontext; @@ -61,7 +61,7 @@ public abstract class FacetProcessor { fcontext.processor = this; } - public Object getResponse() { + public org.apache.solr.common.MapWriter getResponse() { return response; } @@ -82,6 +82,7 @@ private static List evalJSONFilterQueryStruct(FacetContext fcontext, List qlist.add(parserFilter((String) rawFilter, fcontext.req)); } else if (rawFilter instanceof Map) { + @SuppressWarnings({"unchecked"}) Map m = (Map) rawFilter; String type; Object args; @@ -181,6 +182,7 @@ private void handleFilterExclusions() throws IOException { return; } + @SuppressWarnings({"rawtypes"}) Map tagMap = (Map) fcontext.req.getContext().get("tags"); if (tagMap == null) { // no filters were tagged @@ -307,7 +309,7 @@ protected void createAccs(long docCount, int slotCount) throws IOException { // allow a custom count acc to be used if (countAcc == null) { - countAcc = new CountSlotArrAcc(fcontext, slotCount); + countAcc = new SlotAcc.CountSlotArrAcc(fcontext, slotCount); countAcc.key = "count"; } @@ -438,6 +440,7 @@ void fillBucket(SimpleOrderedMap bucket, Query q, DocSet result, boolean } } + @SuppressWarnings({"unchecked"}) void processSubs(SimpleOrderedMap response, Query filter, DocSet domain, boolean skip, Map facetInfo) throws IOException { boolean emptyDomain = domain == null || domain.size() == 0; diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetQuery.java b/solr/core/src/java/org/apache/solr/search/facet/FacetQuery.java index a6782bf70917..43657762b0bf 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetQuery.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetQuery.java @@ -27,6 +27,7 @@ public class FacetQuery extends FacetRequest { // query string or query? 
Query q; + @SuppressWarnings("rawtypes") @Override public FacetProcessor createFacetProcessor(FacetContext fcontext) { return new FacetQueryProcessor(fcontext, this); @@ -34,7 +35,7 @@ public FacetProcessor createFacetProcessor(FacetContext fcontext) { @Override public FacetMerger createFacetMerger(Object prototype) { - return new FacetQueryMerger(this); + return new FacetModule.FacetQueryMerger(this); } @Override diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java index a93bc079e709..a3b894915b80 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java @@ -16,38 +16,15 @@ */ package org.apache.solr.search.facet; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Date; import java.util.EnumSet; import java.util.HashMap; -import java.util.List; import java.util.Map; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.NumericUtils; -import org.apache.solr.common.SolrException; import org.apache.solr.common.params.FacetParams.FacetRangeInclude; import org.apache.solr.common.params.FacetParams.FacetRangeOther; -import org.apache.solr.common.util.SimpleOrderedMap; -import org.apache.solr.schema.CurrencyFieldType; -import org.apache.solr.schema.CurrencyValue; -import org.apache.solr.schema.DateRangeField; -import org.apache.solr.schema.ExchangeRateProvider; -import org.apache.solr.schema.FieldType; -import org.apache.solr.schema.SchemaField; -import org.apache.solr.schema.TrieDateField; -import org.apache.solr.schema.TrieField; -import org.apache.solr.search.DocSet; -import org.apache.solr.search.SyntaxError; -import org.apache.solr.search.facet.SlotAcc.SlotContext; -import org.apache.solr.util.DateMathParser; - -import static org.apache.solr.search.facet.FacetContext.SKIP_FACET; public class FacetRange extends FacetRequestSorted { static final String ACTUAL_END_JSON_KEY = "_actual_end"; - String field; Object start; Object end; @@ -64,6 +41,7 @@ public class FacetRange extends FacetRequestSorted { } @Override + @SuppressWarnings({"rawtypes"}) public FacetProcessor createFacetProcessor(FacetContext fcontext) { return new FacetRangeProcessor(fcontext, this); } @@ -72,7 +50,7 @@ public FacetProcessor createFacetProcessor(FacetContext fcontext) { public FacetMerger createFacetMerger(Object prototype) { return new FacetRangeMerger(this); } - + @Override public Map getFacetDescription() { Map descr = new HashMap<>(); @@ -86,1016 +64,4 @@ public Map getFacetDescription() { } return descr; } - -} - - -class FacetRangeProcessor extends FacetProcessor { - // TODO: the code paths for initial faceting, vs refinement, are very different... - // TODO: ...it might make sense to have seperate classes w/a common base? - // TODO: let FacetRange.createFacetProcessor decide which one to instantiate? - - final SchemaField sf; - final Calc calc; - final EnumSet include; - final long effectiveMincount; - final Comparable start; - final Comparable end; - final String gap; - final Object ranges; - - /** Build by {@link #createRangeList} if and only if needed for basic faceting */ - List rangeList; - /** Build by {@link #createRangeList} if and only if needed for basic faceting */ - List otherList; - - /** - * Serves two purposes depending on the type of request. - *
- * <ul>
- * <li>If this is a phase#1 shard request, then {@link #createRangeList} will set this value (non null)
- * if and only if it is needed for refinement (ie: hardend:false & other
- * that requires an end value low/high value calculation). And it will be included in the response</li>
- * <li>If this is a phase#2 refinement request, this variable will be used
- * {@link #getOrComputeActualEndForRefinement} to track the value sent with the refinement request
- * -- or to cache a recomputed value if the request omitted it -- for use in refining the
- * other buckets that need them</li>
- * </ul>
    - */ - Comparable actual_end = null; // null until/unless we need it - - FacetRangeProcessor(FacetContext fcontext, FacetRange freq) { - super(fcontext, freq); - include = freq.include; - sf = fcontext.searcher.getSchema().getField(freq.field); - calc = getCalcForField(sf); - if (freq.ranges != null && (freq.start != null || freq.end != null || freq.gap != null)) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "Cannot set gap/start/end and ranges params together"); - } - if (freq.ranges != null) { - ranges = freq.ranges; - start = null; - end = null; - gap = null; - } else { - start = calc.getValue(freq.start.toString()); - end = calc.getValue(freq.end.toString()); - gap = freq.gap.toString(); - ranges = null; - } - - // Under the normal mincount=0, each shard will need to return 0 counts since we don't calculate buckets at the top level. - // If mincount>0 then we could *potentially* set our sub mincount to 1... - // ...but that would require sorting the buckets (by their val) at the top level - // - // Rather then do that, which could be complicated by non trivial field types, we'll force the sub-shard effectiveMincount - // to be 0, ensuring that we can trivially merge all the buckets from every shard - // (we have to filter the merged buckets by the original mincount either way) - effectiveMincount = fcontext.isShard() ? 0 : freq.mincount; - } - - @Override - public void process() throws IOException { - super.process(); - - if (fcontext.facetInfo != null) { // refinement? - response = refineFacets(); - } else { - // phase#1: build list of all buckets and return full facets... - createRangeList(); - response = getRangeCountsIndexed(); - } - } - - private static class Range { - Object label; - Comparable low; - Comparable high; - boolean includeLower; - boolean includeUpper; - - public Range(Object label, Comparable low, Comparable high, boolean includeLower, boolean includeUpper) { - this.label = label; - this.low = low; - this.high = high; - this.includeLower = includeLower; - this.includeUpper = includeUpper; - } - } - - /** - * Returns a {@link Calc} instance to use for term faceting over a numeric field. - * This method is unused for range faceting, and exists solely as a helper method for other classes - * - * @param sf A field to facet on, must be of a type such that {@link FieldType#getNumberType} is non null - * @return a Calc instance with {@link Calc#bitsToValue} and {@link Calc#bitsToSortableBits} methods suitable for the specified field. - * @see FacetFieldProcessorByHashDV - */ - public static Calc getNumericCalc(SchemaField sf) { - Calc calc; - final FieldType ft = sf.getType(); - - if (ft instanceof TrieField || ft.isPointField()) { - switch (ft.getNumberType()) { - case FLOAT: - calc = new FloatCalc(sf); - break; - case DOUBLE: - calc = new DoubleCalc(sf); - break; - case INTEGER: - calc = new IntCalc(sf); - break; - case LONG: - calc = new LongCalc(sf); - break; - case DATE: - calc = new DateCalc(sf, null); - break; - default: - throw new SolrException - (SolrException.ErrorCode.BAD_REQUEST, - "Expected numeric field type :" + sf); - } - } else { - throw new SolrException - (SolrException.ErrorCode.BAD_REQUEST, - "Expected numeric field type :" + sf); - } - return calc; - } - - /** - * Helper method used in processor constructor - * @return a Calc instance with {@link Calc#bitsToValue} and {@link Calc#bitsToSortableBits} methods suitable for the specified field. 
- */ - private static Calc getCalcForField(SchemaField sf) { - final FieldType ft = sf.getType(); - if (ft instanceof TrieField || ft.isPointField()) { - switch (ft.getNumberType()) { - case FLOAT: - return new FloatCalc(sf); - case DOUBLE: - return new DoubleCalc(sf); - case INTEGER: - return new IntCalc(sf); - case LONG: - return new LongCalc(sf); - case DATE: - return new DateCalc(sf, null); - default: - throw new SolrException - (SolrException.ErrorCode.BAD_REQUEST, - "Unable to range facet on numeric field of unexpected type:" + sf.getName()); - } - } else if (ft instanceof CurrencyFieldType) { - return new CurrencyCalc(sf); - } else if (ft instanceof DateRangeField) { - return new DateCalc(sf, null); - } - - // if we made it this far, we have no idea what it is... - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "Unable to range facet on field:" + sf.getName()); - } - - private void createRangeList() throws IOException { - - rangeList = new ArrayList<>(); - otherList = new ArrayList<>(3); - - Comparable low = start; - Comparable loop_end = this.end; - - if (ranges != null) { - rangeList.addAll(parseRanges(ranges)); - return; - } - - while (low.compareTo(end) < 0) { - Comparable high = calc.addGap(low, gap); - if (end.compareTo(high) < 0) { - if (freq.hardend) { - high = loop_end; - } else { - loop_end = high; - } - } - if (high.compareTo(low) < 0) { - throw new SolrException - (SolrException.ErrorCode.BAD_REQUEST, - "range facet infinite loop (is gap negative? did the math overflow?)"); - } - if (high.compareTo(low) == 0) { - throw new SolrException - (SolrException.ErrorCode.BAD_REQUEST, - "range facet infinite loop: gap is either zero, or too small relative start/end and caused underflow: " + low + " + " + gap + " = " + high); - } - - boolean incLower = (include.contains(FacetRangeInclude.LOWER) || - (include.contains(FacetRangeInclude.EDGE) && 0 == low.compareTo(start))); - boolean incUpper = (include.contains(FacetRangeInclude.UPPER) || - (include.contains(FacetRangeInclude.EDGE) && 0 == high.compareTo(end))); - - Range range = new Range(calc.buildRangeLabel(low), low, high, incLower, incUpper); - rangeList.add( range ); - - low = high; - } - - // no matter what other values are listed, we don't do - // anything if "none" is specified. - if (! freq.others.contains(FacetRangeOther.NONE) ) { - final boolean all = freq.others.contains(FacetRangeOther.ALL); - - if (all || freq.others.contains(FacetRangeOther.BEFORE)) { - otherList.add( buildBeforeRange() ); - } - if (all || freq.others.contains(FacetRangeOther.AFTER)) { - actual_end = loop_end; - otherList.add( buildAfterRange() ); - } - if (all || freq.others.contains(FacetRangeOther.BETWEEN)) { - actual_end = loop_end; - otherList.add( buildBetweenRange() ); - } - } - // if we're not a shard request, or this is a hardend:true situation, then actual_end isn't needed - if (freq.hardend || (! 
fcontext.isShard())) { - actual_end = null; - } - } - - /** - * Parses the given list of maps and returns list of Ranges - * - * @param input - list of map containing the ranges - * @return list of {@link Range} - */ - private List parseRanges(Object input) { - if (!(input instanceof List)) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "Expected List for ranges but got " + input.getClass().getSimpleName() + " = " + input - ); - } - List intervals = (List) input; - List ranges = new ArrayList<>(); - for (Object obj : intervals) { - if (!(obj instanceof Map)) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "Expected Map for range but got " + obj.getClass().getSimpleName() + " = " + obj); - } - Range range; - Map interval = (Map) obj; - if (interval.containsKey("range")) { - range = getRangeByOldFormat(interval); - } else { - range = getRangeByNewFormat(interval); - } - ranges.add(range); - } - return ranges; - } - - private boolean getBoolean(Map args, String paramName, boolean defVal) { - Object o = args.get(paramName); - if (o == null) { - return defVal; - } - // TODO: should we be more flexible and accept things like "true" (strings)? - // Perhaps wait until the use case comes up. - if (!(o instanceof Boolean)) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "Expected boolean type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o); - } - - return (Boolean)o; - } - - private String getString(Map args, String paramName, boolean required) { - Object o = args.get(paramName); - if (o == null) { - if (required) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "Missing required parameter '" + paramName + "' for " + args); - } - return null; - } - if (!(o instanceof String)) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "Expected string type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o); - } - - return (String)o; - } - - /** - * Parses the range given in format {from:val1, to:val2, inclusive_to:true} - * and returns the {@link Range} - * - * @param rangeMap Map containing the range info - * @return {@link Range} - */ - private Range getRangeByNewFormat(Map rangeMap) { - Object fromObj = rangeMap.get("from"); - Object toObj = rangeMap.get("to"); - - String fromStr = fromObj == null? "*" : fromObj.toString(); - String toStr = toObj == null? "*": toObj.toString(); - boolean includeUpper = getBoolean(rangeMap, "inclusive_to", false); - boolean includeLower = getBoolean(rangeMap, "inclusive_from", true); - - Object key = rangeMap.get("key"); - // if (key == null) { - // key = (includeLower? "[": "(") + fromStr + "," + toStr + (includeUpper? "]": ")"); - // } - // using the default key as custom key won't work with refine - // refine would need both low and high values - key = (includeLower? "[": "(") + fromStr + "," + toStr + (includeUpper? 
"]": ")"); - - Comparable from = getComparableFromString(fromStr); - Comparable to = getComparableFromString(toStr); - if (from != null && to != null && from.compareTo(to) > 0) { - // allowing from and to be same - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "'from' is higher than 'to' in range for key: " + key); - } - - return new Range(key, from, to, includeLower, includeUpper); - } - - /** - * Parses the range string from the map and Returns {@link Range} - * - * @param range map containing the interval - * @return {@link Range} - */ - private Range getRangeByOldFormat(Map range) { - String key = getString(range, "key", false); - String rangeStr = getString(range, "range", true); - try { - return parseRangeFromString(key, rangeStr); - } catch (SyntaxError e) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e); - } - } - - /** - * Parses the given string and returns Range. - * This is adopted from {@link org.apache.solr.request.IntervalFacets} - * - * @param key The name of range which would be used as {@link Range}'s label - * @param rangeStr The string containing the Range - * @return {@link Range} - */ - private Range parseRangeFromString(String key, String rangeStr) throws SyntaxError { - rangeStr = rangeStr.trim(); - if (rangeStr.isEmpty()) { - throw new SyntaxError("empty facet range"); - } - - boolean includeLower = true, includeUpper = true; - Comparable start = null, end = null; - if (rangeStr.charAt(0) == '(') { - includeLower = false; - } else if (rangeStr.charAt(0) != '[') { - throw new SyntaxError( "Invalid start character " + rangeStr.charAt(0) + " in facet range " + rangeStr); - } - - final int lastNdx = rangeStr.length() - 1; - if (rangeStr.charAt(lastNdx) == ')') { - includeUpper = false; - } else if (rangeStr.charAt(lastNdx) != ']') { - throw new SyntaxError("Invalid end character " + rangeStr.charAt(lastNdx) + " in facet range " + rangeStr); - } - - StringBuilder startStr = new StringBuilder(lastNdx); - int i = unescape(rangeStr, 1, lastNdx, startStr); - if (i == lastNdx) { - if (rangeStr.charAt(lastNdx - 1) == ',') { - throw new SyntaxError("Empty range limit"); - } - throw new SyntaxError("Missing unescaped comma separating range ends in " + rangeStr); - } - start = getComparableFromString(startStr.toString()); - - StringBuilder endStr = new StringBuilder(lastNdx); - i = unescape(rangeStr, i, lastNdx, endStr); - if (i != lastNdx) { - throw new SyntaxError("Extra unescaped comma at index " + i + " in range " + rangeStr); - } - end = getComparableFromString(endStr.toString()); - - if (start != null && end != null && start.compareTo(end) > 0) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "'start' is higher than 'end' in range for key: " + rangeStr); - } - - // not using custom key as it won't work with refine - // refine would need both low and high values - return new Range(rangeStr, start, end, includeLower, includeUpper); - } - - /* Fill in sb with a string from i to the first unescaped comma, or n. 
- Return the index past the unescaped comma, or n if no unescaped comma exists */ - private int unescape(String s, int i, int n, StringBuilder sb) throws SyntaxError { - for (; i < n; ++i) { - char c = s.charAt(i); - if (c == '\\') { - ++i; - if (i < n) { - c = s.charAt(i); - } else { - throw new SyntaxError("Unfinished escape at index " + i + " in facet range " + s); - } - } else if (c == ',') { - return i + 1; - } - sb.append(c); - } - return n; - } - - private Comparable getComparableFromString(String value) { - value = value.trim(); - if ("*".equals(value)) { - return null; - } - return calc.getValue(value); - } - - private SimpleOrderedMap getRangeCountsIndexed() throws IOException { - - int slotCount = rangeList.size() + otherList.size(); - intersections = new DocSet[slotCount]; - filters = new Query[slotCount]; - - - createAccs(fcontext.base.size(), slotCount); - - for (int idx = 0; idx(); - List buckets = new ArrayList<>(); - res.add("buckets", buckets); - - for (int idx = 0; idx 0 && countAcc.getCount(idx) < effectiveMincount) continue; - Range range = rangeList.get(idx); - SimpleOrderedMap bucket = new SimpleOrderedMap(); - buckets.add(bucket); - bucket.add("val", range.label); - addStats(bucket, idx); - doSubs(bucket, idx); - } - - for (int idx = 0; idx { return new SlotContext(rangeQ); }); - countAcc.incrementCount(slot, num); // TODO: roll this into collect() - } - - private void doSubs(SimpleOrderedMap bucket, int slot) throws IOException { - // handle sub-facets for this bucket - if (freq.getSubFacets().size() > 0) { - DocSet subBase = intersections[slot]; - try { - processSubs(bucket, filters[slot], subBase, false, null); - } finally { - // subContext.base.decref(); // OFF-HEAP - // subContext.base = null; // do not modify context after creation... there may be deferred execution (i.e. streaming) - } - } - } - - // Essentially copied from SimpleFacets... - // would be nice to unify this stuff w/ analytics component... - /** - * Perhaps someday instead of having a giant "instanceof" case - * statement to pick an impl, we can add a "RangeFacetable" marker - * interface to FieldTypes and they can return instances of these - * directly from some method -- but until then, keep this locked down - * and private. - */ - static abstract class Calc { - protected final SchemaField field; - public Calc(final SchemaField field) { - this.field = field; - } - - /** - * Used by {@link FacetFieldProcessorByHashDV} for field faceting on numeric types -- not used for range faceting - */ - public Comparable bitsToValue(long bits) { - return bits; - } - - /** - * Used by {@link FacetFieldProcessorByHashDV} for field faceting on numeric types -- not used for range faceting - */ - public long bitsToSortableBits(long bits) { - return bits; - } - - /** - * Given the low value for a bucket, generates the appropriate "label" object to use. - * By default return the low object unmodified. 
- */ - public Object buildRangeLabel(Comparable low) { - return low; - } - - /** - * Formats a value into a label used in a response - * Default Impl just uses toString() - */ - public String formatValue(final Comparable val) { - return val.toString(); - } - - /** - * Parses a String param into a value throwing - * an exception if not possible - */ - public final Comparable getValue(final String rawval) { - try { - return parseStr(rawval); - } catch (Exception e) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "Can't parse value "+rawval+" for field: " + - field.getName(), e); - } - } - - /** - * Parses a String param into a value. - * Can throw a low level format exception as needed. - */ - protected abstract Comparable parseStr(final String rawval) - throws java.text.ParseException; - - /** - * Parses a String param into a value that represents the gap and - * can be included in the response, throwing - * a useful exception if not possible. - * - * Note: uses Object as the return type instead of T for things like - * Date where gap is just a DateMathParser string - */ - public final Object getGap(final String gap) { - try { - return parseGap(gap); - } catch (Exception e) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "Can't parse gap "+gap+" for field: " + - field.getName(), e); - } - } - - /** - * Parses a String param into a value that represents the gap and - * can be included in the response. - * Can throw a low level format exception as needed. - * - * Default Impl calls parseVal - */ - protected Object parseGap(final String rawval) throws java.text.ParseException { - return parseStr(rawval); - } - - /** - * Adds the String gap param to a low Range endpoint value to determine - * the corresponding high Range endpoint value, throwing - * a useful exception if not possible. - */ - public final Comparable addGap(Comparable value, String gap) { - try { - return parseAndAddGap(value, gap); - } catch (Exception e) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "Can't add gap "+gap+" to value " + value + - " for field: " + field.getName(), e); - } - } - /** - * Adds the String gap param to a low Range endpoint value to determine - * the corresponding high Range endpoint value. - * Can throw a low level format exception as needed. 
- */ - protected abstract Comparable parseAndAddGap(Comparable value, String gap) - throws java.text.ParseException; - - } - - private static class FloatCalc extends Calc { - - @Override - public Comparable bitsToValue(long bits) { - if (field.getType().isPointField() && field.multiValued()) { - return NumericUtils.sortableIntToFloat((int)bits); - } else { - return Float.intBitsToFloat( (int)bits ); - } - } - - @Override - public long bitsToSortableBits(long bits) { - return NumericUtils.sortableDoubleBits(bits); - } - - public FloatCalc(final SchemaField f) { super(f); } - @Override - protected Float parseStr(String rawval) { - return Float.valueOf(rawval); - } - @Override - public Float parseAndAddGap(Comparable value, String gap) { - return ((Number) value).floatValue() + Float.parseFloat(gap); - } - } - - private static class DoubleCalc extends Calc { - @Override - public Comparable bitsToValue(long bits) { - if (field.getType().isPointField() && field.multiValued()) { - return NumericUtils.sortableLongToDouble(bits); - } else { - return Double.longBitsToDouble(bits); - } - } - - @Override - public long bitsToSortableBits(long bits) { - return NumericUtils.sortableDoubleBits(bits); - } - - public DoubleCalc(final SchemaField f) { super(f); } - @Override - protected Double parseStr(String rawval) { - return Double.valueOf(rawval); - } - @Override - public Double parseAndAddGap(Comparable value, String gap) { - return ((Number) value).doubleValue() + Double.parseDouble(gap); - } - } - - private static class IntCalc extends Calc { - - public IntCalc(final SchemaField f) { super(f); } - @Override - public Comparable bitsToValue(long bits) { - return (int)bits; - } - @Override - protected Integer parseStr(String rawval) { - return Integer.valueOf(rawval); - } - @Override - public Integer parseAndAddGap(Comparable value, String gap) { - return ((Number) value).intValue() + Integer.parseInt(gap); - } - } - - private static class LongCalc extends Calc { - - public LongCalc(final SchemaField f) { super(f); } - @Override - protected Long parseStr(String rawval) { - return Long.valueOf(rawval); - } - @Override - public Long parseAndAddGap(Comparable value, String gap) { - return ((Number) value).longValue() + Long.parseLong(gap); - } - } - - private static class DateCalc extends Calc { - private final Date now; - public DateCalc(final SchemaField f, - final Date now) { - super(f); - this.now = now; - if (!(field.getType() instanceof TrieDateField || field.getType().isPointField() || - field.getType() instanceof DateRangeField)) { - throw new IllegalArgumentException("SchemaField must use field type extending TrieDateField, DateRangeField or PointField"); - } - } - - @Override - public Comparable bitsToValue(long bits) { - return new Date(bits); - } - - @Override - public String formatValue(Comparable val) { - return ((Date)val).toInstant().toString(); - } - @Override - protected Date parseStr(String rawval) { - return DateMathParser.parseMath(now, rawval); - } - @Override - protected Object parseGap(final String rawval) { - return rawval; - } - @Override - public Date parseAndAddGap(Comparable value, String gap) throws java.text.ParseException { - final DateMathParser dmp = new DateMathParser(); - dmp.setNow((Date)value); - return dmp.parseMath(gap); - } - } - - private static class CurrencyCalc extends Calc { - private String defaultCurrencyCode; - private ExchangeRateProvider exchangeRateProvider; - public CurrencyCalc(final SchemaField field) { - super(field); - if(!(this.field.getType() 
instanceof CurrencyFieldType)) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "Cannot perform range faceting over non CurrencyField fields"); - } - defaultCurrencyCode = - ((CurrencyFieldType)this.field.getType()).getDefaultCurrency(); - exchangeRateProvider = - ((CurrencyFieldType)this.field.getType()).getProvider(); - } - - /** - * Throws a Server Error that this type of operation is not supported for this field - * {@inheritDoc} - */ - @Override - public Comparable bitsToValue(long bits) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, - "Currency Field " + field.getName() + " can not be used in this way"); - } - - /** - * Throws a Server Error that this type of operation is not supported for this field - * {@inheritDoc} - */ - @Override - public long bitsToSortableBits(long bits) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, - "Currency Field " + field.getName() + " can not be used in this way"); - } - - /** - * Returns the short string representation of the CurrencyValue - * @see CurrencyValue#strValue - */ - @Override - public Object buildRangeLabel(Comparable low) { - return ((CurrencyValue)low).strValue(); - } - - @Override - public String formatValue(Comparable val) { - return ((CurrencyValue)val).strValue(); - } - - @Override - protected Comparable parseStr(final String rawval) throws java.text.ParseException { - return CurrencyValue.parse(rawval, defaultCurrencyCode); - } - - @Override - protected Object parseGap(final String rawval) throws java.text.ParseException { - return parseStr(rawval); - } - - @Override - protected Comparable parseAndAddGap(Comparable value, String gap) throws java.text.ParseException{ - if (value == null) { - throw new NullPointerException("Cannot perform range faceting on null CurrencyValue"); - } - CurrencyValue val = (CurrencyValue) value; - CurrencyValue gapCurrencyValue = - CurrencyValue.parse(gap, defaultCurrencyCode); - long gapAmount = - CurrencyValue.convertAmount(this.exchangeRateProvider, - gapCurrencyValue.getCurrencyCode(), - gapCurrencyValue.getAmount(), - val.getCurrencyCode()); - return new CurrencyValue(val.getAmount() + gapAmount, - val.getCurrencyCode()); - - } - - } - - protected SimpleOrderedMap refineFacets() throws IOException { - // this refineFacets method is patterned after FacetFieldProcessor.refineFacets such that - // the same "_s" skip bucket syntax is used and FacetRangeMerger can subclass FacetRequestSortedMerger - // for dealing with them & the refinement requests. - // - // But range faceting does *NOT* use the "leaves" and "partial" syntax - // - // If/When range facet becomes more like field facet in it's ability to sort and limit the "range buckets" - // FacetRangeProcessor and FacetFieldProcessor should probably be refactored to share more code. - - boolean skipThisFacet = (fcontext.flags & SKIP_FACET) != 0; - - List skip = FacetFieldProcessor.asList(fcontext.facetInfo.get("_s")); // We have seen this bucket, so skip stats on it, and skip sub-facets except for the specified sub-facets that should calculate specified buckets. - - // sanity check our merger's super class didn't send us something we can't handle ... 
- assert 0 == FacetFieldProcessor.asList(fcontext.facetInfo.get("_l")).size(); - assert 0 == FacetFieldProcessor.asList(fcontext.facetInfo.get("_p")).size(); - - SimpleOrderedMap res = new SimpleOrderedMap<>(); - List bucketList = new ArrayList<>( skip.size() ); - res.add("buckets", bucketList); - - // TODO: an alternate implementations can fill all accs at once - createAccs(-1, 1); - - for (List bucketAndFacetInfo : skip) { - assert bucketAndFacetInfo.size() == 2; - Object bucketVal = bucketAndFacetInfo.get(0); - Map facetInfo = (Map) bucketAndFacetInfo.get(1); - - bucketList.add( refineBucket(bucketVal, true, facetInfo ) ); - } - - { // refine the special "other" buckets - - // NOTE: we're re-using this variable for each special we look for... - Map specialFacetInfo; - - specialFacetInfo = (Map) fcontext.facetInfo.get(FacetRangeOther.BEFORE.toString()); - if (null != specialFacetInfo) { - res.add(FacetRangeOther.BEFORE.toString(), - refineRange(buildBeforeRange(), skipThisFacet, specialFacetInfo)); - } - - specialFacetInfo = (Map) fcontext.facetInfo.get(FacetRangeOther.AFTER.toString()); - if (null != specialFacetInfo) { - res.add(FacetRangeOther.AFTER.toString(), - refineRange(buildAfterRange(), skipThisFacet, specialFacetInfo)); - } - - specialFacetInfo = (Map) fcontext.facetInfo.get(FacetRangeOther.BETWEEN.toString()); - if (null != specialFacetInfo) { - res.add(FacetRangeOther.BETWEEN.toString(), - refineRange(buildBetweenRange(), skipThisFacet, specialFacetInfo)); - } - } - - return res; - } - - /** - * Returns the "Actual End" value sent from the merge as part of the refinement request (if any) - * or re-computes it as needed using the Calc and caches the result for re-use - */ - private Comparable getOrComputeActualEndForRefinement() { - if (null != actual_end) { - return actual_end; - } - - if (freq.hardend) { - actual_end = this.end; - } else if (fcontext.facetInfo.containsKey(FacetRange.ACTUAL_END_JSON_KEY)) { - actual_end = calc.getValue(fcontext.facetInfo.get(FacetRange.ACTUAL_END_JSON_KEY).toString()); - } else { - // a quick and dirty loop over the ranges (we don't need) to compute the actual_end... - Comparable low = start; - while (low.compareTo(end) < 0) { - Comparable high = calc.addGap(low, gap); - if (end.compareTo(high) < 0) { - actual_end = high; - break; - } - if (high.compareTo(low) <= 0) { - throw new SolrException - (SolrException.ErrorCode.BAD_REQUEST, - "Garbage input for facet refinement w/o " + FacetRange.ACTUAL_END_JSON_KEY); - } - low = high; - } - } - - assert null != actual_end; - return actual_end; - } - - private SimpleOrderedMap refineBucket(Object bucketVal, boolean skip, Map facetInfo) throws IOException { - - String val = bucketVal.toString(); - if (ranges != null) { - try { - Range range = parseRangeFromString(val, val); - final SimpleOrderedMap bucket = refineRange(range, skip, facetInfo); - bucket.add("val", range.label); - return bucket; - } catch (SyntaxError e) { - // execution won't reach here as ranges are already validated - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e); - } - } - - Comparable low = calc.getValue(val); - Comparable high = calc.addGap(low, gap); - Comparable max_end = end; - if (end.compareTo(high) < 0) { - if (freq.hardend) { - high = max_end; - } else { - max_end = high; - } - } - if (high.compareTo(low) < 0) { - throw new SolrException - (SolrException.ErrorCode.BAD_REQUEST, - "range facet infinite loop (is gap negative? 
did the math overflow?)"); - } - if (high.compareTo(low) == 0) { - throw new SolrException - (SolrException.ErrorCode.BAD_REQUEST, - "range facet infinite loop: gap is either zero, or too small relative start/end and caused underflow: " + low + " + " + gap + " = " + high ); - } - - boolean incLower = (include.contains(FacetRangeInclude.LOWER) || - (include.contains(FacetRangeInclude.EDGE) && 0 == low.compareTo(start))); - boolean incUpper = (include.contains(FacetRangeInclude.UPPER) || - (include.contains(FacetRangeInclude.EDGE) && 0 == high.compareTo(max_end))); - - Range range = new Range(calc.buildRangeLabel(low), low, high, incLower, incUpper); - - // now refine this range - - final SimpleOrderedMap bucket = refineRange(range, skip, facetInfo); - bucket.add("val", range.label); - - return bucket; - } - - /** Helper method for refining a Range - * @see #fillBucket - */ - private SimpleOrderedMap refineRange(Range range, boolean skip, Map facetInfo) throws IOException { - final SimpleOrderedMap bucket = new SimpleOrderedMap<>(); - final Query domainQ = sf.getType().getRangeQuery(null, sf, range.low == null ? null : calc.formatValue(range.low), range.high==null ? null : calc.formatValue(range.high), range.includeLower, range.includeUpper); - fillBucket(bucket, domainQ, null, skip, facetInfo); - return bucket; - } - - /** Helper method for building a "before" Range */ - private Range buildBeforeRange() { - // include upper bound if "outer" or if first gap doesn't already include it - final boolean incUpper = (include.contains(FacetRangeInclude.OUTER) || - (!(include.contains(FacetRangeInclude.LOWER) || - include.contains(FacetRangeInclude.EDGE)))); - return new Range(FacetRangeOther.BEFORE.toString(), null, start, false, incUpper); - } - - /** Helper method for building a "after" Range */ - private Range buildAfterRange() { - final Comparable the_end = getOrComputeActualEndForRefinement(); - assert null != the_end; - final boolean incLower = (include.contains(FacetRangeInclude.OUTER) || - (!(include.contains(FacetRangeInclude.UPPER) || - include.contains(FacetRangeInclude.EDGE)))); - return new Range(FacetRangeOther.AFTER.toString(), the_end, null, incLower, false); - } - - /** Helper method for building a "between" Range */ - private Range buildBetweenRange() { - final Comparable the_end = getOrComputeActualEndForRefinement(); - assert null != the_end; - final boolean incLower = (include.contains(FacetRangeInclude.LOWER) || - include.contains(FacetRangeInclude.EDGE)); - final boolean incUpper = (include.contains(FacetRangeInclude.UPPER) || - include.contains(FacetRangeInclude.EDGE)); - return new Range(FacetRangeOther.BETWEEN.toString(), start, the_end, incLower, incUpper); - } } diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRangeMerger.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRangeMerger.java index aa7112e30181..5aaafa1c87c7 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetRangeMerger.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRangeMerger.java @@ -89,7 +89,7 @@ private Map getRefinementSpecial(Context mcontext, Map bucketList = (List) facetResult.get("buckets"); mergeBucketList(bucketList , mcontext); } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public Object getMergedResult() { // TODO: use sortedBuckets SimpleOrderedMap result = new SimpleOrderedMap(4); diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRangeParser.java 
b/solr/core/src/java/org/apache/solr/search/facet/FacetRangeParser.java new file mode 100644 index 000000000000..4098450e6e36 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRangeParser.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.search.facet; + +import org.apache.solr.common.params.FacetParams; +import org.apache.solr.search.SyntaxError; + +import java.util.EnumSet; +import java.util.List; +import java.util.Map; + +class FacetRangeParser extends FacetParser { + @SuppressWarnings({"rawtypes"}) + public FacetRangeParser(FacetParser parent, String key) { + super(parent, key); + facet = new FacetRange(); + } + + public FacetRange parse(Object arg) throws SyntaxError { + parseCommonParams(arg); + + if (!(arg instanceof Map)) { + throw err("Missing range facet arguments"); + } + + @SuppressWarnings({"unchecked"}) + Map m = (Map) arg; + + facet.field = getString(m, "field", null); + facet.ranges = getVal(m, "ranges", false); + + boolean required = facet.ranges == null; + facet.start = getVal(m, "start", required); + facet.end = getVal(m, "end", required); + facet.gap = getVal(m, "gap", required); + facet.hardend = getBoolean(m, "hardend", facet.hardend); + facet.mincount = getLong(m, "mincount", 0); + + // TODO: refactor list-of-options code + + List list = getStringList(m, "include", false); + String[] includeList = null; + if (list != null) { + includeList = list.toArray(new String[list.size()]); + } + facet.include = FacetParams.FacetRangeInclude.parseParam( includeList ); + facet.others = EnumSet.noneOf(FacetParams.FacetRangeOther.class); + + List other = getStringList(m, "other", false); + if (other != null) { + for (String otherStr : other) { + facet.others.add( FacetParams.FacetRangeOther.get(otherStr) ); + } + } + + Object facetObj = m.get("facet"); + parseSubs(facetObj); + + return facet; + } + +} diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRangeProcessor.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRangeProcessor.java new file mode 100644 index 000000000000..7319d284a764 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRangeProcessor.java @@ -0,0 +1,1077 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
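For reference, a minimal sketch (not part of the patch) of the two argument shapes the FacetRangeParser above accepts. The parameter names -- field, start, end, gap, hardend, mincount, include, other, ranges -- are taken from the parser code; the field name "price" and all values are invented:

public class RangeFacetRequestSketch {
  public static void main(String[] args) {
    // Classic form: start/end/gap are required when "ranges" is absent.
    String classic = String.join("\n",
        "{ prices: { type: 'range', field: 'price',",
        "            start: 0, end: 100, gap: 20,",
        "            hardend: false, mincount: 0,",
        "            include: ['lower', 'edge'],",    // FacetParams.FacetRangeInclude values
        "            other: ['before', 'after'] } }"); // FacetParams.FacetRangeOther values
    // Arbitrary-ranges form: each entry is either the map format handled by
    // getRangeByNewFormat or the string format handled by getRangeByOldFormat.
    String arbitrary = String.join("\n",
        "{ prices: { type: 'range', field: 'price', ranges: [",
        "    { from: 0, to: 20, inclusive_from: true, inclusive_to: false },",
        "    { range: '[20,40)' } ] } }");
    System.out.println(classic);
    System.out.println(arbitrary);
  }
}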
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.search.facet; + + +import org.apache.lucene.search.Query; +import org.apache.lucene.util.NumericUtils; +import org.apache.solr.common.SolrException; +import org.apache.solr.common.params.FacetParams; +import org.apache.solr.common.util.SimpleOrderedMap; +import org.apache.solr.schema.*; +import org.apache.solr.search.DocSet; +import org.apache.solr.search.SyntaxError; +import org.apache.solr.util.DateMathParser; + +import java.io.IOException; +import java.util.*; + +import static org.apache.solr.search.facet.FacetContext.SKIP_FACET; + +class FacetRangeProcessor extends FacetProcessor { + // TODO: the code paths for initial faceting, vs refinement, are very different... + // TODO: ...it might make sense to have seperate classes w/a common base? + // TODO: let FacetRange.createFacetProcessor decide which one to instantiate? + + final SchemaField sf; + final Calc calc; + final EnumSet include; + final long effectiveMincount; + @SuppressWarnings({"rawtypes"}) + final Comparable start; + @SuppressWarnings({"rawtypes"}) + final Comparable end; + final String gap; + final Object ranges; + + /** Build by {@link #createRangeList} if and only if needed for basic faceting */ + List rangeList; + /** Build by {@link #createRangeList} if and only if needed for basic faceting */ + List otherList; + + /** + * Serves two purposes depending on the type of request. + *
+ * <ul>
+ * <li>If this is a phase#1 shard request, then {@link #createRangeList} will set this value (non null)
+ * if and only if it is needed for refinement (ie: hardend:false & other
+ * that requires an end value low/high value calculation). And it will be included in the response</li>
+ * <li>If this is a phase#2 refinement request, this variable will be used by
+ * {@link #getOrComputeActualEndForRefinement} to track the value sent with the refinement request
+ * -- or to cache a recomputed value if the request omitted it -- for use in refining the
+ * other buckets that need them</li>
+ * </ul>
    + */ + @SuppressWarnings({"rawtypes"}) + Comparable actual_end = null; // null until/unless we need it + + FacetRangeProcessor(FacetContext fcontext, FacetRange freq) { + super(fcontext, freq); + include = freq.include; + sf = fcontext.searcher.getSchema().getField(freq.field); + calc = getCalcForField(sf); + if (freq.ranges != null && (freq.start != null || freq.end != null || freq.gap != null)) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Cannot set gap/start/end and ranges params together"); + } + if (freq.ranges != null) { + ranges = freq.ranges; + start = null; + end = null; + gap = null; + } else { + start = calc.getValue(freq.start.toString()); + end = calc.getValue(freq.end.toString()); + gap = freq.gap.toString(); + ranges = null; + } + + // Under the normal mincount=0, each shard will need to return 0 counts since we don't calculate buckets at the top level. + // If mincount>0 then we could *potentially* set our sub mincount to 1... + // ...but that would require sorting the buckets (by their val) at the top level + // + // Rather then do that, which could be complicated by non trivial field types, we'll force the sub-shard effectiveMincount + // to be 0, ensuring that we can trivially merge all the buckets from every shard + // (we have to filter the merged buckets by the original mincount either way) + effectiveMincount = fcontext.isShard() ? 0 : freq.mincount; + } + + @Override + @SuppressWarnings({"unchecked"}) + public void process() throws IOException { + super.process(); + + if (fcontext.facetInfo != null) { // refinement? + response = refineFacets(); + } else { + // phase#1: build list of all buckets and return full facets... + createRangeList(); + response = getRangeCountsIndexed(); + } + } + + @SuppressWarnings({"rawtypes"}) + private static class Range { + Object label; + + Comparable low; + Comparable high; + boolean includeLower; + boolean includeUpper; + + public Range(Object label, Comparable low, Comparable high, boolean includeLower, boolean includeUpper) { + this.label = label; + this.low = low; + this.high = high; + this.includeLower = includeLower; + this.includeUpper = includeUpper; + } + } + + /** + * Returns a {@link Calc} instance to use for term faceting over a numeric field. + * This method is unused for range faceting, and exists solely as a helper method for other classes + * + * @param sf A field to facet on, must be of a type such that {@link FieldType#getNumberType} is non null + * @return a Calc instance with {@link Calc#bitsToValue} and {@link Calc#bitsToSortableBits} methods suitable for the specified field. 
+ * @see FacetFieldProcessorByHashDV + */ + public static Calc getNumericCalc(SchemaField sf) { + Calc calc; + final FieldType ft = sf.getType(); + + if (ft instanceof TrieField || ft.isPointField()) { + switch (ft.getNumberType()) { + case FLOAT: + calc = new FloatCalc(sf); + break; + case DOUBLE: + calc = new DoubleCalc(sf); + break; + case INTEGER: + calc = new IntCalc(sf); + break; + case LONG: + calc = new LongCalc(sf); + break; + case DATE: + calc = new DateCalc(sf, null); + break; + default: + throw new SolrException + (SolrException.ErrorCode.BAD_REQUEST, + "Expected numeric field type :" + sf); + } + } else { + throw new SolrException + (SolrException.ErrorCode.BAD_REQUEST, + "Expected numeric field type :" + sf); + } + return calc; + } + + /** + * Helper method used in processor constructor + * @return a Calc instance with {@link Calc#bitsToValue} and {@link Calc#bitsToSortableBits} methods suitable for the specified field. + */ + private static Calc getCalcForField(SchemaField sf) { + final FieldType ft = sf.getType(); + if (ft instanceof TrieField || ft.isPointField()) { + switch (ft.getNumberType()) { + case FLOAT: + return new FloatCalc(sf); + case DOUBLE: + return new DoubleCalc(sf); + case INTEGER: + return new IntCalc(sf); + case LONG: + return new LongCalc(sf); + case DATE: + return new DateCalc(sf, null); + default: + throw new SolrException + (SolrException.ErrorCode.BAD_REQUEST, + "Unable to range facet on numeric field of unexpected type:" + sf.getName()); + } + } else if (ft instanceof CurrencyFieldType) { + return new CurrencyCalc(sf); + } else if (ft instanceof DateRangeField) { + return new DateCalc(sf, null); + } + + // if we made it this far, we have no idea what it is... + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Unable to range facet on field:" + sf.getName()); + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + private void createRangeList() throws IOException { + + rangeList = new ArrayList<>(); + otherList = new ArrayList<>(3); + + Comparable low = start; + Comparable loop_end = this.end; + + if (ranges != null) { + rangeList.addAll(parseRanges(ranges)); + return; + } + + while (low.compareTo(end) < 0) { + Comparable high = calc.addGap(low, gap); + if (end.compareTo(high) < 0) { + if (freq.hardend) { + high = loop_end; + } else { + loop_end = high; + } + } + if (high.compareTo(low) < 0) { + throw new SolrException + (SolrException.ErrorCode.BAD_REQUEST, + "range facet infinite loop (is gap negative? did the math overflow?)"); + } + if (high.compareTo(low) == 0) { + throw new SolrException + (SolrException.ErrorCode.BAD_REQUEST, + "range facet infinite loop: gap is either zero, or too small relative start/end and caused underflow: " + low + " + " + gap + " = " + high); + } + + boolean incLower = (include.contains(FacetParams.FacetRangeInclude.LOWER) || + (include.contains(FacetParams.FacetRangeInclude.EDGE) && 0 == low.compareTo(start))); + boolean incUpper = (include.contains(FacetParams.FacetRangeInclude.UPPER) || + (include.contains(FacetParams.FacetRangeInclude.EDGE) && 0 == high.compareTo(end))); + + Range range = new Range(calc.buildRangeLabel(low), low, high, incLower, incUpper); + rangeList.add( range ); + + low = high; + } + + // no matter what other values are listed, we don't do + // anything if "none" is specified. + if (! 
freq.others.contains(FacetParams.FacetRangeOther.NONE) ) { + final boolean all = freq.others.contains(FacetParams.FacetRangeOther.ALL); + + if (all || freq.others.contains(FacetParams.FacetRangeOther.BEFORE)) { + otherList.add( buildBeforeRange() ); + } + if (all || freq.others.contains(FacetParams.FacetRangeOther.AFTER)) { + actual_end = loop_end; + otherList.add( buildAfterRange() ); + } + if (all || freq.others.contains(FacetParams.FacetRangeOther.BETWEEN)) { + actual_end = loop_end; + otherList.add( buildBetweenRange() ); + } + } + // if we're not a shard request, or this is a hardend:true situation, then actual_end isn't needed + if (freq.hardend || (! fcontext.isShard())) { + actual_end = null; + } + } + + /** + * Parses the given list of maps and returns list of Ranges + * + * @param input - list of map containing the ranges + * @return list of {@link Range} + */ + private List parseRanges(Object input) { + if (!(input instanceof List)) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Expected List for ranges but got " + input.getClass().getSimpleName() + " = " + input + ); + } + @SuppressWarnings({"rawtypes"}) + List intervals = (List) input; + List ranges = new ArrayList<>(); + for (Object obj : intervals) { + if (!(obj instanceof Map)) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Expected Map for range but got " + obj.getClass().getSimpleName() + " = " + obj); + } + @SuppressWarnings({"unchecked"}) + Range range; + @SuppressWarnings({"unchecked"}) + Map interval = (Map) obj; + if (interval.containsKey("range")) { + range = getRangeByOldFormat(interval); + } else { + range = getRangeByNewFormat(interval); + } + ranges.add(range); + } + return ranges; + } + + private boolean getBoolean(Map args, String paramName, boolean defVal) { + Object o = args.get(paramName); + if (o == null) { + return defVal; + } + // TODO: should we be more flexible and accept things like "true" (strings)? + // Perhaps wait until the use case comes up. + if (!(o instanceof Boolean)) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Expected boolean type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o); + } + + return (Boolean)o; + } + + private String getString(Map args, String paramName, boolean required) { + Object o = args.get(paramName); + if (o == null) { + if (required) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Missing required parameter '" + paramName + "' for " + args); + } + return null; + } + if (!(o instanceof String)) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Expected string type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o); + } + + return (String)o; + } + + /** + * Parses the range given in format {from:val1, to:val2, inclusive_to:true} + * and returns the {@link Range} + * + * @param rangeMap Map containing the range info + * @return {@link Range} + */ + @SuppressWarnings({"unchecked", "rawtypes"}) + private Range getRangeByNewFormat(Map rangeMap) { + Object fromObj = rangeMap.get("from"); + Object toObj = rangeMap.get("to"); + + String fromStr = fromObj == null? "*" : fromObj.toString(); + String toStr = toObj == null? "*": toObj.toString(); + boolean includeUpper = getBoolean(rangeMap, "inclusive_to", false); + boolean includeLower = getBoolean(rangeMap, "inclusive_from", true); + + Object key = rangeMap.get("key"); + // if (key == null) { + // key = (includeLower? 
"[": "(") + fromStr + "," + toStr + (includeUpper? "]": ")"); + // } + // using the default key as custom key won't work with refine + // refine would need both low and high values + key = (includeLower? "[": "(") + fromStr + "," + toStr + (includeUpper? "]": ")"); + + Comparable from = getComparableFromString(fromStr); + Comparable to = getComparableFromString(toStr); + + if (from != null && to != null && from.compareTo(to) > 0) { + // allowing from and to be same + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "'from' is higher than 'to' in range for key: " + key); + } + + return new Range(key, from, to, includeLower, includeUpper); + } + + /** + * Parses the range string from the map and Returns {@link Range} + * + * @param range map containing the interval + * @return {@link Range} + */ + private Range getRangeByOldFormat(Map range) { + String key = getString(range, "key", false); + String rangeStr = getString(range, "range", true); + try { + return parseRangeFromString(key, rangeStr); + } catch (SyntaxError e) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e); + } + } + + /** + * Parses the given string and returns Range. + * This is adopted from {@link org.apache.solr.request.IntervalFacets} + * + * @param key The name of range which would be used as {@link Range}'s label + * @param rangeStr The string containing the Range + * @return {@link Range} + */ + @SuppressWarnings({"rawtypes", "unchecked"}) + private Range parseRangeFromString(String key, String rangeStr) throws SyntaxError { + rangeStr = rangeStr.trim(); + if (rangeStr.isEmpty()) { + throw new SyntaxError("empty facet range"); + } + + boolean includeLower = true, includeUpper = true; + Comparable start = null, end = null; + if (rangeStr.charAt(0) == '(') { + includeLower = false; + } else if (rangeStr.charAt(0) != '[') { + throw new SyntaxError( "Invalid start character " + rangeStr.charAt(0) + " in facet range " + rangeStr); + } + + final int lastNdx = rangeStr.length() - 1; + if (rangeStr.charAt(lastNdx) == ')') { + includeUpper = false; + } else if (rangeStr.charAt(lastNdx) != ']') { + throw new SyntaxError("Invalid end character " + rangeStr.charAt(lastNdx) + " in facet range " + rangeStr); + } + + StringBuilder startStr = new StringBuilder(lastNdx); + int i = unescape(rangeStr, 1, lastNdx, startStr); + if (i == lastNdx) { + if (rangeStr.charAt(lastNdx - 1) == ',') { + throw new SyntaxError("Empty range limit"); + } + throw new SyntaxError("Missing unescaped comma separating range ends in " + rangeStr); + } + start = getComparableFromString(startStr.toString()); + + StringBuilder endStr = new StringBuilder(lastNdx); + i = unescape(rangeStr, i, lastNdx, endStr); + if (i != lastNdx) { + throw new SyntaxError("Extra unescaped comma at index " + i + " in range " + rangeStr); + } + end = getComparableFromString(endStr.toString()); + + if (start != null && end != null && start.compareTo(end) > 0) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "'start' is higher than 'end' in range for key: " + rangeStr); + } + + // not using custom key as it won't work with refine + // refine would need both low and high values + return new Range(rangeStr, start, end, includeLower, includeUpper); + } + + /* Fill in sb with a string from i to the first unescaped comma, or n. 
+ Return the index past the unescaped comma, or n if no unescaped comma exists */ + private int unescape(String s, int i, int n, StringBuilder sb) throws SyntaxError { + for (; i < n; ++i) { + char c = s.charAt(i); + if (c == '\\') { + ++i; + if (i < n) { + c = s.charAt(i); + } else { + throw new SyntaxError("Unfinished escape at index " + i + " in facet range " + s); + } + } else if (c == ',') { + return i + 1; + } + sb.append(c); + } + return n; + } + + @SuppressWarnings({"rawtypes"}) + private Comparable getComparableFromString(String value) { + value = value.trim(); + if ("*".equals(value)) { + return null; + } + return calc.getValue(value); + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + private SimpleOrderedMap getRangeCountsIndexed() throws IOException { + + int slotCount = rangeList.size() + otherList.size(); + intersections = new DocSet[slotCount]; + filters = new Query[slotCount]; + + + createAccs(fcontext.base.size(), slotCount); + + for (int idx = 0; idx(); + List buckets = new ArrayList<>(); + res.add("buckets", buckets); + + for (int idx = 0; idx 0 && countAcc.getCount(idx) < effectiveMincount) continue; + Range range = rangeList.get(idx); + SimpleOrderedMap bucket = new SimpleOrderedMap(); + buckets.add(bucket); + bucket.add("val", range.label); + addStats(bucket, idx); + doSubs(bucket, idx); + } + + for (int idx = 0; idx { return new SlotAcc.SlotContext(rangeQ); }); + countAcc.incrementCount(slot, num); // TODO: roll this into collect() + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + private void doSubs(SimpleOrderedMap bucket, int slot) throws IOException { + // handle sub-facets for this bucket + if (freq.getSubFacets().size() > 0) { + DocSet subBase = intersections[slot]; + try { + processSubs(bucket, filters[slot], subBase, false, null); + } finally { + // subContext.base.decref(); // OFF-HEAP + // subContext.base = null; // do not modify context after creation... there may be deferred execution (i.e. streaming) + } + } + } + + // Essentially copied from SimpleFacets... + // would be nice to unify this stuff w/ analytics component... + /** + * Perhaps someday instead of having a giant "instanceof" case + * statement to pick an impl, we can add a "RangeFacetable" marker + * interface to FieldTypes and they can return instances of these + * directly from some method -- but until then, keep this locked down + * and private. + */ + static abstract class Calc { + protected final SchemaField field; + public Calc(final SchemaField field) { + this.field = field; + } + + /** + * Used by {@link FacetFieldProcessorByHashDV} for field faceting on numeric types -- not used for range faceting + */ + @SuppressWarnings({"rawtypes"}) + public Comparable bitsToValue(long bits) { + return bits; + } + + /** + * Used by {@link FacetFieldProcessorByHashDV} for field faceting on numeric types -- not used for range faceting + */ + public long bitsToSortableBits(long bits) { + return bits; + } + + /** + * Given the low value for a bucket, generates the appropriate "label" object to use. + * By default return the low object unmodified. 
+ */ + public Object buildRangeLabel(@SuppressWarnings("rawtypes") Comparable low) { + return low; + } + + /** + * Formats a value into a label used in a response + * Default Impl just uses toString() + */ + public String formatValue(@SuppressWarnings("rawtypes") final Comparable val) { + return val.toString(); + } + + /** + * Parses a String param into a value throwing + * an exception if not possible + */ + @SuppressWarnings({"rawtypes"}) + public final Comparable getValue(final String rawval) { + try { + return parseStr(rawval); + } catch (Exception e) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Can't parse value "+rawval+" for field: " + + field.getName(), e); + } + } + + /** + * Parses a String param into a value. + * Can throw a low level format exception as needed. + */ + @SuppressWarnings({"rawtypes"}) + protected abstract Comparable parseStr(final String rawval) + throws java.text.ParseException; + + /** + * Parses a String param into a value that represents the gap and + * can be included in the response, throwing + * a useful exception if not possible. + * + * Note: uses Object as the return type instead of T for things like + * Date where gap is just a DateMathParser string + */ + public final Object getGap(final String gap) { + try { + return parseGap(gap); + } catch (Exception e) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Can't parse gap "+gap+" for field: " + + field.getName(), e); + } + } + + /** + * Parses a String param into a value that represents the gap and + * can be included in the response. + * Can throw a low level format exception as needed. + * + * Default Impl calls parseVal + */ + protected Object parseGap(final String rawval) throws java.text.ParseException { + return parseStr(rawval); + } + + /** + * Adds the String gap param to a low Range endpoint value to determine + * the corresponding high Range endpoint value, throwing + * a useful exception if not possible. + */ + @SuppressWarnings({"rawtypes"}) + public final Comparable addGap(Comparable value, String gap) { + try { + return parseAndAddGap(value, gap); + } catch (Exception e) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Can't add gap "+gap+" to value " + value + + " for field: " + field.getName(), e); + } + } + /** + * Adds the String gap param to a low Range endpoint value to determine + * the corresponding high Range endpoint value. + * Can throw a low level format exception as needed. 
+ */ + @SuppressWarnings({"rawtypes"}) + protected abstract Comparable parseAndAddGap(Comparable value, String gap) + throws java.text.ParseException; + + } + + private static class FloatCalc extends Calc { + + @SuppressWarnings("rawtypes") + @Override + public Comparable bitsToValue(long bits) { + if (field.getType().isPointField() && field.multiValued()) { + return NumericUtils.sortableIntToFloat((int)bits); + } else { + return Float.intBitsToFloat( (int)bits ); + } + } + + @Override + public long bitsToSortableBits(long bits) { + return NumericUtils.sortableDoubleBits(bits); + } + + public FloatCalc(final SchemaField f) { super(f); } + @Override + protected Float parseStr(String rawval) { + return Float.valueOf(rawval); + } + @Override + public Float parseAndAddGap(@SuppressWarnings("rawtypes") Comparable value, String gap) { + return ((Number) value).floatValue() + Float.parseFloat(gap); + } + } + + private static class DoubleCalc extends Calc { + @Override + @SuppressWarnings({"rawtypes"}) + public Comparable bitsToValue(long bits) { + if (field.getType().isPointField() && field.multiValued()) { + return NumericUtils.sortableLongToDouble(bits); + } else { + return Double.longBitsToDouble(bits); + } + } + + @Override + public long bitsToSortableBits(long bits) { + return NumericUtils.sortableDoubleBits(bits); + } + + public DoubleCalc(final SchemaField f) { super(f); } + @Override + protected Double parseStr(String rawval) { + return Double.valueOf(rawval); + } + @Override + public Double parseAndAddGap(@SuppressWarnings("rawtypes") Comparable value, String gap) { + return ((Number) value).doubleValue() + Double.parseDouble(gap); + } + } + + private static class IntCalc extends Calc { + + public IntCalc(final SchemaField f) { super(f); } + @Override + @SuppressWarnings({"rawtypes"}) + public Comparable bitsToValue(long bits) { + return (int)bits; + } + @Override + protected Integer parseStr(String rawval) { + return Integer.valueOf(rawval); + } + @Override + public Integer parseAndAddGap(@SuppressWarnings("rawtypes") Comparable value, String gap) { + return ((Number) value).intValue() + Integer.parseInt(gap); + } + } + + private static class LongCalc extends Calc { + + public LongCalc(final SchemaField f) { super(f); } + @Override + protected Long parseStr(String rawval) { + return Long.valueOf(rawval); + } + @Override + public Long parseAndAddGap(@SuppressWarnings("rawtypes") Comparable value, String gap) { + return ((Number) value).longValue() + Long.parseLong(gap); + } + } + + private static class DateCalc extends Calc { + private final Date now; + public DateCalc(final SchemaField f, + final Date now) { + super(f); + this.now = now; + if (!(field.getType() instanceof TrieDateField || field.getType().isPointField() || + field.getType() instanceof DateRangeField)) { + throw new IllegalArgumentException("SchemaField must use field type extending TrieDateField, DateRangeField or PointField"); + } + } + + @Override + @SuppressWarnings({"rawtypes"}) + public Comparable bitsToValue(long bits) { + return new Date(bits); + } + + @Override + public String formatValue(@SuppressWarnings("rawtypes") Comparable val) { + return ((Date)val).toInstant().toString(); + } + @Override + protected Date parseStr(String rawval) { + return DateMathParser.parseMath(now, rawval); + } + @Override + protected Object parseGap(final String rawval) { + return rawval; + } + @Override + public Date parseAndAddGap(@SuppressWarnings("rawtypes") Comparable value, String gap) throws java.text.ParseException { + final 
DateMathParser dmp = new DateMathParser(); + dmp.setNow((Date)value); + return dmp.parseMath(gap); + } + } + + private static class CurrencyCalc extends Calc { + private String defaultCurrencyCode; + private ExchangeRateProvider exchangeRateProvider; + public CurrencyCalc(final SchemaField field) { + super(field); + if(!(this.field.getType() instanceof CurrencyFieldType)) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Cannot perform range faceting over non CurrencyField fields"); + } + defaultCurrencyCode = + ((CurrencyFieldType)this.field.getType()).getDefaultCurrency(); + exchangeRateProvider = + ((CurrencyFieldType)this.field.getType()).getProvider(); + } + + /** + * Throws a Server Error that this type of operation is not supported for this field + * {@inheritDoc} + */ + @Override + @SuppressWarnings({"rawtypes"}) + public Comparable bitsToValue(long bits) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, + "Currency Field " + field.getName() + " can not be used in this way"); + } + + /** + * Throws a Server Error that this type of operation is not supported for this field + * {@inheritDoc} + */ + @Override + public long bitsToSortableBits(long bits) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, + "Currency Field " + field.getName() + " can not be used in this way"); + } + + /** + * Returns the short string representation of the CurrencyValue + * @see CurrencyValue#strValue + */ + @Override + public Object buildRangeLabel(@SuppressWarnings("rawtypes") Comparable low) { + return ((CurrencyValue)low).strValue(); + } + + @Override + public String formatValue(@SuppressWarnings("rawtypes") Comparable val) { + return ((CurrencyValue)val).strValue(); + } + + @Override + @SuppressWarnings({"rawtypes"}) + protected Comparable parseStr(final String rawval) throws java.text.ParseException { + return CurrencyValue.parse(rawval, defaultCurrencyCode); + } + + @Override + protected Object parseGap(final String rawval) throws java.text.ParseException { + return parseStr(rawval); + } + + @Override + @SuppressWarnings({"rawtypes"}) + protected Comparable parseAndAddGap(Comparable value, String gap) throws java.text.ParseException{ + if (value == null) { + throw new NullPointerException("Cannot perform range faceting on null CurrencyValue"); + } + CurrencyValue val = (CurrencyValue) value; + CurrencyValue gapCurrencyValue = + CurrencyValue.parse(gap, defaultCurrencyCode); + long gapAmount = + CurrencyValue.convertAmount(this.exchangeRateProvider, + gapCurrencyValue.getCurrencyCode(), + gapCurrencyValue.getAmount(), + val.getCurrencyCode()); + return new CurrencyValue(val.getAmount() + gapAmount, + val.getCurrencyCode()); + + } + + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + protected SimpleOrderedMap refineFacets() throws IOException { + // this refineFacets method is patterned after FacetFieldProcessor.refineFacets such that + // the same "_s" skip bucket syntax is used and FacetRangeMerger can subclass FacetRequestSortedMerger + // for dealing with them & the refinement requests. + // + // But range faceting does *NOT* use the "leaves" and "partial" syntax + // + // If/When range facet becomes more like field facet in its ability to sort and limit the "range buckets" + // FacetRangeProcessor and FacetFieldProcessor should probably be refactored to share more code.
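    // ------------------------------------------------------------------
    // Editor's aside (illustrative only, not part of the original patch): the refinement
    // logic below leans on the Calc hierarchy defined above, whose parseAndAddGap() maps
    // "low + gap" onto a type-appropriate operation.  Hypothetical worked values:
    //
    //   IntCalc:      parseAndAddGap(100, "25")  ->  125   (plain integer addition)
    //   DateCalc:     parseAndAddGap(2020-01-01T00:00:00Z, "+1MONTH")
    //                     -> 2020-02-01T00:00:00Z  (DateMathParser applied relative to the
    //                        low endpoint, not numeric addition)
    //   CurrencyCalc: converts the gap (e.g. "4.50,EUR") into the low value's currency via
    //                 the ExchangeRateProvider, then adds the amounts.
    // ------------------------------------------------------------------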
+ + boolean skipThisFacet = (fcontext.flags & SKIP_FACET) != 0; + + List skip = FacetFieldProcessor.asList(fcontext.facetInfo.get("_s")); // We have seen this bucket, so skip stats on it, and skip sub-facets except for the specified sub-facets that should calculate specified buckets. + + // sanity check our merger's super class didn't send us something we can't handle ... + assert 0 == FacetFieldProcessor.asList(fcontext.facetInfo.get("_l")).size(); + assert 0 == FacetFieldProcessor.asList(fcontext.facetInfo.get("_p")).size(); + + SimpleOrderedMap res = new SimpleOrderedMap<>(); + List bucketList = new ArrayList<>( skip.size() ); + res.add("buckets", bucketList); + + // TODO: an alternate implementation can fill all accs at once + createAccs(-1, 1); + + for (List bucketAndFacetInfo : skip) { + assert bucketAndFacetInfo.size() == 2; + Object bucketVal = bucketAndFacetInfo.get(0); + Map facetInfo = (Map) bucketAndFacetInfo.get(1); + + bucketList.add( refineBucket(bucketVal, true, facetInfo ) ); + } + + { // refine the special "other" buckets + + // NOTE: we're re-using this variable for each special we look for... + Map specialFacetInfo; + + specialFacetInfo = (Map) fcontext.facetInfo.get(FacetParams.FacetRangeOther.BEFORE.toString()); + if (null != specialFacetInfo) { + res.add(FacetParams.FacetRangeOther.BEFORE.toString(), + refineRange(buildBeforeRange(), skipThisFacet, specialFacetInfo)); + } + + specialFacetInfo = (Map) fcontext.facetInfo.get(FacetParams.FacetRangeOther.AFTER.toString()); + if (null != specialFacetInfo) { + res.add(FacetParams.FacetRangeOther.AFTER.toString(), + refineRange(buildAfterRange(), skipThisFacet, specialFacetInfo)); + } + + specialFacetInfo = (Map) fcontext.facetInfo.get(FacetParams.FacetRangeOther.BETWEEN.toString()); + if (null != specialFacetInfo) { + res.add(FacetParams.FacetRangeOther.BETWEEN.toString(), + refineRange(buildBetweenRange(), skipThisFacet, specialFacetInfo)); + } + } + + return res; + } + + /** + * Returns the "Actual End" value sent from the merge as part of the refinement request (if any) + * or re-computes it as needed using the Calc and caches the result for re-use + */ + @SuppressWarnings({"rawtypes", "unchecked"}) + private Comparable getOrComputeActualEndForRefinement() { + if (null != actual_end) { + return actual_end; + } + + if (freq.hardend) { + actual_end = this.end; + } else if (fcontext.facetInfo.containsKey(FacetRange.ACTUAL_END_JSON_KEY)) { + actual_end = calc.getValue(fcontext.facetInfo.get(FacetRange.ACTUAL_END_JSON_KEY).toString()); + } else { + // a quick and dirty loop over the ranges (we don't need) to compute the actual_end...
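    // Editor's aside (illustrative only, not part of the original patch): with start=0,
    // end=10, gap="3" and hardend=false, the walk below visits bucket boundaries 3, 6, 9, 12
    // and stops at the first boundary past 'end', so actual_end=12; with hardend=true the
    // branch above would simply have used end (10) itself.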
+ Comparable low = start; + while (low.compareTo(end) < 0) { + Comparable high = calc.addGap(low, gap); + if (end.compareTo(high) < 0) { + actual_end = high; + break; + } + if (high.compareTo(low) <= 0) { + throw new SolrException + (SolrException.ErrorCode.BAD_REQUEST, + "Garbage input for facet refinement w/o " + FacetRange.ACTUAL_END_JSON_KEY); + } + low = high; + } + } + + assert null != actual_end; + return actual_end; + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + private SimpleOrderedMap refineBucket(Object bucketVal, boolean skip, Map facetInfo) throws IOException { + + String val = bucketVal.toString(); + if (ranges != null) { + try { + Range range = parseRangeFromString(val, val); + final SimpleOrderedMap bucket = refineRange(range, skip, facetInfo); + bucket.add("val", range.label); + return bucket; + } catch (SyntaxError e) { + // execution won't reach here as ranges are already validated + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e); + } + } + + Comparable low = calc.getValue(val); + Comparable high = calc.addGap(low, gap); + Comparable max_end = end; + if (end.compareTo(high) < 0) { + if (freq.hardend) { + high = max_end; + } else { + max_end = high; + } + } + if (high.compareTo(low) < 0) { + throw new SolrException + (SolrException.ErrorCode.BAD_REQUEST, + "range facet infinite loop (is gap negative? did the math overflow?)"); + } + if (high.compareTo(low) == 0) { + throw new SolrException + (SolrException.ErrorCode.BAD_REQUEST, + "range facet infinite loop: gap is either zero, or too small relative start/end and caused underflow: " + low + " + " + gap + " = " + high ); + } + + boolean incLower = (include.contains(FacetParams.FacetRangeInclude.LOWER) || + (include.contains(FacetParams.FacetRangeInclude.EDGE) && 0 == low.compareTo(start))); + boolean incUpper = (include.contains(FacetParams.FacetRangeInclude.UPPER) || + (include.contains(FacetParams.FacetRangeInclude.EDGE) && 0 == high.compareTo(max_end))); + + Range range = new Range(calc.buildRangeLabel(low), low, high, incLower, incUpper); + + // now refine this range + + final SimpleOrderedMap bucket = refineRange(range, skip, facetInfo); + bucket.add("val", range.label); + + return bucket; + } + + /** Helper method for refining a Range + * @see #fillBucket + */ + private SimpleOrderedMap refineRange(Range range, boolean skip, Map facetInfo) throws IOException { + final SimpleOrderedMap bucket = new SimpleOrderedMap<>(); + final Query domainQ = sf.getType().getRangeQuery(null, sf, range.low == null ? null : calc.formatValue(range.low), range.high==null ? 
null : calc.formatValue(range.high), range.includeLower, range.includeUpper); + fillBucket(bucket, domainQ, null, skip, facetInfo); + return bucket; + } + + /** Helper method for building a "before" Range */ + private Range buildBeforeRange() { + // include upper bound if "outer" or if first gap doesn't already include it + final boolean incUpper = (include.contains(FacetParams.FacetRangeInclude.OUTER) || + (!(include.contains(FacetParams.FacetRangeInclude.LOWER) || + include.contains(FacetParams.FacetRangeInclude.EDGE)))); + return new Range(FacetParams.FacetRangeOther.BEFORE.toString(), null, start, false, incUpper); + } + + /** Helper method for building a "after" Range */ + private Range buildAfterRange() { + @SuppressWarnings({"rawtypes"}) + final Comparable the_end = getOrComputeActualEndForRefinement(); + assert null != the_end; + final boolean incLower = (include.contains(FacetParams.FacetRangeInclude.OUTER) || + (!(include.contains(FacetParams.FacetRangeInclude.UPPER) || + include.contains(FacetParams.FacetRangeInclude.EDGE)))); + return new Range(FacetParams.FacetRangeOther.AFTER.toString(), the_end, null, incLower, false); + } + + /** Helper method for building a "between" Range */ + private Range buildBetweenRange() { + @SuppressWarnings({"rawtypes"}) + final Comparable the_end = getOrComputeActualEndForRefinement(); + assert null != the_end; + final boolean incLower = (include.contains(FacetParams.FacetRangeInclude.LOWER) || + include.contains(FacetParams.FacetRangeInclude.EDGE)); + final boolean incUpper = (include.contains(FacetParams.FacetRangeInclude.UPPER) || + include.contains(FacetParams.FacetRangeInclude.EDGE)); + return new Range(FacetParams.FacetRangeOther.BETWEEN.toString(), start, the_end, incLower, incUpper); + } +} diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java index 6860a943841f..db9d9c9c9cc6 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java @@ -17,35 +17,24 @@ package org.apache.solr.search.facet; import java.io.IOException; -import java.util.ArrayList; -import java.util.EnumSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; import org.apache.lucene.search.Query; import org.apache.solr.common.SolrException; -import org.apache.solr.common.params.FacetParams; import org.apache.solr.common.params.SolrParams; -import org.apache.solr.common.util.NamedList; -import org.apache.solr.common.util.StrUtils; import org.apache.solr.request.SolrQueryRequest; -import org.apache.solr.schema.IndexSchema; import org.apache.solr.search.DocSet; -import org.apache.solr.search.FunctionQParser; import org.apache.solr.search.JoinQParserPlugin; -import org.apache.solr.search.QParser; import org.apache.solr.search.QueryContext; import org.apache.solr.search.SolrConstantScoreQuery; -import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.search.SyntaxError; import org.apache.solr.search.join.GraphQuery; import org.apache.solr.search.join.GraphQueryParser; import org.apache.solr.util.RTimer; -import static org.apache.solr.common.params.CommonParams.SORT; import static org.apache.solr.search.facet.FacetRequest.RefineMethod.NONE; /** @@ -203,6 +192,7 @@ public static void createJoinField(FacetRequest.Domain domain, Map join = (Map) queryJoin; if (! 
(join.containsKey("from") && join.containsKey("to") && null != join.get("from") && null != join.get("to")) ) { @@ -264,6 +254,7 @@ public static void createGraphField(FacetRequest.Domain domain, Map graph = (Map) queryGraph; if (! (graph.containsKey("from") && graph.containsKey("to") && null != graph.get("from") && null != graph.get("to")) ) { @@ -306,7 +297,8 @@ public Query createDomainQuery(FacetContext fcontext) throws IOException { * @param params a typed parameter structure (unlike SolrParams which are all string values). */ public static FacetRequest parse(SolrQueryRequest req, Map params) { - FacetParser parser = new FacetTopParser(req); + @SuppressWarnings({"rawtypes"}) + FacetParser parser = new FacetParser.FacetTopParser(req); try { return parser.parse(params); } catch (SyntaxError syntaxError) { @@ -324,7 +316,8 @@ public static FacetRequest parse(SolrQueryRequest req, Map param * @param params a typed parameter structure (unlike SolrParams which are all string values). */ public static FacetRequest parseOneFacetReq(SolrQueryRequest req, Map params) { - FacetParser parser = new FacetTopParser(req); + @SuppressWarnings("rawtypes") + FacetParser parser = new FacetParser.FacetTopParser(req); try { return (FacetRequest) parser.parseFacetOrStat("", params); } catch (SyntaxError syntaxError) { @@ -409,6 +402,7 @@ public final Object process(SolrQueryRequest req, DocSet domain) throws IOExcept /** Process the request with the facet context settings, a parameter-object. */ final Object process(FacetContext fcontext) throws IOException { + @SuppressWarnings("rawtypes") FacetProcessor facetProcessor = createFacetProcessor(fcontext); FacetDebugInfo debugInfo = fcontext.getDebugInfo(); @@ -432,703 +426,13 @@ final Object process(FacetContext fcontext) throws IOException { return facetProcessor.getResponse(); } + @SuppressWarnings("rawtypes") public abstract FacetProcessor createFacetProcessor(FacetContext fcontext); public abstract FacetMerger createFacetMerger(Object prototype); public abstract Map getFacetDescription(); -} - - -class FacetContext { - // Context info for actually executing a local facet command - public static final int IS_SHARD=0x01; - public static final int IS_REFINEMENT=0x02; - public static final int SKIP_FACET=0x04; // refinement: skip calculating this immediate facet, but proceed to specific sub-facets based on facetInfo - - FacetProcessor processor; - Map facetInfo; // refinement info for this node - QueryContext qcontext; - SolrQueryRequest req; // TODO: replace with params? - SolrIndexSearcher searcher; - Query filter; // TODO: keep track of as a DocSet or as a Query? - DocSet base; - FacetContext parent; - int flags; - FacetDebugInfo debugInfo; - - public void setDebugInfo(FacetDebugInfo debugInfo) { - this.debugInfo = debugInfo; - } - - public FacetDebugInfo getDebugInfo() { - return debugInfo; - } - - public boolean isShard() { - return (flags & IS_SHARD) != 0; - } - - /** - * @param filter The filter for the bucket that resulted in this context/domain. Can be null if this is the root context. - * @param domain The resulting set of documents for this facet. 
- */ - public FacetContext sub(Query filter, DocSet domain) { - FacetContext ctx = new FacetContext(); - ctx.parent = this; - ctx.base = domain; - ctx.filter = filter; - - // carry over from parent - ctx.flags = flags; - ctx.qcontext = qcontext; - ctx.req = req; - ctx.searcher = searcher; - - return ctx; - } -} - - -abstract class FacetParser { - protected FacetRequestT facet; - protected FacetParser parent; - protected String key; - - public FacetParser(FacetParser parent,String key) { - this.parent = parent; - this.key = key; - } - - public String getKey() { - return key; - } - - public String getPathStr() { - if (parent == null) { - return "/" + key; - } - return parent.getKey() + "/" + key; - } - - protected RuntimeException err(String msg) { - return new SolrException(SolrException.ErrorCode.BAD_REQUEST, msg + " , path="+getPathStr()); - } - - public abstract FacetRequest parse(Object o) throws SyntaxError; - - // TODO: put the FacetRequest on the parser object? - public void parseSubs(Object o) throws SyntaxError { - if (o==null) return; - if (o instanceof Map) { - Map m = (Map) o; - for (Map.Entry entry : m.entrySet()) { - String key = entry.getKey(); - Object value = entry.getValue(); - - if ("processEmpty".equals(key)) { - facet.processEmpty = getBoolean(m, "processEmpty", false); - continue; - } - - // "my_prices" : { "range" : { "field":... - // key="my_prices", value={"range":.. - - Object parsedValue = parseFacetOrStat(key, value); - - // TODO: have parseFacetOrStat directly add instead of return? - if (parsedValue instanceof FacetRequest) { - facet.addSubFacet(key, (FacetRequest)parsedValue); - } else if (parsedValue instanceof AggValueSource) { - facet.addStat(key, (AggValueSource)parsedValue); - } else { - throw err("Unknown facet type key=" + key + " class=" + (parsedValue == null ? "null" : parsedValue.getClass().getName())); - } - } - } else { - // facet : my_field? - throw err("Expected map for facet/stat"); - } - } - - public Object parseFacetOrStat(String key, Object o) throws SyntaxError { - - if (o instanceof String) { - return parseStringFacetOrStat(key, (String)o); - } - - if (!(o instanceof Map)) { - throw err("expected Map but got " + o); - } - - // The type can be in a one element map, or inside the args as the "type" field - // { "query" : "foo:bar" } - // { "range" : { "field":... } } - // { "type" : range, field : myfield, ... } - Map m = (Map)o; - String type; - Object args; - - if (m.size() == 1) { - Map.Entry entry = m.entrySet().iterator().next(); - type = entry.getKey(); - args = entry.getValue(); - // throw err("expected facet/stat type name, like {range:{... but got " + m); - } else { - // type should be inside the map as a parameter - Object typeObj = m.get("type"); - if (!(typeObj instanceof String)) { - throw err("expected facet/stat type name, like {type:range, field:price, ...} but got " + typeObj); - } - type = (String)typeObj; - args = m; - } - - return parseFacetOrStat(key, type, args); - } - - public Object parseFacetOrStat(String key, String type, Object args) throws SyntaxError { - // TODO: a place to register all these facet types? - - switch (type) { - case "field": - case "terms": - return new FacetFieldParser(this, key).parse(args); - case "query": - return new FacetQueryParser(this, key).parse(args); - case "range": - return new FacetRangeParser(this, key).parse(args); - case "heatmap": - return new FacetHeatmap.Parser(this, key).parse(args); - case "func": - return parseStat(key, args); - } - - throw err("Unknown facet or stat. 
key=" + key + " type=" + type + " args=" + args); - } - - public Object parseStringFacetOrStat(String key, String s) throws SyntaxError { - // "avg(myfield)" - return parseStat(key, s); - // TODO - simple string representation of facets - } - - /** Parses simple strings like "avg(x)" in the context of optional local params (may be null) */ - private AggValueSource parseStatWithParams(String key, SolrParams localparams, String stat) throws SyntaxError { - SolrQueryRequest req = getSolrRequest(); - FunctionQParser parser = new FunctionQParser(stat, localparams, req.getParams(), req); - AggValueSource agg = parser.parseAgg(FunctionQParser.FLAG_DEFAULT); - return agg; - } - - /** Parses simple strings like "avg(x)" or robust Maps that may contain local params */ - private AggValueSource parseStat(String key, Object args) throws SyntaxError { - assert null != args; - if (args instanceof CharSequence) { - // Both of these variants are already unpacked for us in this case, and use no local params... - // 1) x:{func:'min(foo)'} - // 2) x:'min(foo)' - return parseStatWithParams(key, null, args.toString()); - } - - if (args instanceof Map) { - final Map statMap = (Map)args; - return parseStatWithParams(key, jsonToSolrParams(statMap), statMap.get("func").toString()); - } - - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "Stats must be specified as either a simple string, or a json Map"); - - } - - - private FacetRequest.Domain getDomain() { - if (facet.domain == null) { - facet.domain = new FacetRequest.Domain(); - } - return facet.domain; - } - - protected void parseCommonParams(Object o) { - if (o instanceof Map) { - Map m = (Map)o; - List excludeTags = getStringList(m, "excludeTags"); - if (excludeTags != null) { - getDomain().excludeTags = excludeTags; - } - - Object domainObj = m.get("domain"); - if (domainObj instanceof Map) { - Map domainMap = (Map)domainObj; - FacetRequest.Domain domain = getDomain(); - - excludeTags = getStringList(domainMap, "excludeTags"); - if (excludeTags != null) { - domain.excludeTags = excludeTags; - } - - if (domainMap.containsKey("query")) { - domain.explicitQueries = parseJSONQueryStruct(domainMap.get("query")); - if (null == domain.explicitQueries) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "'query' domain can not be null or empty"); - } else if (null != domain.excludeTags) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "'query' domain can not be combined with 'excludeTags'"); - } - } - - String blockParent = getString(domainMap, "blockParent", null); - String blockChildren = getString(domainMap, "blockChildren", null); - - if (blockParent != null) { - domain.toParent = true; - domain.parents = blockParent; - } else if (blockChildren != null) { - domain.toChildren = true; - domain.parents = blockChildren; - } - - FacetRequest.Domain.JoinField.createJoinField(domain, domainMap); - FacetRequest.Domain.GraphField.createGraphField(domain, domainMap); - - Object filterOrList = domainMap.get("filter"); - if (filterOrList != null) { - assert domain.filters == null; - domain.filters = parseJSONQueryStruct(filterOrList); - } - - } else if (domainObj != null) { - throw err("Expected Map for 'domain', received " + domainObj.getClass().getSimpleName() + "=" + domainObj); - } - } - } - - /** returns null on null input, otherwise returns a list of the JSON query structures -- either - * directly from the raw (list) input, or if raw input is a not a list then it encapsulates - * it in a new list. 
- */ - private List parseJSONQueryStruct(Object raw) { - List result = null; - if (null == raw) { - return result; - } else if (raw instanceof List) { - result = (List) raw; - } else { - result = new ArrayList<>(1); - result.add(raw); - } - return result; - } - - public String getField(Map args) { - Object fieldName = args.get("field"); // TODO: pull out into defined constant - if (fieldName == null) { - fieldName = args.get("f"); // short form - } - if (fieldName == null) { - throw err("Missing 'field'"); - } - - if (!(fieldName instanceof String)) { - throw err("Expected string for 'field', got" + fieldName); - } - - return (String)fieldName; - } - - - public Long getLongOrNull(Map args, String paramName, boolean required) { - Object o = args.get(paramName); - if (o == null) { - if (required) { - throw err("Missing required parameter '" + paramName + "'"); - } - return null; - } - if (!(o instanceof Long || o instanceof Integer || o instanceof Short || o instanceof Byte)) { - throw err("Expected integer type for param '"+paramName + "' but got " + o); - } - - return ((Number)o).longValue(); - } - - public long getLong(Map args, String paramName, long defVal) { - Object o = args.get(paramName); - if (o == null) { - return defVal; - } - if (!(o instanceof Long || o instanceof Integer || o instanceof Short || o instanceof Byte)) { - throw err("Expected integer type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o); - } - - return ((Number)o).longValue(); - } - - public Double getDoubleOrNull(Map args, String paramName, boolean required) { - Object o = args.get(paramName); - if (o == null) { - if (required) { - throw err("Missing required parameter '" + paramName + "'"); - } - return null; - } - if (!(o instanceof Number)) { - throw err("Expected double type for param '" + paramName + "' but got " + o); - } - - return ((Number)o).doubleValue(); - } - - public boolean getBoolean(Map args, String paramName, boolean defVal) { - Object o = args.get(paramName); - if (o == null) { - return defVal; - } - // TODO: should we be more flexible and accept things like "true" (strings)? - // Perhaps wait until the use case comes up. 
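    // Editor's aside (illustrative only, not part of the original patch): these typed getters
    // define the facet DSL's parsing contract -- an absent key yields the caller's default,
    // while a mis-typed value fails fast with BAD_REQUEST via err() instead of being coerced.
    // Note that getLong() accepts Long, Integer, Short or Byte, because a JSON parser may
    // produce any of them for a body like {"limit":5}, and normalizes via Number.longValue();
    // a quoted number such as {"limit":"5"} is rejected.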
- if (!(o instanceof Boolean)) { - throw err("Expected boolean type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o); - } - - return (Boolean)o; - } - - public Boolean getBooleanOrNull(Map args, String paramName) { - Object o = args.get(paramName); - - if (o != null && !(o instanceof Boolean)) { - throw err("Expected boolean type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o); - } - return (Boolean) o; - } - - - public String getString(Map args, String paramName, String defVal) { - Object o = args.get(paramName); - if (o == null) { - return defVal; - } - if (!(o instanceof String)) { - throw err("Expected string type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o); - } - - return (String)o; - } - - public Object getVal(Map args, String paramName, boolean required) { - Object o = args.get(paramName); - if (o == null && required) { - throw err("Missing required parameter: '" + paramName + "'"); - } - return o; - } - - public List getStringList(Map args, String paramName) { - return getStringList(args, paramName, true); - } - - public List getStringList(Map args, String paramName, boolean decode) { - Object o = args.get(paramName); - if (o == null) { - return null; - } - if (o instanceof List) { - return (List)o; - } - if (o instanceof String) { - // TODO: SOLR-12539 handle spaces in b/w comma & value ie, should the values be trimmed before returning?? - return StrUtils.splitSmart((String)o, ",", decode); - } - - throw err("Expected list of string or comma separated string values for '" + paramName + - "', received " + o.getClass().getSimpleName() + "=" + o); - } - - public IndexSchema getSchema() { - return parent.getSchema(); - } - - public SolrQueryRequest getSolrRequest() { - return parent.getSolrRequest(); - } - - /** - * Helper that handles the possibility of map values being lists - * NOTE: does *NOT* fail on map values that are sub-maps (ie: nested json objects) - */ - public static SolrParams jsonToSolrParams(Map jsonObject) { - // HACK, but NamedList already handles the list processing for us... - NamedList nl = new NamedList<>(); - nl.addAll(jsonObject); - return SolrParams.toSolrParams(nl); - } -} - - -class FacetTopParser extends FacetParser { - private SolrQueryRequest req; - - public FacetTopParser(SolrQueryRequest req) { - super(null, "facet"); - this.facet = new FacetQuery(); - this.req = req; - } - - @Override - public FacetQuery parse(Object args) throws SyntaxError { - parseSubs(args); - return facet; - } - - @Override - public SolrQueryRequest getSolrRequest() { - return req; - } - - @Override - public IndexSchema getSchema() { - return req.getSchema(); - } -} - -class FacetQueryParser extends FacetParser { - public FacetQueryParser(FacetParser parent, String key) { - super(parent, key); - facet = new FacetQuery(); - } - - @Override - public FacetQuery parse(Object arg) throws SyntaxError { - parseCommonParams(arg); - - String qstring = null; - if (arg instanceof String) { - // just the field name... - qstring = (String)arg; - - } else if (arg instanceof Map) { - Map m = (Map) arg; - qstring = getString(m, "q", null); - if (qstring == null) { - qstring = getString(m, "query", null); - } - - // OK to parse subs before we have parsed our own query? - // as long as subs don't need to know about it. 
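    // Editor's aside (illustrative only, not part of the original patch): the two request
    // shapes that reach this parser, e.g. under json.facet (hypothetical field/key names):
    //   "high_pop" : { "query" : "popularity:[8 TO 10]" }                 (string arg -> q)
    //   "high_pop" : { "type":"query", "q":"popularity:[8 TO 10]",
    //                  "facet" : { "avg_price" : "avg(price)" } }         (map arg, may nest sub-facets)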
- parseSubs( m.get("facet") ); - } else if (arg != null) { - // something lke json.facet.facet.query=2 - throw err("Expected string/map for facet query, received " + arg.getClass().getSimpleName() + "=" + arg); - } - - // TODO: substats that are from defaults!!! - - if (qstring != null) { - QParser parser = QParser.getParser(qstring, getSolrRequest()); - parser.setIsFilter(true); - facet.q = parser.getQuery(); - } - - return facet; - } -} - -/*** not a separate type of parser for now... -class FacetBlockParentParser extends FacetParser { - public FacetBlockParentParser(FacetParser parent, String key) { - super(parent, key); - facet = new FacetBlockParent(); - } - - @Override - public FacetBlockParent parse(Object arg) throws SyntaxError { - parseCommonParams(arg); - - if (arg instanceof String) { - // just the field name... - facet.parents = (String)arg; - - } else if (arg instanceof Map) { - Map m = (Map) arg; - facet.parents = getString(m, "parents", null); - - parseSubs( m.get("facet") ); - } - - return facet; - } -} -***/ - - -class FacetFieldParser extends FacetParser { - public FacetFieldParser(FacetParser parent, String key) { - super(parent, key); - facet = new FacetField(); - } - - public FacetField parse(Object arg) throws SyntaxError { - parseCommonParams(arg); - if (arg instanceof String) { - // just the field name... - facet.field = (String)arg; - - } else if (arg instanceof Map) { - Map m = (Map) arg; - facet.field = getField(m); - facet.offset = getLong(m, "offset", facet.offset); - facet.limit = getLong(m, "limit", facet.limit); - facet.overrequest = (int) getLong(m, "overrequest", facet.overrequest); - facet.overrefine = (int) getLong(m, "overrefine", facet.overrefine); - if (facet.limit == 0) facet.offset = 0; // normalize. an offset with a limit of non-zero isn't useful. - facet.mincount = getLong(m, "mincount", facet.mincount); - facet.missing = getBoolean(m, "missing", facet.missing); - facet.numBuckets = getBoolean(m, "numBuckets", facet.numBuckets); - facet.prefix = getString(m, "prefix", facet.prefix); - facet.allBuckets = getBoolean(m, "allBuckets", facet.allBuckets); - facet.method = FacetField.FacetMethod.fromString(getString(m, "method", null)); - facet.cacheDf = (int)getLong(m, "cacheDf", facet.cacheDf); - - // TODO: pull up to higher level? - facet.refine = FacetField.RefineMethod.fromObj(m.get("refine")); - - facet.perSeg = getBooleanOrNull(m, "perSeg"); - - // facet.sort may depend on a facet stat... - // should we be parsing / validating this here, or in the execution environment? - Object o = m.get("facet"); - parseSubs(o); - - facet.sort = parseAndValidateSort(facet, m, SORT); - facet.prelim_sort = parseAndValidateSort(facet, m, "prelim_sort"); - } else if (arg != null) { - // something like json.facet.facet.field=2 - throw err("Expected string/map for facet field, received " + arg.getClass().getSimpleName() + "=" + arg); - } - - if (null == facet.sort) { - facet.sort = FacetRequest.FacetSort.COUNT_DESC; - } - - return facet; - } - - /** - * Parses, validates and returns the {@link FacetRequest.FacetSort} for given sortParam - * and facet field - *

    - * Currently, supported sort specifications are 'mystat desc' OR {mystat: 'desc'} - * index - This is equivalent to 'index asc' - * count - This is equivalent to 'count desc' - *

    - * - * @param facet {@link FacetField} for which sort needs to be parsed and validated - * @param args map containing the sortVal for given sortParam - * @param sortParam parameter for which sort needs to parsed and validated - * @return parsed facet sort - */ - private static FacetRequest.FacetSort parseAndValidateSort(FacetField facet, Map args, String sortParam) { - Object sort = args.get(sortParam); - if (sort == null) { - return null; - } - - FacetRequest.FacetSort facetSort = null; - - if (sort instanceof String) { - String sortStr = (String)sort; - if (sortStr.endsWith(" asc")) { - facetSort = new FacetRequest.FacetSort(sortStr.substring(0, sortStr.length()-" asc".length()), - FacetRequest.SortDirection.asc); - } else if (sortStr.endsWith(" desc")) { - facetSort = new FacetRequest.FacetSort(sortStr.substring(0, sortStr.length()-" desc".length()), - FacetRequest.SortDirection.desc); - } else { - facetSort = new FacetRequest.FacetSort(sortStr, - // default direction for "index" is ascending - ("index".equals(sortStr) - ? FacetRequest.SortDirection.asc - : FacetRequest.SortDirection.desc)); - } - } else if (sort instanceof Map) { - // { myvar : 'desc' } - Optional> optional = ((Map)sort).entrySet().stream().findFirst(); - if (optional.isPresent()) { - Map.Entry entry = optional.get(); - facetSort = new FacetRequest.FacetSort(entry.getKey(), FacetRequest.SortDirection.fromObj(entry.getValue())); - } - } else { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "Expected string/map for '" + sortParam +"', received "+ sort.getClass().getSimpleName() + "=" + sort); - } - - Map facetStats = facet.facetStats; - // validate facet sort - boolean isValidSort = facetSort == null || - "index".equals(facetSort.sortVariable) || - "count".equals(facetSort.sortVariable) || - (facetStats != null && facetStats.containsKey(facetSort.sortVariable)); - - if (!isValidSort) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "Invalid " + sortParam + " option '" + sort + "' for field '" + facet.field + "'"); - } - return facetSort; - } - -} - - - -class FacetRangeParser extends FacetParser { - public FacetRangeParser(FacetParser parent, String key) { - super(parent, key); - facet = new FacetRange(); - } - - public FacetRange parse(Object arg) throws SyntaxError { - parseCommonParams(arg); - - if (!(arg instanceof Map)) { - throw err("Missing range facet arguments"); - } - - Map m = (Map) arg; - - facet.field = getString(m, "field", null); - facet.ranges = getVal(m, "ranges", false); - - boolean required = facet.ranges == null; - facet.start = getVal(m, "start", required); - facet.end = getVal(m, "end", required); - facet.gap = getVal(m, "gap", required); - facet.hardend = getBoolean(m, "hardend", facet.hardend); - facet.mincount = getLong(m, "mincount", 0); - - // TODO: refactor list-of-options code - - List list = getStringList(m, "include", false); - String[] includeList = null; - if (list != null) { - includeList = list.toArray(new String[list.size()]); - } - facet.include = FacetParams.FacetRangeInclude.parseParam( includeList ); - facet.others = EnumSet.noneOf(FacetParams.FacetRangeOther.class); - - List other = getStringList(m, "other", false); - if (other != null) { - for (String otherStr : other) { - facet.others.add( FacetParams.FacetRangeOther.get(otherStr) ); - } - } - - Object facetObj = m.get("facet"); - parseSubs(facetObj); - - return facet; - } } diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRequestSorted.java 
b/solr/core/src/java/org/apache/solr/search/facet/FacetRequestSorted.java new file mode 100644 index 000000000000..ee67fcaac7d3 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRequestSorted.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.search.facet; + +// Any type of facet request that generates a variable number of buckets +// and the ability to sort by those generated buckets. +abstract class FacetRequestSorted extends FacetRequest { + long offset; + long limit; + /** + * Number of buckets to request beyond the limit to do internally during initial distributed search. + * -1 means default heuristic. + */ + int overrequest = -1; + /** + * Number of buckets to fill in beyond the limit to do internally during refinement of distributed search. + * -1 means default heuristic. + */ + int overrefine = -1; + long mincount; + /** + * The basic sorting to do on buckets, defaults to {@link FacetRequest.FacetSort#COUNT_DESC} + * @see #prelim_sort + */ + FacetSort sort; + /** + * An optional "Pre-Sort" that defaults to null. 
+ * If specified, then the prelim_sort is used as an optimization in place of {@link #sort} + * during collection, and the full {@link #sort} values are only computed for the top candidate buckets + * (after refinement) + */ + FacetSort prelim_sort; + RefineMethod refine; // null, NONE, or SIMPLE + + @Override + public RefineMethod getRefineMethod() { + return refine; + } + + @Override + public boolean returnsPartial() { + return super.returnsPartial() || (limit > 0); + } +} diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRequestSortedMerger.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRequestSortedMerger.java index c1e5631cc86a..cdaa5f20fdf5 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetRequestSortedMerger.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRequestSortedMerger.java @@ -31,7 +31,7 @@ import org.apache.solr.common.util.SimpleOrderedMap; // base class for facets that create a list of buckets that can be sorted -abstract class FacetRequestSortedMerger extends FacetBucketMerger { +abstract class FacetRequestSortedMerger extends FacetModule.FacetBucketMerger { LinkedHashMap buckets = new LinkedHashMap<>(); List sortedBuckets; BitSet shardHasMoreBuckets; // null, or "true" if we saw a result from this shard and it indicated that there are more results @@ -44,6 +44,7 @@ public FacetRequestSortedMerger(FacetRequestT freq) { @Override public void merge(Object facetResult, Context mcontext) { this.mcontext = mcontext; + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap res = (SimpleOrderedMap)facetResult; Boolean more = (Boolean)res.get("more"); if (more != null && more) { @@ -57,16 +58,18 @@ public void merge(Object facetResult, Context mcontext) { private static class SortVal implements Comparable { FacetBucket bucket; - FacetSortableMerger merger; // make this class inner and access merger , direction in parent? + FacetModule.FacetSortableMerger merger; // make this class inner and access merger , direction in parent? FacetRequest.SortDirection direction; @Override + @SuppressWarnings({"unchecked"}) public int compareTo(SortVal o) { int c = -merger.compareTo(o.merger, direction) * direction.getMultiplier(); return c == 0 ? bucket.bucketValue.compareTo(o.bucket.bucketValue) : c; } } + @SuppressWarnings({"unchecked", "rawtypes"}) public void mergeBucketList(List bucketList, Context mcontext) { for (SimpleOrderedMap bucketRes : bucketList) { Comparable bucketVal = (Comparable)bucketRes.get("val"); @@ -80,6 +83,7 @@ public void mergeBucketList(List bucketList, Context mcontext) } + @SuppressWarnings({"unchecked", "rawtypes"}) public void sortBuckets(final FacetRequest.FacetSort sort) { // NOTE: we *always* re-init from buckets, because it may have been modified post-refinement sortedBuckets = new ArrayList<>( buckets.values() ); @@ -140,7 +144,7 @@ public int compare(int a, int b) { if (merger != null) { SortVal sv = new SortVal(); sv.bucket = bucket; - sv.merger = (FacetSortableMerger)merger; + sv.merger = (FacetModule.FacetSortableMerger)merger; sv.direction = direction; // sv.pos = i; // if we need position in the future... 
lst.add(sv); diff --git a/solr/core/src/java/org/apache/solr/search/facet/HLLAgg.java b/solr/core/src/java/org/apache/solr/search/facet/HLLAgg.java index ff7b2a2a78f8..665bdd95604e 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/HLLAgg.java +++ b/solr/core/src/java/org/apache/solr/search/facet/HLLAgg.java @@ -75,7 +75,7 @@ public FacetMerger createFacetMerger(Object prototype) { return new Merger(); } - private static class Merger extends FacetSortableMerger { + private static class Merger extends FacetModule.FacetSortableMerger { HLL aggregate = null; long answer = -1; // -1 means unset @@ -86,6 +86,9 @@ public void merge(Object facetResult, Context mcontext) { return; } + + + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap map = (SimpleOrderedMap)facetResult; byte[] serialized = ((byte[])map.get("hll")); HLL subHLL = HLL.fromBytes(serialized); @@ -109,7 +112,7 @@ public Object getMergedResult() { } @Override - public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) { + public int compareTo(FacetModule.FacetSortableMerger other, FacetRequest.SortDirection direction) { return Long.compare( getLong(), ((Merger)other).getLong() ); } } @@ -160,6 +163,7 @@ private long getCardinality(int slot) { return set == null ? 0 : set.cardinality(); } + @SuppressWarnings({"unchecked", "rawtypes"}) public Object getShardValue(int slot) throws IOException { HLL hll = sets[slot]; if (hll == null) return NO_VALUES; diff --git a/solr/core/src/java/org/apache/solr/search/facet/LegacyFacet.java b/solr/core/src/java/org/apache/solr/search/facet/LegacyFacet.java index 7c2bde469487..fcba43c89fd3 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/LegacyFacet.java +++ b/solr/core/src/java/org/apache/solr/search/facet/LegacyFacet.java @@ -270,6 +270,7 @@ private void addSub(String key, Map sub) { getCurrentSubs().put(key, sub); } + @SuppressWarnings({"unchecked", "rawtypes"}) private Map getCurrentSubs() { if (currentSubs == null) { currentSubs = new LinkedHashMap(); diff --git a/solr/core/src/java/org/apache/solr/search/facet/MinMaxAgg.java b/solr/core/src/java/org/apache/solr/search/facet/MinMaxAgg.java index d036ee1c3622..828fbe60623d 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/MinMaxAgg.java +++ b/solr/core/src/java/org/apache/solr/search/facet/MinMaxAgg.java @@ -116,7 +116,7 @@ else if (prototype instanceof Comparable) { } // TODO: can this be replaced by ComparableMerger? - private class NumericMerger extends FacetDoubleMerger { + private class NumericMerger extends FacetModule.FacetDoubleMerger { double val = Double.NaN; @Override @@ -133,9 +133,11 @@ protected double getDouble() { } } - private class ComparableMerger extends FacetSortableMerger { + private class ComparableMerger extends FacetModule.FacetSortableMerger { + @SuppressWarnings("rawtypes") Comparable val; @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void merge(Object facetResult, Context mcontext) { Comparable other = (Comparable)facetResult; if (val == null) { @@ -153,7 +155,8 @@ public Object getMergedResult() { } @Override - public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) { + @SuppressWarnings({"unchecked"}) + public int compareTo(FacetModule.FacetSortableMerger other, FacetRequest.SortDirection direction) { // NOTE: we don't use the minmax multiplier here because we still want natural ordering between slots (i.e. 
min(field) asc and max(field) asc) both sort "A" before "Z") return this.val.compareTo(((ComparableMerger)other).val); } @@ -221,7 +224,7 @@ public void reset() throws IOException { @Override public void resize(Resizer resizer) { - resizer.resize(result, MISSING); + this.result = resizer.resize(result, MISSING); } @Override @@ -233,7 +236,7 @@ public void call(int termNum) { } } - class DFuncAcc extends DoubleFuncSlotAcc { + class DFuncAcc extends SlotAcc.DoubleFuncSlotAcc { public DFuncAcc(ValueSource values, FacetContext fcontext, int numSlots) { super(values, fcontext, numSlots, Double.NaN); } @@ -260,7 +263,7 @@ public Object getValue(int slot) { } } - class LFuncAcc extends LongFuncSlotAcc { + class LFuncAcc extends SlotAcc.LongFuncSlotAcc { FixedBitSet exists; public LFuncAcc(ValueSource values, FacetContext fcontext, int numSlots) { super(values, fcontext, numSlots, 0); @@ -320,7 +323,7 @@ public void reset() { } - class DateFuncAcc extends LongFuncSlotAcc { + class DateFuncAcc extends SlotAcc.LongFuncSlotAcc { private static final long MISSING = Long.MIN_VALUE; public DateFuncAcc(ValueSource values, FacetContext fcontext, int numSlots) { super(values, fcontext, numSlots, MISSING); @@ -504,7 +507,7 @@ public void reset() throws IOException { @Override public void resize(Resizer resizer) { - resizer.resize(slotOrd, MISSING); + this.slotOrd = resizer.resize(slotOrd, MISSING); } @Override diff --git a/solr/core/src/java/org/apache/solr/search/facet/MissingAgg.java b/solr/core/src/java/org/apache/solr/search/facet/MissingAgg.java index 4ff980f0ea92..b1b630ac4add 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/MissingAgg.java +++ b/solr/core/src/java/org/apache/solr/search/facet/MissingAgg.java @@ -61,10 +61,10 @@ public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) @Override public FacetMerger createFacetMerger(Object prototype) { - return new FacetLongMerger(); + return new FacetModule.FacetLongMerger(); } - class MissingSlotAcc extends LongFuncSlotAcc { + class MissingSlotAcc extends SlotAcc.LongFuncSlotAcc { public MissingSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) { super(values, fcontext, numSlots, 0); diff --git a/solr/core/src/java/org/apache/solr/search/facet/PercentileAgg.java b/solr/core/src/java/org/apache/solr/search/facet/PercentileAgg.java index 956b179b2ddc..b645b7f5000f 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/PercentileAgg.java +++ b/solr/core/src/java/org/apache/solr/search/facet/PercentileAgg.java @@ -132,7 +132,7 @@ protected Object getValueFromDigest(AVLTreeDigest digest) { return lst; } - class Acc extends FuncSlotAcc { + class Acc extends SlotAcc.FuncSlotAcc { protected AVLTreeDigest[] digests; protected ByteBuffer buf; protected double[] sortvals; @@ -465,7 +465,7 @@ public void call(int ord) { } } - class Merger extends FacetSortableMerger { + class Merger extends FacetModule.FacetSortableMerger { protected AVLTreeDigest digest; protected Double sortVal; @@ -488,7 +488,7 @@ public Object getMergedResult() { } @Override - public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) { + public int compareTo(FacetModule.FacetSortableMerger other, FacetRequest.SortDirection direction) { return Double.compare(getSortVal(), ((Merger) other).getSortVal()); } diff --git a/solr/core/src/java/org/apache/solr/search/facet/RelatednessAgg.java b/solr/core/src/java/org/apache/solr/search/facet/RelatednessAgg.java index 81d0f3b9e82e..10146db4dd9a 100644 --- 
a/solr/core/src/java/org/apache/solr/search/facet/RelatednessAgg.java +++ b/solr/core/src/java/org/apache/solr/search/facet/RelatednessAgg.java @@ -22,6 +22,7 @@ import java.util.Arrays; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.Map; import java.util.function.IntFunction; @@ -36,7 +37,6 @@ import org.apache.solr.common.util.SimpleOrderedMap; import org.apache.solr.search.DocSet; import org.apache.solr.search.QParser; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -117,7 +117,7 @@ public int hashCode() { } @Override - public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { + public FunctionValues getValues(@SuppressWarnings("rawtypes") Map context, LeafReaderContext readerContext) throws IOException { throw new UnsupportedOperationException("NOT IMPLEMENTED " + name + " " + this); } @@ -163,6 +163,8 @@ public FacetMerger createFacetMerger(Object prototype) { return new Merger(this); } + private static final String IMPLIED_KEY = "implied"; + private static final class SKGSlotAcc extends SlotAcc { private final RelatednessAgg agg; private BucketData[] slotvalues; @@ -186,7 +188,22 @@ private void processSlot(int slot, IntFunction slotContext) throws assert null != slotContext; - Query slotQ = slotContext.apply(slot).getSlotQuery(); + final BucketData slotVal = new BucketData(agg); + slotvalues[slot] = slotVal; + + final SlotContext ctx = slotContext.apply(slot); + if (ctx.isAllBuckets()) { + // relatedness is meaningless for allBuckets (see SOLR-14467) + // our current (implied & empty) BucketData is all we need + // + // NOTE: it might be tempting to use 'slotvalues[slot] = null' in this case + // since getValue() will also ultimately generate an implied bucket in that case, + // but by using a non-null bucket we let collect(int,...) know it doesn't need to keep calling + // processSlot over and over. + return; + } + + Query slotQ = ctx.getSlotQuery(); if (null == slotQ) { // extremeley special edge case... // the only way this should be possible is if our relatedness() function is used as a "top level" @@ -198,11 +215,9 @@ // ...and in which case we should just use the current base final DocSet slotSet = null == slotQ ? fcontext.base : fcontext.searcher.getDocSet(slotQ); - final BucketData slotVal = new BucketData(agg); slotVal.incSizes(fgSize, bgSize); slotVal.incCounts(fgSet.intersectionSize(slotSet), bgSet.intersectionSize(slotSet)); - slotvalues[slot] = slotVal; } @@ -247,12 +262,13 @@ public int compare(int slotA, int slotB) { public Object getValue(int slotNum) { BucketData slotVal = slotvalues[slotNum]; if (null == slotVal) { - // since we haven't been told about any docs for this slot, use a slot w/no counts, + // since we haven't collected any docs for this slot, use an (implied) slot w/no counts,
(this is most likely a refinement request for a bucket we dont have) slotVal = new BucketData(agg); slotVal.incSizes(fgSize, bgSize); } + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap res = slotVal.externalize(fcontext.isShard()); return res; } @@ -279,12 +295,22 @@ public void close() throws IOException { * @see SKGSlotAcc * @see Merger */ - private static final class BucketData implements Comparable { + private static class BucketData implements Comparable { private RelatednessAgg agg; private long fg_size = 0; private long bg_size = 0; private long fg_count = 0; private long bg_count = 0; + + /** + * Buckets are implied until/unless counts are explicitly incremented (even if those counts are 0) + * An implied bucket means we have no real data for it -- it may be useful for a per-Shard request + * to return "size" info of a bucket that doesn't exist on the current shard, or it may represent + * the allBuckets bucket. + * + * @see #incCounts + */ + private boolean implied; /** * NaN indicates that all derived values need (re)-computed @@ -305,6 +331,7 @@ private static final class BucketData implements Comparable { public BucketData(final RelatednessAgg agg) { this.agg = agg; + this.implied = true; } /** @@ -312,9 +339,10 @@ public BucketData(final RelatednessAgg agg) { * derived values that may be cached */ public void incCounts(final long fgInc, final long bgInc) { - this.relatedness = Double.NaN; - fg_count += fgInc; - bg_count += bgInc; + this.implied = false; + this.relatedness = Double.NaN; + fg_count += fgInc; + bg_count += bgInc; } /** * Increment both the foreground & background sizes for the current bucket, reseting any @@ -328,7 +356,7 @@ public void incSizes(final long fgInc, final long bgInc) { @Override public int hashCode() { - return Objects.hash(this.getClass(), fg_count, bg_count, fg_size, bg_size, agg); + return Objects.hash(this.getClass(), implied, fg_count, bg_count, fg_size, bg_size, agg); } @Override @@ -338,7 +366,8 @@ public boolean equals(Object other) { } BucketData that = (BucketData)other; // we will most certainly be compared to other buckets of the same Agg instance, so compare counts first - return Objects.equals(this.fg_count, that.fg_count) + return Objects.equals(this.implied, that.implied) + && Objects.equals(this.fg_count, that.fg_count) && Objects.equals(this.bg_count, that.bg_count) && Objects.equals(this.fg_size, that.fg_size) && Objects.equals(this.bg_size, that.bg_size) @@ -403,18 +432,40 @@ public int compareTo(BucketData that) { * @see SlotAcc#getValue * @see Merger#getMergedResult */ + + @SuppressWarnings({"unchecked", "rawtypes"}) public SimpleOrderedMap externalize(final boolean isShardRequest) { SimpleOrderedMap result = new SimpleOrderedMap(); + + // if counts are non-zero, then this bucket must not be implied + assert 0 == fg_count || ! implied : "Implied bucket has non-zero fg_count"; + assert 0 == bg_count || ! implied : "Implied bucket has non-zero bg_count"; if (isShardRequest) { - result.add(FG_COUNT, fg_count); - result.add(BG_COUNT, bg_count); + // shard responses must include size info, but don't need the derived stats + // // NOTE: sizes will be the same for every slot... // TODO: it would be nice to put them directly in the parent facet, instead of every bucket, // in order to reduce the size of the response. 
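    // Editor's aside (illustrative only, not part of the original patch): a per-shard bucket is
    // thus serialized as either {FG_SIZE, BG_SIZE, IMPLIED_KEY:true} (implied: sizes but no counts)
    // or {FG_SIZE, BG_SIZE, FG_COUNT, BG_COUNT} (normal), while the non-shard branch below returns
    // only the derived stats (RELATEDNESS etc.) to external clients.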
result.add(FG_SIZE, fg_size); result.add(BG_SIZE, bg_size); + + if (implied) { + // for an implied bucket on this shard, we don't need to bother returning the (empty) + // counts, just the flag explaining that this bucket is (locally) implied... + result.add(IMPLIED_KEY, Boolean.TRUE); + } else { + result.add(FG_COUNT, fg_count); + result.add(BG_COUNT, bg_count); + } } else { + if (implied) { + // When returning results to an external client, any bucket still 'implied' shouldn't return + // any results at all. + // (practically speaking this should only happen for the 'allBuckets' bucket) + return null; + } + // there's no need to bother computing these when returning results *to* a shard coordinator // only useful to external clients result.add(RELATEDNESS, this.getRelatedness()); @@ -429,7 +480,7 @@ public SimpleOrderedMap externalize(final boolean isShardRequest) { /** * Merges in the per shard {@link BucketData} output into a unified {@link BucketData} */ - private static final class Merger extends FacetSortableMerger { + private static final class Merger extends FacetModule.FacetSortableMerger { private final BucketData mergedData; public Merger(final RelatednessAgg agg) { this.mergedData = new BucketData(agg); @@ -437,13 +488,27 @@ public Merger(final RelatednessAgg agg) { @Override public void merge(Object facetResult, Context mcontext) { - NamedList shardData = (NamedList)facetResult; + @SuppressWarnings({"unchecked"}) + final NamedList shardData = (NamedList)facetResult; + + final boolean shardImplied = Optional.ofNullable((Boolean)shardData.remove(IMPLIED_KEY)).orElse(false); + + // regardless of whether this shard is implied, we want to know its size info... mergedData.incSizes((Long)shardData.remove(FG_SIZE), (Long)shardData.remove(BG_SIZE)); - mergedData.incCounts((Long)shardData.remove(FG_COUNT), (Long)shardData.remove(BG_COUNT)); + + if (! shardImplied) { + // only merge in counts from non-implied shard buckets... + mergedData.incCounts((Long)shardData.remove(FG_COUNT), (Long)shardData.remove(BG_COUNT)); + } else { + // if this shard is implied, we shouldn't have even gotten counts...
+ assert shardImplied; + assert null == shardData.remove(FG_COUNT); + assert null == shardData.remove(BG_COUNT); + } } @Override - public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) { + public int compareTo(FacetModule.FacetSortableMerger other, FacetRequest.SortDirection direction) { // NOTE: regardless of the SortDirection hint, we want normal comparison of the BucketData assert other instanceof Merger; diff --git a/solr/core/src/java/org/apache/solr/search/facet/SimpleAggValueSource.java b/solr/core/src/java/org/apache/solr/search/facet/SimpleAggValueSource.java index 661ec83cf4dd..afc506cb0a78 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/SimpleAggValueSource.java +++ b/solr/core/src/java/org/apache/solr/search/facet/SimpleAggValueSource.java @@ -35,6 +35,7 @@ public ValueSource getArg() { } @Override + @SuppressWarnings({"rawtypes"}) public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { throw new UnsupportedOperationException(); } diff --git a/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java b/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java index 99387bebe36f..e8555523ab88 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java +++ b/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java @@ -68,28 +68,28 @@ public void setNextReader(LeafReaderContext readerContext) throws IOException { } /** - * All subclasses must override this method to collect documents. This method is called by the - * default impl of {@link #collect(DocSet,int,IntFunction)} but it's also neccessary if this accumulator + * All subclasses must override this method to collect documents. This method is called by the + * default impl of {@link #collect(DocSet, int, IntFunction)} but it's also necessary if this accumulator * is used for sorting. * - * @param doc Single Segment docId (relative to the current {@link LeafReaderContext} to collect - * @param slot The slot number to collect this document in - * @param slotContext A callback that can be used for Accumulators that would like additional info - * about the current slot -- the {@link IntFunction} is only garunteed to be valid for - * the current slot, and the {@link SlotContext} returned is only valid for the duration - * of the collect() call. + * @param doc Single Segment docId (relative to the current {@link LeafReaderContext}) to collect + * @param slot The slot number to collect this document in + * @param slotContext A callback that can be used for Accumulators that would like additional info + * about the current slot -- the {@link IntFunction} is only guaranteed to be valid for + * the current slot, and the {@link SlotContext} returned is only valid for the duration + * of the collect() call. */ public abstract void collect(int doc, int slot, IntFunction slotContext) throws IOException; /** - * Bulk collection of all documents in a slot. The default implementation calls {@link #collect(int,int,IntFunction)} + * Bulk collection of all documents in a slot.
The default implementation calls {@link #collect(int, int, IntFunction)} * - * @param docs (global) Documents to collect - * @param slot The slot number to collect these documents in - * @param slotContext A callback that can be used for Accumulators that would like additional info - * about the current slot -- the {@link IntFunction} is only garunteed to be valid for - * the current slot, and the {@link SlotContext} returned is only valid for the duration - * of the collect() call. + * @param docs (global) Documents to collect + * @param slot The slot number to collect these documents in + * @param slotContext A callback that can be used for Accumulators that would like additional info + * about the current slot -- the {@link IntFunction} is only guaranteed to be valid for + * the current slot, and the {@link SlotContext} returned is only valid for the duration + * of the collect() call. */ public int collect(DocSet docs, int slot, IntFunction slotContext) throws IOException { int count = 0; @@ -101,7 +101,7 @@ public int collect(DocSet docs, int slot, IntFunction slotContext) int segBase = 0; int segMax; int adjustedMax = 0; - for (DocIterator docsIt = docs.iterator(); docsIt.hasNext();) { + for (DocIterator docsIt = docs.iterator(); docsIt.hasNext(); ) { final int doc = docsIt.nextDoc(); if (doc >= adjustedMax) { do { @@ -135,16 +135,22 @@ public void setValues(SimpleOrderedMap bucket, int slotNum) throws IOExc } } - /** Called to reset the acc to a fresh state, ready for reuse */ + /** + * Called to reset the acc to a fresh state, ready for reuse + */ public abstract void reset() throws IOException; - /** Typically called from setNextReader to reset docValue iterators */ - protected void resetIterators() throws IOException {}; + /** + * Typically called from setNextReader to reset docValue iterators + */ + protected void resetIterators() throws IOException { + } public abstract void resize(Resizer resizer); @Override - public void close() throws IOException {} + public void close() throws IOException { + } public static abstract class Resizer { public abstract int getNewSize(); @@ -206,7 +214,7 @@ public FixedBitSet resize(FixedBitSet old) { FixedBitSet values = new FixedBitSet(getNewSize()); int oldSize = old.length(); - for(int oldSlot = 0;;) { + for (int oldSlot = 0; ; ) { oldSlot = values.nextSetBit(oldSlot); if (oldSlot == DocIdSetIterator.NO_MORE_DOCS) break; int newSlot = getNewSlot(oldSlot); @@ -218,6 +226,7 @@ public FixedBitSet resize(FixedBitSet old) { } public T[] resize(T[] old, T defaultValue) { + @SuppressWarnings({"unchecked"}) T[] values = (T[]) Array.newInstance(old.getClass().getComponentType(), getNewSize()); if (defaultValue != null) { Arrays.fill(values, 0, values.length, defaultValue); @@ -237,37 +246,51 @@ public T[] resize(T[] old, T defaultValue) { } // end class Resizer /** - * Incapsulates information about the current slot, for Accumulators that may want + * Encapsulates information about the current slot, for Accumulators that may want * additional info during collection. */ - public static final class SlotContext { + public static class SlotContext { private final Query slotQuery; + public SlotContext(Query slotQuery) { this.slotQuery = slotQuery; } + + /** + * behavior of this method is undefined if {@link #isAllBuckets} returns true + */ + public Query getSlotQuery() { return slotQuery; } + + /** + * @return true if and only if this slot corresponds to the allBuckets bucket. 
+ * @see #getSlotQuery + */ + public boolean isAllBuckets() { + return false; + } } -} -// TODO: we should really have a decoupled value provider... + + // TODO: we should really have a decoupled value provider... // This would enhance reuse and also prevent multiple lookups of same value across diff stats -abstract class FuncSlotAcc extends SlotAcc { - protected final ValueSource valueSource; - protected FunctionValues values; + abstract static class FuncSlotAcc extends SlotAcc { + protected final ValueSource valueSource; + protected FunctionValues values; - public FuncSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) { - super(fcontext); - this.valueSource = values; - } + public FuncSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) { + super(fcontext); + this.valueSource = values; + } - @Override - public void setNextReader(LeafReaderContext readerContext) throws IOException { - super.setNextReader(readerContext); - values = valueSource.getValues(fcontext.qcontext, readerContext); + @Override + @SuppressWarnings({"unchecked"}) + public void setNextReader(LeafReaderContext readerContext) throws IOException { + super.setNextReader(readerContext); + values = valueSource.getValues(fcontext.qcontext, readerContext); + } } -} // have a version that counts the number of times a Slot has been hit? (for avg... what else?) @@ -275,393 +298,452 @@ public void setNextReader(LeafReaderContext readerContext) throws IOException { // double-slot-func -> func-slot -> slot -> acc // double-slot-func -> double-slot -> slot -> acc -abstract class DoubleFuncSlotAcc extends FuncSlotAcc { - double[] result; // TODO: use DoubleArray - double initialValue; + abstract static class DoubleFuncSlotAcc extends FuncSlotAcc { + double[] result; // TODO: use DoubleArray + double initialValue; - public DoubleFuncSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) { - this(values, fcontext, numSlots, 0); - } + public DoubleFuncSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) { + this(values, fcontext, numSlots, 0); + } - public DoubleFuncSlotAcc(ValueSource values, FacetContext fcontext, int numSlots, double initialValue) { - super(values, fcontext, numSlots); - this.initialValue = initialValue; - result = new double[numSlots]; - if (initialValue != 0) { - reset(); + public DoubleFuncSlotAcc(ValueSource values, FacetContext fcontext, int numSlots, double initialValue) { + super(values, fcontext, numSlots); + this.initialValue = initialValue; + result = new double[numSlots]; + if (initialValue != 0) { + reset(); + } } - } - @Override - public int compare(int slotA, int slotB) { - return Double.compare(result[slotA], result[slotB]); - } + @Override + public int compare(int slotA, int slotB) { + return Double.compare(result[slotA], result[slotB]); + } - @Override - public Object getValue(int slot) { - return result[slot]; - } + @Override + public Object getValue(int slot) { + return result[slot]; + } - @Override - public void reset() { - Arrays.fill(result, initialValue); - } + @Override + public void reset() { + Arrays.fill(result, initialValue); + } - @Override - public void resize(Resizer resizer) { - result = resizer.resize(result, initialValue); + @Override + public void resize(Resizer resizer) { + result = resizer.resize(result, initialValue); + } } -} -abstract class LongFuncSlotAcc extends FuncSlotAcc { - long[] result; - long initialValue; + abstract static class LongFuncSlotAcc extends FuncSlotAcc { + long[] result; + long initialValue; - public 
LongFuncSlotAcc(ValueSource values, FacetContext fcontext, int numSlots, long initialValue) { - super(values, fcontext, numSlots); - this.initialValue = initialValue; - result = new long[numSlots]; - if (initialValue != 0) { - reset(); + public LongFuncSlotAcc(ValueSource values, FacetContext fcontext, int numSlots, long initialValue) { + super(values, fcontext, numSlots); + this.initialValue = initialValue; + result = new long[numSlots]; + if (initialValue != 0) { + reset(); + } } - } - @Override - public int compare(int slotA, int slotB) { - return Long.compare(result[slotA], result[slotB]); - } + @Override + public int compare(int slotA, int slotB) { + return Long.compare(result[slotA], result[slotB]); + } - @Override - public Object getValue(int slot) { - return result[slot]; - } + @Override + public Object getValue(int slot) { + return result[slot]; + } - @Override - public void reset() { - Arrays.fill(result, initialValue); - } + @Override + public void reset() { + Arrays.fill(result, initialValue); + } - @Override - public void resize(Resizer resizer) { - result = resizer.resize(result, initialValue); + @Override + public void resize(Resizer resizer) { + result = resizer.resize(result, initialValue); + } } -} -abstract class IntSlotAcc extends SlotAcc { - int[] result; // use LongArray32 - int initialValue; + abstract class IntSlotAcc extends SlotAcc { + int[] result; // use LongArray32 + int initialValue; - public IntSlotAcc(FacetContext fcontext, int numSlots, int initialValue) { - super(fcontext); - this.initialValue = initialValue; - result = new int[numSlots]; - if (initialValue != 0) { - reset(); + public IntSlotAcc(FacetContext fcontext, int numSlots, int initialValue) { + super(fcontext); + this.initialValue = initialValue; + result = new int[numSlots]; + if (initialValue != 0) { + reset(); + } } - } - @Override - public int compare(int slotA, int slotB) { - return Integer.compare(result[slotA], result[slotB]); - } + @Override + public int compare(int slotA, int slotB) { + return Integer.compare(result[slotA], result[slotB]); + } - @Override - public Object getValue(int slot) { - return result[slot]; - } + @Override + public Object getValue(int slot) { + return result[slot]; + } - @Override - public void reset() { - Arrays.fill(result, initialValue); - } + @Override + public void reset() { + Arrays.fill(result, initialValue); + } - @Override - public void resize(Resizer resizer) { - result = resizer.resize(result, initialValue); + @Override + public void resize(Resizer resizer) { + result = resizer.resize(result, initialValue); + } } -} -class SumSlotAcc extends DoubleFuncSlotAcc { - public SumSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) { - super(values, fcontext, numSlots); - } + static class SumSlotAcc extends DoubleFuncSlotAcc { + public SumSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) { + super(values, fcontext, numSlots); + } - public void collect(int doc, int slotNum, IntFunction slotContext) throws IOException { - double val = values.doubleVal(doc); // todo: worth trying to share this value across multiple stats that need it? - result[slotNum] += val; + public void collect(int doc, int slotNum, IntFunction slotContext) throws IOException { + double val = values.doubleVal(doc); // todo: worth trying to share this value across multiple stats that need it? 
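Every DoubleFuncSlotAcc subclass in this file follows the same shape: setNextReader binds per-segment FunctionValues, collect folds one document's value into result[slot], and compare/getValue read straight out of the array. As a sketch of how small a new stat is under this contract — a hypothetical "min" accumulator, assuming only the signatures visible in this diff:

    // Hypothetical example, not part of this patch.
    static class MinSlotAcc extends DoubleFuncSlotAcc {
      public MinSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
        super(values, fcontext, numSlots, Double.POSITIVE_INFINITY); // identity value for min
      }

      @Override
      public void collect(int doc, int slotNum, IntFunction<SlotContext> slotContext) throws IOException {
        if (values.exists(doc)) { // skip docs with no value, as AvgSlotAcc does
          result[slotNum] = Math.min(result[slotNum], values.doubleVal(doc));
        }
      }
    }

The base class takes care of reset and resizing; only the per-document fold differs from stat to stat.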
+ result[slotNum] += val; + } } -} -class SumsqSlotAcc extends DoubleFuncSlotAcc { - public SumsqSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) { - super(values, fcontext, numSlots); - } + static class SumsqSlotAcc extends DoubleFuncSlotAcc { + public SumsqSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) { + super(values, fcontext, numSlots); + } - @Override - public void collect(int doc, int slotNum, IntFunction slotContext) throws IOException { - double val = values.doubleVal(doc); - val = val * val; - result[slotNum] += val; + @Override + public void collect(int doc, int slotNum, IntFunction slotContext) throws IOException { + double val = values.doubleVal(doc); + val = val * val; + result[slotNum] += val; + } } -} -class AvgSlotAcc extends DoubleFuncSlotAcc { - int[] counts; + static class AvgSlotAcc extends DoubleFuncSlotAcc { + int[] counts; - public AvgSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) { - super(values, fcontext, numSlots); - counts = new int[numSlots]; - } + public AvgSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) { + super(values, fcontext, numSlots); + counts = new int[numSlots]; + } - @Override - public void reset() { - super.reset(); - for (int i = 0; i < counts.length; i++) { - counts[i] = 0; + @Override + public void reset() { + super.reset(); + for (int i = 0; i < counts.length; i++) { + counts[i] = 0; + } } - } - @Override - public void collect(int doc, int slotNum, IntFunction slotContext) throws IOException { - double val = values.doubleVal(doc); - if (val != 0 || values.exists(doc)) { - result[slotNum] += val; - counts[slotNum] += 1; + @Override + public void collect(int doc, int slotNum, IntFunction slotContext) throws IOException { + double val = values.doubleVal(doc); + if (val != 0 || values.exists(doc)) { + result[slotNum] += val; + counts[slotNum] += 1; + } } - } - private double avg(int slot) { - return AggUtil.avg(result[slot], counts[slot]); // calc once and cache in result? - } + private double avg(int slot) { + return AggUtil.avg(result[slot], counts[slot]); // calc once and cache in result? 
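The fcontext.isShard() branch just below is why AvgSlotAcc tracks counts alongside sums: per-shard averages cannot be merged, so each shard ships its raw (count, sum) pair and only the coordinator divides. A sketch of the merge arithmetic, shaped like the FacetDoubleMerger classes touched elsewhere in this patch but using plain fields:

    // Sketch: fold per-shard (count, sum) pairs into one global average.
    class AvgMergeSketch {
      long count;
      double sum;
      void merge(long shardCount, double shardSum) {
        count += shardCount;
        sum += shardSum;
      }
      double getMergedResult() {
        return count == 0 ? 0.0d : sum / count; // guard for empty buckets
      }
    }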
+ } - @Override - public int compare(int slotA, int slotB) { - return Double.compare(avg(slotA), avg(slotB)); - } + @Override + public int compare(int slotA, int slotB) { + return Double.compare(avg(slotA), avg(slotB)); + } - @Override - public Object getValue(int slot) { - if (fcontext.isShard()) { - ArrayList lst = new ArrayList(2); - lst.add(counts[slot]); - lst.add(result[slot]); - return lst; - } else { - return avg(slot); + @Override + public Object getValue(int slot) { + if (fcontext.isShard()) { + ArrayList lst = new ArrayList<>(2); + lst.add(counts[slot]); + lst.add(result[slot]); + return lst; + } else { + return avg(slot); + } } - } - @Override - public void resize(Resizer resizer) { - super.resize(resizer); - counts = resizer.resize(counts, 0); + @Override + public void resize(Resizer resizer) { + super.resize(resizer); + counts = resizer.resize(counts, 0); + } } -} -class VarianceSlotAcc extends DoubleFuncSlotAcc { - int[] counts; - double[] sum; + static class VarianceSlotAcc extends DoubleFuncSlotAcc { + int[] counts; + double[] sum; - public VarianceSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) { - super(values, fcontext, numSlots); - counts = new int[numSlots]; - sum = new double[numSlots]; - } + public VarianceSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) { + super(values, fcontext, numSlots); + counts = new int[numSlots]; + sum = new double[numSlots]; + } - @Override - public void reset() { - super.reset(); - Arrays.fill(counts, 0); - Arrays.fill(sum, 0); - } + @Override + public void reset() { + super.reset(); + Arrays.fill(counts, 0); + Arrays.fill(sum, 0); + } - @Override - public void resize(Resizer resizer) { - super.resize(resizer); - this.counts = resizer.resize(this.counts, 0); - this.sum = resizer.resize(this.sum, 0); - } + @Override + public void resize(Resizer resizer) { + super.resize(resizer); + this.counts = resizer.resize(this.counts, 0); + this.sum = resizer.resize(this.sum, 0); + } - private double variance(int slot) { - return AggUtil.variance(result[slot], sum[slot], counts[slot]); // calc once and cache in result? - } + private double variance(int slot) { + return AggUtil.variance(result[slot], sum[slot], counts[slot]); // calc once and cache in result? 
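VarianceSlotAcc keeps three parallel accumulators per slot — counts, sum, and result (used here for the sum of squares) — because variance is derivable from exactly those three totals. Whatever correction AggUtil.variance applies internally (not shown in this diff), the underlying identity is the standard one; as a sketch:

    // Sketch: population variance from the three per-slot totals.
    class VarianceSketch {
      static double variance(double sumSq, double sum, long count) {
        if (count == 0) return 0.0d;          // empty slot
        double mean = sum / count;
        return (sumSq / count) - mean * mean; // E[x^2] - (E[x])^2
      }
    }

StddevSlotAcc below keeps the same three totals and differs only in taking a square root at the end.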
+ } - @Override - public int compare(int slotA, int slotB) { - return Double.compare(this.variance(slotA), this.variance(slotB)); - } + @Override + public int compare(int slotA, int slotB) { + return Double.compare(this.variance(slotA), this.variance(slotB)); + } - @Override - public Object getValue(int slot) { - if (fcontext.isShard()) { - ArrayList lst = new ArrayList(3); - lst.add(counts[slot]); - lst.add(result[slot]); - lst.add(sum[slot]); - return lst; - } else { - return this.variance(slot); + @Override + public Object getValue(int slot) { + if (fcontext.isShard()) { + ArrayList lst = new ArrayList<>(3); + lst.add(counts[slot]); + lst.add(result[slot]); + lst.add(sum[slot]); + return lst; + } else { + return this.variance(slot); + } } - } - @Override - public void collect(int doc, int slot, IntFunction slotContext) throws IOException { - double val = values.doubleVal(doc); - if (values.exists(doc)) { - counts[slot]++; - result[slot] += val * val; - sum[slot] += val; + @Override + public void collect(int doc, int slot, IntFunction slotContext) throws IOException { + double val = values.doubleVal(doc); + if (values.exists(doc)) { + counts[slot]++; + result[slot] += val * val; + sum[slot] += val; + } } } -} -class StddevSlotAcc extends DoubleFuncSlotAcc { - int[] counts; - double[] sum; + static class StddevSlotAcc extends DoubleFuncSlotAcc { + int[] counts; + double[] sum; - public StddevSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) { - super(values, fcontext, numSlots); - counts = new int[numSlots]; - sum = new double[numSlots]; - } + public StddevSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) { + super(values, fcontext, numSlots); + counts = new int[numSlots]; + sum = new double[numSlots]; + } - @Override - public void reset() { - super.reset(); - Arrays.fill(counts, 0); - Arrays.fill(sum, 0); - } + @Override + public void reset() { + super.reset(); + Arrays.fill(counts, 0); + Arrays.fill(sum, 0); + } - @Override - public void resize(Resizer resizer) { - super.resize(resizer); - this.counts = resizer.resize(this.counts, 0); - this.result = resizer.resize(this.result, 0); - } + @Override + public void resize(Resizer resizer) { + super.resize(resizer); + this.counts = resizer.resize(this.counts, 0); + this.result = resizer.resize(this.result, 0); + } - private double stdDev(int slot) { - return AggUtil.stdDev(result[slot], sum[slot], counts[slot]); // calc once and cache in result? - } + private double stdDev(int slot) { + return AggUtil.stdDev(result[slot], sum[slot], counts[slot]); // calc once and cache in result? 
+ } - @Override - public int compare(int slotA, int slotB) { - return Double.compare(this.stdDev(slotA), this.stdDev(slotB)); - } + @Override + public int compare(int slotA, int slotB) { + return Double.compare(this.stdDev(slotA), this.stdDev(slotB)); + } - @Override - public Object getValue(int slot) { - if (fcontext.isShard()) { - ArrayList lst = new ArrayList(3); - lst.add(counts[slot]); - lst.add(result[slot]); - lst.add(sum[slot]); - return lst; - } else { - return this.stdDev(slot); + @Override + @SuppressWarnings({"unchecked"}) + public Object getValue(int slot) { + if (fcontext.isShard()) { + ArrayList lst = new ArrayList<>(3); + lst.add(counts[slot]); + lst.add(result[slot]); + lst.add(sum[slot]); + return lst; + } else { + return this.stdDev(slot); + } } - } - @Override - public void collect(int doc, int slot, IntFunction slotContext) throws IOException { - double val = values.doubleVal(doc); - if (values.exists(doc)) { - counts[slot]++; - result[slot] += val * val; - sum[slot] += val; + @Override + public void collect(int doc, int slot, IntFunction slotContext) throws IOException { + double val = values.doubleVal(doc); + if (values.exists(doc)) { + counts[slot]++; + result[slot] += val * val; + sum[slot] += val; + } } } -} -abstract class CountSlotAcc extends SlotAcc { - public CountSlotAcc(FacetContext fcontext) { - super(fcontext); + abstract static class CountSlotAcc extends SlotAcc { + public CountSlotAcc(FacetContext fcontext) { + super(fcontext); + } + + public abstract void incrementCount(int slot, long count); + + public abstract long getCount(int slot); } - public abstract void incrementCount(int slot, long count); + /** + * This CountSlotAcc exists as a /dev/null sink for callers of collect(...) and other "write"-type + * methods. It should be used in contexts where "read"-type access methods will never be called. + */ + static final CountSlotAcc DEV_NULL_SLOT_ACC = new CountSlotAcc(null) { - public abstract long getCount(int slot); -} + @Override + public void resize(Resizer resizer) { + // No-op + } -class CountSlotArrAcc extends CountSlotAcc { - long[] result; + @Override + public void reset() throws IOException { + // No-op + } - public CountSlotArrAcc(FacetContext fcontext, int numSlots) { - super(fcontext); - result = new long[numSlots]; - } + @Override + public void collect(int doc, int slot, IntFunction slotContext) throws IOException { + // No-op + } - @Override - public void collect(int doc, int slotNum, IntFunction slotContext) { - // TODO: count arrays can use fewer bytes based on the number of docs in - // the base set (that's the upper bound for single valued) - look at ttf? 
- result[slotNum]++; - } + @Override + public void incrementCount(int slot, long count) { + // No-op + } - @Override - public int compare(int slotA, int slotB) { - return Long.compare(result[slotA], result[slotB]); - } + @Override + public void setNextReader(LeafReaderContext readerContext) throws IOException { + // No-op + } - @Override - public Object getValue(int slotNum) throws IOException { - return result[slotNum]; - } + @Override + public int collect(DocSet docs, int slot, IntFunction slotContext) throws IOException { + return docs.size(); // dressed up no-op + } - @Override - public void incrementCount(int slot, long count) { - result[slot] += count; - } + @Override + public Object getValue(int slotNum) throws IOException { + throw new UnsupportedOperationException("not supported"); + } - @Override - public long getCount(int slot) { - return result[slot]; - } + @Override + public int compare(int slotA, int slotB) { + throw new UnsupportedOperationException("not supported"); + } - // internal and expert - long[] getCountArray() { - return result; - } + @Override + public void setValues(SimpleOrderedMap bucket, int slotNum) throws IOException { + throw new UnsupportedOperationException("not supported"); + } - @Override - public void reset() { - Arrays.fill(result, 0); - } + @Override + public long getCount(int slot) { + throw new UnsupportedOperationException("not supported"); + } + }; - @Override - public void resize(Resizer resizer) { - result = resizer.resize(result, 0); - } -} + static class CountSlotArrAcc extends CountSlotAcc { + long[] result; -class SortSlotAcc extends SlotAcc { - public SortSlotAcc(FacetContext fcontext) { - super(fcontext); - } + public CountSlotArrAcc(FacetContext fcontext, int numSlots) { + super(fcontext); + result = new long[numSlots]; + } - @Override - public void collect(int doc, int slot, IntFunction slotContext) throws IOException { - // no-op - } + @Override + public void collect(int doc, int slotNum, IntFunction slotContext) { + // TODO: count arrays can use fewer bytes based on the number of docs in + // the base set (that's the upper bound for single valued) - look at ttf? 
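DEV_NULL_SLOT_ACC above is a null object: every "write" silently succeeds, every "read" throws, and the bulk collect(DocSet, ...) still returns docs.size() so callers that only consume the return value keep working. A hypothetical write-only caller illustrating the intended division of labor:

    // Sketch: a path that must drive an acc but never reads it back may take the sink.
    int tallyOnly(DocSet docs, SlotAcc.CountSlotAcc acc) throws IOException {
      return acc.collect(docs, 0, slot -> null); // with DEV_NULL_SLOT_ACC this is just docs.size()
    }
    // ...whereas acc.getCount(0) on the sink would throw UnsupportedOperationException.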
+ result[slotNum]++; + } - @Override - public int compare(int slotA, int slotB) { - return slotA - slotB; - } + @Override + public int compare(int slotA, int slotB) { + return Long.compare(result[slotA], result[slotB]); + } - @Override - public Object getValue(int slotNum) { - return slotNum; - } + @Override + public Object getValue(int slotNum) throws IOException { + return result[slotNum]; + } - @Override - public void reset() { - // no-op + @Override + public void incrementCount(int slot, long count) { + result[slot] += count; + } + + @Override + public long getCount(int slot) { + return result[slot]; + } + + // internal and expert + long[] getCountArray() { + return result; + } + + @Override + public void reset() { + Arrays.fill(result, 0); + } + + @Override + public void resize(Resizer resizer) { + result = resizer.resize(result, 0); + } } - @Override - public void resize(Resizer resizer) { - // sort slot only works with direct-mapped accumulators - throw new UnsupportedOperationException(); + static class SortSlotAcc extends SlotAcc { + public SortSlotAcc(FacetContext fcontext) { + super(fcontext); + } + + @Override + public void collect(int doc, int slot, IntFunction slotContext) throws IOException { + // no-op + } + + @Override + public int compare(int slotA, int slotB) { + return slotA - slotB; + } + + @Override + public Object getValue(int slotNum) { + return slotNum; + } + + @Override + public void reset() { + // no-op + } + + @Override + public void resize(Resizer resizer) { + // sort slot only works with direct-mapped accumulators + throw new UnsupportedOperationException(); + } } } diff --git a/solr/core/src/java/org/apache/solr/search/facet/StddevAgg.java b/solr/core/src/java/org/apache/solr/search/facet/StddevAgg.java index d0fb90be5881..368a62fe26df 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/StddevAgg.java +++ b/solr/core/src/java/org/apache/solr/search/facet/StddevAgg.java @@ -57,7 +57,7 @@ public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) } vs = sf.getType().getValueSource(sf, null); } - return new StddevSlotAcc(vs, fcontext, numSlots); + return new SlotAcc.StddevSlotAcc(vs, fcontext, numSlots); } @Override @@ -65,7 +65,7 @@ public FacetMerger createFacetMerger(Object prototype) { return new Merger(); } - private static class Merger extends FacetDoubleMerger { + private static class Merger extends FacetModule.FacetDoubleMerger { long count; double sumSq; double sum; @@ -90,7 +90,7 @@ protected double getDouble() { } } - class StddevSortedNumericAcc extends SDVSortedNumericAcc { + class StddevSortedNumericAcc extends DocValuesAcc.SDVSortedNumericAcc { public StddevSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { super(fcontext, sf, numSlots); @@ -102,7 +102,7 @@ protected double computeVal(int slot) { } } - class StddevSortedSetAcc extends SDVSortedSetAcc { + class StddevSortedSetAcc extends DocValuesAcc.SDVSortedSetAcc { public StddevSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { super(fcontext, sf, numSlots); @@ -114,7 +114,7 @@ protected double computeVal(int slot) { } } - class StddevUnInvertedFieldAcc extends SDVUnInvertedFieldAcc { + class StddevUnInvertedFieldAcc extends UnInvertedFieldAcc.SDVUnInvertedFieldAcc { public StddevUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { super(fcontext, sf, numSlots); diff --git a/solr/core/src/java/org/apache/solr/search/facet/SumAgg.java 
b/solr/core/src/java/org/apache/solr/search/facet/SumAgg.java index 5fe5aed70f66..05f5476e7f40 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/SumAgg.java +++ b/solr/core/src/java/org/apache/solr/search/facet/SumAgg.java @@ -59,7 +59,7 @@ public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) } vs = sf.getType().getValueSource(sf, null); } - return new SumSlotAcc(vs, fcontext, numSlots); + return new SlotAcc.SumSlotAcc(vs, fcontext, numSlots); } @Override @@ -67,7 +67,7 @@ public FacetMerger createFacetMerger(Object prototype) { return new Merger(); } - public static class Merger extends FacetDoubleMerger { + public static class Merger extends FacetModule.FacetDoubleMerger { double val; @Override @@ -80,7 +80,7 @@ protected double getDouble() { } } - class SumSortedNumericAcc extends DoubleSortedNumericDVAcc { + class SumSortedNumericAcc extends DocValuesAcc.DoubleSortedNumericDVAcc { public SumSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { super(fcontext, sf, numSlots, 0); @@ -95,7 +95,7 @@ protected void collectValues(int doc, int slot) throws IOException { } - class SumSortedSetAcc extends DoubleSortedSetDVAcc { + class SumSortedSetAcc extends DocValuesAcc.DoubleSortedSetDVAcc { public SumSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { super(fcontext, sf, numSlots, 0); @@ -113,7 +113,7 @@ protected void collectValues(int doc, int slot) throws IOException { } } - class SumUnInvertedFieldAcc extends DoubleUnInvertedFieldAcc { + class SumUnInvertedFieldAcc extends UnInvertedFieldAcc.DoubleUnInvertedFieldAcc { public SumUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { super(fcontext, sf, numSlots, 0); diff --git a/solr/core/src/java/org/apache/solr/search/facet/SumsqAgg.java b/solr/core/src/java/org/apache/solr/search/facet/SumsqAgg.java index 4bb9e67d7ca5..12eec86699c0 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/SumsqAgg.java +++ b/solr/core/src/java/org/apache/solr/search/facet/SumsqAgg.java @@ -58,7 +58,7 @@ public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) } vs = sf.getType().getValueSource(sf, null); } - return new SumsqSlotAcc(vs, fcontext, numSlots); + return new SlotAcc.SumsqSlotAcc(vs, fcontext, numSlots); } @Override @@ -66,7 +66,7 @@ public FacetMerger createFacetMerger(Object prototype) { return new SumAgg.Merger(); } - class SumSqSortedNumericAcc extends DoubleSortedNumericDVAcc { + class SumSqSortedNumericAcc extends DocValuesAcc.DoubleSortedNumericDVAcc { public SumSqSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { super(fcontext, sf, numSlots, 0); @@ -81,7 +81,7 @@ protected void collectValues(int doc, int slot) throws IOException { } } - class SumSqSortedSetAcc extends DoubleSortedSetDVAcc { + class SumSqSortedSetAcc extends DocValuesAcc.DoubleSortedSetDVAcc { public SumSqSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { super(fcontext, sf, numSlots, 0); @@ -99,7 +99,7 @@ protected void collectValues(int doc, int slot) throws IOException { } } - class SumSqUnInvertedFieldAcc extends DoubleUnInvertedFieldAcc { + class SumSqUnInvertedFieldAcc extends UnInvertedFieldAcc.DoubleUnInvertedFieldAcc { public SumSqUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { super(fcontext, sf, numSlots, 0); diff --git 
a/solr/core/src/java/org/apache/solr/search/facet/UnInvertedField.java b/solr/core/src/java/org/apache/solr/search/facet/UnInvertedField.java index f0dd7df25686..04f88f967d13 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/UnInvertedField.java +++ b/solr/core/src/java/org/apache/solr/search/facet/UnInvertedField.java @@ -315,7 +315,7 @@ public interface Callback { - private void getCounts(FacetFieldProcessorByArrayUIF processor, CountSlotAcc counts) throws IOException { + private void getCounts(FacetFieldProcessorByArrayUIF processor, SlotAcc.CountSlotAcc counts) throws IOException { DocSet docs = processor.fcontext.base; int baseSize = docs.size(); int maxDoc = searcher.maxDoc(); @@ -427,7 +427,7 @@ public void collectDocsGeneric(FacetFieldProcessorByArrayUIF processor) throws I DocSet docs = processor.fcontext.base; int uniqueTerms = 0; - final CountSlotAcc countAcc = processor.countAcc; + final SlotAcc.CountSlotAcc countAcc = processor.countAcc; for (TopTerm tt : bigTerms.values()) { if (tt.termNum >= startTermIndex && tt.termNum < endTermIndex) { diff --git a/solr/core/src/java/org/apache/solr/search/facet/UnInvertedFieldAcc.java b/solr/core/src/java/org/apache/solr/search/facet/UnInvertedFieldAcc.java index 7f2d9eb56da8..209677340b2b 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/UnInvertedFieldAcc.java +++ b/solr/core/src/java/org/apache/solr/search/facet/UnInvertedFieldAcc.java @@ -51,109 +51,111 @@ public void close() throws IOException { docToTerm = null; } } -} -abstract class DoubleUnInvertedFieldAcc extends UnInvertedFieldAcc { - double[] result; - int currentSlot; - double initialValue; - public DoubleUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots, double initialValue) throws IOException { - super(fcontext, sf, numSlots); - result = new double[numSlots]; - if (initialValue != 0) { - this.initialValue = initialValue; - Arrays.fill(result, initialValue); + abstract static class DoubleUnInvertedFieldAcc extends UnInvertedFieldAcc { + double[] result; + int currentSlot; + double initialValue; + + public DoubleUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots, double initialValue) throws IOException { + super(fcontext, sf, numSlots); + result = new double[numSlots]; + if (initialValue != 0) { + this.initialValue = initialValue; + Arrays.fill(result, initialValue); + } } - } - @Override - public void collect(int doc, int slot, IntFunction slotContext) throws IOException { - this.currentSlot = slot; - docToTerm.getBigTerms(doc + currentDocBase, this); - docToTerm.getSmallTerms(doc + currentDocBase, this); - } + @Override + public void collect(int doc, int slot, IntFunction slotContext) throws IOException { + this.currentSlot = slot; + docToTerm.getBigTerms(doc + currentDocBase, this); + docToTerm.getSmallTerms(doc + currentDocBase, this); + } - @Override - public int compare(int slotA, int slotB) { - return Double.compare(result[slotA], result[slotB]); - } + @Override + public int compare(int slotA, int slotB) { + return Double.compare(result[slotA], result[slotB]); + } - @Override - public Object getValue(int slotNum) throws IOException { - return result[slotNum]; - } + @Override + public Object getValue(int slotNum) throws IOException { + return result[slotNum]; + } - @Override - public void reset() throws IOException { - Arrays.fill(result, initialValue); - } + @Override + public void reset() throws IOException { + Arrays.fill(result, initialValue); + } - @Override - public void resize(Resizer resizer) { - 
resizer.resize(result, initialValue); + @Override + public void resize(Resizer resizer) { + this.result = resizer.resize(result, initialValue); + } } -} -/** - * Base accumulator to compute standard deviation and variance for uninvertible fields - */ -abstract class SDVUnInvertedFieldAcc extends DoubleUnInvertedFieldAcc { - int[] counts; - double[] sum; - - public SDVUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { - super(fcontext, sf, numSlots, 0); - this.counts = new int[numSlots]; - this.sum = new double[numSlots]; - } + /** + * Base accumulator to compute standard deviation and variance for uninvertible fields + */ + abstract static class SDVUnInvertedFieldAcc extends DoubleUnInvertedFieldAcc { + int[] counts; + double[] sum; + + public SDVUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { + super(fcontext, sf, numSlots, 0); + this.counts = new int[numSlots]; + this.sum = new double[numSlots]; + } - @Override - public void call(int termNum) { - try { - BytesRef term = docToTerm.lookupOrd(termNum); - Object obj = sf.getType().toObject(sf, term); - double val = obj instanceof Date ? ((Date)obj).getTime(): ((Number)obj).doubleValue(); - result[currentSlot] += val * val; - sum[currentSlot]+= val; - counts[currentSlot]++; - } catch (IOException e) { - // find a better way to do it - throw new UncheckedIOException(e); + @Override + public void call(int termNum) { + try { + BytesRef term = docToTerm.lookupOrd(termNum); + Object obj = sf.getType().toObject(sf, term); + double val = obj instanceof Date ? ((Date) obj).getTime() : ((Number) obj).doubleValue(); + result[currentSlot] += val * val; + sum[currentSlot] += val; + counts[currentSlot]++; + } catch (IOException e) { + // find a better way to do it + throw new UncheckedIOException(e); + } } - } - protected abstract double computeVal(int slot); + protected abstract double computeVal(int slot); - @Override - public int compare(int slotA, int slotB) { - return Double.compare(computeVal(slotA), computeVal(slotB)); - } + @Override + public int compare(int slotA, int slotB) { + return Double.compare(computeVal(slotA), computeVal(slotB)); + } - @Override - public Object getValue(int slot) { - if (fcontext.isShard()) { - ArrayList lst = new ArrayList(3); - lst.add(counts[slot]); - lst.add(result[slot]); - lst.add(sum[slot]); - return lst; - } else { - return computeVal(slot); + @Override + @SuppressWarnings({"unchecked", "rawtypes"}) + public Object getValue(int slot) { + if (fcontext.isShard()) { + ArrayList lst = new ArrayList(3); + lst.add(counts[slot]); + lst.add(result[slot]); + lst.add(sum[slot]); + return lst; + } else { + return computeVal(slot); + } } - } - @Override - public void reset() throws IOException { - super.reset(); - Arrays.fill(counts, 0); - Arrays.fill(sum, 0); - } + @Override + public void reset() throws IOException { + super.reset(); + Arrays.fill(counts, 0); + Arrays.fill(sum, 0); + } - @Override - public void resize(Resizer resizer) { - super.resize(resizer); - resizer.resize(counts, 0); - resizer.resize(sum, 0); + @Override + public void resize(Resizer resizer) { + super.resize(resizer); + this.counts = resizer.resize(counts, 0); + this.sum = resizer.resize(sum, 0); + } } } diff --git a/solr/core/src/java/org/apache/solr/search/facet/UniqueAgg.java b/solr/core/src/java/org/apache/solr/search/facet/UniqueAgg.java index 240c1a7eb117..7f476d6ef8bb 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/UniqueAgg.java +++ 
b/solr/core/src/java/org/apache/solr/search/facet/UniqueAgg.java @@ -66,7 +66,7 @@ public FacetMerger createFacetMerger(Object prototype) { return new Merger(); } - private static class Merger extends FacetSortableMerger { + private static class Merger extends FacetModule.FacetSortableMerger { long answer = -1; long sumUnique; Set values; @@ -75,6 +75,7 @@ private static class Merger extends FacetSortableMerger { long shardsMissingMax; @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void merge(Object facetResult, Context mcontext) { SimpleOrderedMap map = (SimpleOrderedMap)facetResult; long unique = ((Number)map.get(UNIQUE)).longValue(); @@ -117,7 +118,7 @@ public Object getMergedResult() { } @Override - public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) { + public int compareTo(FacetModule.FacetSortableMerger other, FacetRequest.SortDirection direction) { return Long.compare( getLong(), ((Merger)other).getLong() ); } } @@ -177,6 +178,7 @@ private int getCardinality(int slot) { return set == null ? 0 : set.cardinality(); } + @SuppressWarnings({"unchecked", "rawtypes"}) public Object getShardValue(int slot) throws IOException { LongSet set = sets[slot]; int unique = getCardinality(slot); diff --git a/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockAgg.java b/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockAgg.java index 8d5a0c3fc9fc..efa8dbfb49d3 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockAgg.java +++ b/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockAgg.java @@ -83,6 +83,6 @@ public UniqueBlockAgg(String field) { @Override public FacetMerger createFacetMerger(Object prototype) { - return new FacetLongMerger() ; + return new FacetModule.FacetLongMerger() ; } } diff --git a/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockQueryAgg.java b/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockQueryAgg.java index 885725dfc000..0df743d378f3 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockQueryAgg.java +++ b/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockQueryAgg.java @@ -25,7 +25,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BitSet; -import static org.apache.solr.search.join.BlockJoinParentQParser.getCachedFilter; +import static org.apache.solr.search.join.BlockJoinParentQParser.getCachedBitSetProducer; public class UniqueBlockQueryAgg extends UniqueBlockAgg { @@ -42,7 +42,7 @@ private UniqueBlockQuerySlotAcc(FacetContext fcontext, Query query, int numSlots @Override public void setNextReader(LeafReaderContext readerContext) throws IOException { - this.parentBitSet = getCachedFilter(fcontext.req, query).getFilter().getBitSet(readerContext); + this.parentBitSet = getCachedBitSetProducer(fcontext.req, query).getBitSet(readerContext); } @Override diff --git a/solr/core/src/java/org/apache/solr/search/facet/UniqueSlotAcc.java b/solr/core/src/java/org/apache/solr/search/facet/UniqueSlotAcc.java index 29873125d96b..0bb62da214e3 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/UniqueSlotAcc.java +++ b/solr/core/src/java/org/apache/solr/search/facet/UniqueSlotAcc.java @@ -74,6 +74,7 @@ public long getNonShardValue(int slot) { return res; } + @SuppressWarnings({"unchecked", "rawtypes"}) private Object getShardHLL(int slot) throws IOException { FixedBitSet ords = arr[slot]; if (ords == null) return HLLAgg.NO_VALUES; @@ -97,6 +98,7 @@ private Object getShardHLL(int slot) throws IOException { return map; } 
+ @SuppressWarnings({"unchecked", "rawtypes"}) private Object getShardValue(int slot) throws IOException { if (factory != null) return getShardHLL(slot); FixedBitSet ords = arr[slot]; diff --git a/solr/core/src/java/org/apache/solr/search/facet/VarianceAgg.java b/solr/core/src/java/org/apache/solr/search/facet/VarianceAgg.java index 892f5c5f3fd6..fa12be69cf30 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/VarianceAgg.java +++ b/solr/core/src/java/org/apache/solr/search/facet/VarianceAgg.java @@ -56,7 +56,7 @@ public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) } vs = sf.getType().getValueSource(sf, null); } - return new VarianceSlotAcc(vs, fcontext, numSlots); + return new SlotAcc.VarianceSlotAcc(vs, fcontext, numSlots); } @Override @@ -64,7 +64,7 @@ public FacetMerger createFacetMerger(Object prototype) { return new Merger(); } - private static class Merger extends FacetDoubleMerger { + private static class Merger extends FacetModule.FacetDoubleMerger { long count; double sumSq; double sum; @@ -89,7 +89,7 @@ protected double getDouble() { } } - class VarianceSortedNumericAcc extends SDVSortedNumericAcc { + class VarianceSortedNumericAcc extends DocValuesAcc.SDVSortedNumericAcc { public VarianceSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { super(fcontext, sf, numSlots); @@ -101,7 +101,7 @@ protected double computeVal(int slot) { } } - class VarianceSortedSetAcc extends SDVSortedSetAcc { + class VarianceSortedSetAcc extends DocValuesAcc.SDVSortedSetAcc { public VarianceSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { super(fcontext, sf, numSlots); @@ -113,7 +113,7 @@ protected double computeVal(int slot) { } } - class VarianceUnInvertedFieldAcc extends SDVUnInvertedFieldAcc { + class VarianceUnInvertedFieldAcc extends UnInvertedFieldAcc.SDVUnInvertedFieldAcc { public VarianceUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException { super(fcontext, sf, numSlots); diff --git a/solr/core/src/java/org/apache/solr/search/function/CollapseScoreFunction.java b/solr/core/src/java/org/apache/solr/search/function/CollapseScoreFunction.java index 3932f56c3a34..69a3d5993f4b 100644 --- a/solr/core/src/java/org/apache/solr/search/function/CollapseScoreFunction.java +++ b/solr/core/src/java/org/apache/solr/search/function/CollapseScoreFunction.java @@ -41,6 +41,7 @@ public int hashCode() { return 1213241257; } + @SuppressWarnings({"rawtypes"}) public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { return new CollapseScoreFunctionValues(context); } @@ -49,6 +50,7 @@ public static class CollapseScoreFunctionValues extends FunctionValues { private CollapseScore cscore; + @SuppressWarnings({"rawtypes"}) public CollapseScoreFunctionValues(Map context) { this.cscore = (CollapseScore) context.get("CSCORE"); assert null != this.cscore; diff --git a/solr/core/src/java/org/apache/solr/search/function/FieldNameValueSource.java b/solr/core/src/java/org/apache/solr/search/function/FieldNameValueSource.java index c122dbb611e4..7b5714a2e33a 100644 --- a/solr/core/src/java/org/apache/solr/search/function/FieldNameValueSource.java +++ b/solr/core/src/java/org/apache/solr/search/function/FieldNameValueSource.java @@ -39,6 +39,7 @@ public String getFieldName() { } @Override + @SuppressWarnings({"rawtypes"}) public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { throw new 
UnsupportedOperationException("FieldNameValueSource should not be directly used: " + this); } diff --git a/solr/core/src/java/org/apache/solr/search/function/FileFloatSource.java b/solr/core/src/java/org/apache/solr/search/function/FileFloatSource.java index 3b2eb23cf7af..183cf2d8e9c3 100644 --- a/solr/core/src/java/org/apache/solr/search/function/FileFloatSource.java +++ b/solr/core/src/java/org/apache/solr/search/function/FileFloatSource.java @@ -88,6 +88,8 @@ public String description() { } @Override + @SuppressWarnings({"rawtypes"}) + public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { final int off = readerContext.docBase; IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(readerContext); @@ -165,29 +167,34 @@ protected Object createValue(IndexReader reader, Object key) { /** Internal cache. (from lucene FieldCache) */ abstract static class Cache { + @SuppressWarnings({"rawtypes"}) private final Map readerCache = new WeakHashMap(); protected abstract Object createValue(IndexReader reader, Object key); + @SuppressWarnings({"unchecked"}) public void refresh(IndexReader reader, Object key) { Object refreshedValues = createValue(reader, key); synchronized (readerCache) { + @SuppressWarnings({"rawtypes"}) Map innerCache = (Map) readerCache.get(reader); if (innerCache == null) { - innerCache = new HashMap(); + innerCache = new HashMap<>(); readerCache.put(reader, innerCache); } innerCache.put(key, refreshedValues); } } + @SuppressWarnings({"unchecked"}) public Object get(IndexReader reader, Object key) { + @SuppressWarnings({"rawtypes"}) Map innerCache; Object value; synchronized (readerCache) { innerCache = (Map) readerCache.get(reader); if (innerCache == null) { - innerCache = new HashMap(); + innerCache = new HashMap<>(); readerCache.put(reader, innerCache); value = null; } else { diff --git a/solr/core/src/java/org/apache/solr/search/function/MultiStringFunction.java b/solr/core/src/java/org/apache/solr/search/function/MultiStringFunction.java index c4aef6cf9952..763b1703cc11 100644 --- a/solr/core/src/java/org/apache/solr/search/function/MultiStringFunction.java +++ b/solr/core/src/java/org/apache/solr/search/function/MultiStringFunction.java @@ -60,7 +60,8 @@ public String description() { } @Override - public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { + @SuppressWarnings({"unchecked"}) + public FunctionValues getValues(@SuppressWarnings({"rawtypes"})Map context, LeafReaderContext readerContext) throws IOException { final FunctionValues[] valsArr = new FunctionValues[sources.length]; for (int i=0; i commands = new ArrayList<>(); private SolrIndexSearcher searcher; private boolean needDocSet = false; @@ -71,7 +72,7 @@ public Builder setQueryCommand(QueryCommand queryCommand) { return this; } - public Builder addCommandField(Command commandField) { + public Builder addCommandField(@SuppressWarnings({"rawtypes"})Command commandField) { commands.add(commandField); return this; } @@ -116,6 +117,7 @@ public CommandHandler build() { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private final QueryCommand queryCommand; + @SuppressWarnings({"rawtypes"}) private final List commands; private final SolrIndexSearcher searcher; private final boolean needDocset; @@ -127,7 +129,7 @@ public CommandHandler build() { private DocSet docSet; private CommandHandler(QueryCommand queryCommand, - List commands, + @SuppressWarnings({"rawtypes"})List 
commands, SolrIndexSearcher searcher, boolean needDocset, boolean truncateGroups, @@ -144,7 +146,7 @@ private CommandHandler(QueryCommand queryCommand, public void execute() throws IOException { final int nrOfCommands = commands.size(); List collectors = new ArrayList<>(nrOfCommands); - for (Command command : commands) { + for (@SuppressWarnings({"rawtypes"})Command command : commands) { collectors.addAll(command.create()); } @@ -162,17 +164,19 @@ public void execute() throws IOException { searchWithTimeLimiter(query, filter, null); } - for (Command command : commands) { + for (@SuppressWarnings({"rawtypes"})Command command : commands) { command.postCollect(searcher); } } private DocSet computeGroupedDocSet(Query query, ProcessedFilter filter, List collectors) throws IOException { + @SuppressWarnings({"rawtypes"}) Command firstCommand = commands.get(0); String field = firstCommand.getKey(); SchemaField sf = searcher.getSchema().getField(field); FieldType fieldType = sf.getType(); + @SuppressWarnings({"rawtypes"}) final AllGroupHeadsCollector allGroupHeadsCollector; if (fieldType.getNumberType() != null) { ValueSource vs = fieldType.getValueSource(sf, null); @@ -201,7 +205,7 @@ private DocSet computeDocSet(Query query, ProcessedFilter filter, List> toMutable(SchemaField field, Collec return result; } + @SuppressWarnings({"unchecked", "rawtypes"}) static TopGroups fromMutable(SchemaField field, TopGroups values) { if (values == null) { return null; } FieldType fieldType = field.getType(); - - @SuppressWarnings("unchecked") + GroupDocs groupDocs[] = new GroupDocs[values.groups.length]; - + for (int i = 0; i < values.groups.length; i++) { GroupDocs original = values.groups[i]; final BytesRef groupValue; diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java index 06b49f6c9fae..7c7362b69d7e 100644 --- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java +++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java @@ -138,6 +138,7 @@ public QueryCommand build() { private final String queryString; private final Query mainQuery; + @SuppressWarnings({"rawtypes"}) private TopDocsCollector topDocsCollector; private FilterCollector filterCollector; private MaxScoreCollector maxScoreCollector; diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java index 425a9877ff49..1dad199e46cc 100644 --- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java +++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java @@ -83,7 +83,9 @@ public SearchGroupsFieldCommand build() { private final int topNGroups; private final boolean includeGroupCount; + @SuppressWarnings({"rawtypes"}) private FirstPassGroupingCollector firstPassGroupingCollector; + @SuppressWarnings({"rawtypes"}) private AllGroupsCollector allGroupsCollector; private SearchGroupsFieldCommand(SchemaField field, Sort groupSort, int topNGroups, boolean includeGroupCount) { @@ -121,6 +123,7 @@ public List create() throws IOException { } @Override + @SuppressWarnings({"unchecked"}) public SearchGroupsFieldCommandResult result() throws IOException { final Collection> topGroups; if 
(firstPassGroupingCollector != null) { diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java index b6182864e603..c13a3b6fa4e5 100644 --- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java +++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java @@ -116,6 +116,7 @@ public TopGroupsFieldCommand build() { private final int maxDocPerGroup; private final boolean needScores; private final boolean needMaxScore; + @SuppressWarnings({"rawtypes"}) private TopGroupsCollector secondPassCollector; private TopGroups topGroups; @@ -161,6 +162,7 @@ public List create() throws IOException { } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void postCollect(IndexSearcher searcher) throws IOException { if (firstPhaseGroups.isEmpty()) { topGroups = new TopGroups<>(groupSort.getSort(), withinGroupSort.getSort(), 0, 0, new GroupDocs[0], Float.NaN); diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java index 016da1db2336..c3ced664173e 100644 --- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java +++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java @@ -48,6 +48,7 @@ public class SearchGroupShardResponseProcessor implements ShardResponseProcessor { @Override + @SuppressWarnings({"unchecked"}) public void process(ResponseBuilder rb, ShardRequest shardRequest) { SortSpec groupSortSpec = rb.getGroupingSpec().getGroupSortSpec(); Sort groupSort = rb.getGroupingSpec().getGroupSortSpec().getSort(); @@ -104,7 +105,7 @@ public void process(ResponseBuilder rb, ShardRequest shardRequest) { continue; // continue if there was an error and we're tolerant. } maxElapsedTime = (int) Math.max(maxElapsedTime, srsp.getSolrResponse().getElapsedTime()); - @SuppressWarnings("unchecked") + @SuppressWarnings({"rawtypes"}) NamedList firstPhaseResult = (NamedList) srsp.getSolrResponse().getResponse().get("firstPhase"); final Map result = serializer.transformToNative(firstPhaseResult, groupSort, withinGroupSort, srsp.getShard()); for (Map.Entry>>> entry : commandSearchGroups.entrySet()) { diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java index 32ea64a32f6f..5e84980d60df 100644 --- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java +++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java @@ -119,6 +119,7 @@ public void process(ResponseBuilder rb, ShardRequest shardRequest) { rb.rsp.getResponseHeader().asShallowMap().put(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY, Boolean.TRUE); continue; // continue if there was an error and we're tolerant. 
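A pattern worth noting across these grouping classes: the new @SuppressWarnings annotations are consistently pushed to the narrowest legal target — a single field, local declaration, or parameter — rather than a whole method or class. A sketch (hypothetical method names) of how much compiler checking each choice preserves:

    import org.apache.solr.common.util.NamedList;

    class SuppressionScopeSketch {
      // Broad: every unchecked/raw operation in the method is silenced, present and future.
      @SuppressWarnings({"unchecked", "rawtypes"})
      void broad(Object response) {
        NamedList secondPhase = (NamedList) response;
        // ...nothing below here gets warned about...
      }

      // Narrow: only the one known-raw declaration is exempted.
      void narrow(Object response) {
        @SuppressWarnings({"rawtypes"})
        NamedList secondPhase = (NamedList) response;
        // ...later code keeps full compiler checking...
      }
    }

The very next hunk, which annotates only the raw NamedList local in TopGroupsShardResponseProcessor, is an instance of the narrow form.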
} + @SuppressWarnings({"rawtypes"}) NamedList secondPhaseResult = (NamedList) srsp.getSolrResponse().getResponse().get("secondPhase"); if(secondPhaseResult == null) continue; @@ -156,11 +157,12 @@ public void process(ResponseBuilder rb, ShardRequest shardRequest) { continue; } + @SuppressWarnings({"rawtypes"}) TopGroups[] topGroupsArr = new TopGroups[topGroups.size()]; int docsPerGroup = docsPerGroupDefault; if (docsPerGroup < 0) { docsPerGroup = 0; - for (TopGroups subTopGroups : topGroups) { + for (@SuppressWarnings({"rawtypes"})TopGroups subTopGroups : topGroups) { docsPerGroup += subTopGroups.totalGroupedHitCount; } } diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java index 425ca5bdcafc..0ecc3c4df0bf 100644 --- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java +++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java @@ -35,6 +35,7 @@ /** * Implementation for transforming {@link SearchGroup} into a {@link NamedList} structure and visa versa. */ +@SuppressWarnings({"rawtypes"}) public class SearchGroupsResultTransformer implements ShardResultTransformer, Map> { private static final String TOP_GROUPS = "topGroups"; @@ -47,6 +48,7 @@ public SearchGroupsResultTransformer(SolrIndexSearcher searcher) { } @Override + @SuppressWarnings({"rawtypes"}) public NamedList transform(List data) throws IOException { final NamedList result = new NamedList<>(data.size()); for (Command command : data) { @@ -71,6 +73,7 @@ public NamedList transform(List data) throws IOException { return result; } + @SuppressWarnings({"rawtypes"}) private SearchGroup deserializeOneSearchGroup(SchemaField groupField, String groupValue, SortField[] groupSortField, List rawSearchGroupData) { SearchGroup searchGroup = new SearchGroup<>(); @@ -93,12 +96,13 @@ private SearchGroup deserializeOneSearchGroup(SchemaField groupField, } @Override + @SuppressWarnings({"rawtypes"}) public Map transformToNative(NamedList shardResponse, Sort groupSort, Sort withinGroupSort, String shard) { final Map result = new HashMap<>(shardResponse.size()); for (Map.Entry command : shardResponse) { List> searchGroups = new ArrayList<>(); NamedList topGroupsAndGroupCount = command.getValue(); - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked"}) final NamedList> rawSearchGroups = (NamedList>) topGroupsAndGroupCount.get(TOP_GROUPS); if (rawSearchGroups != null) { final SchemaField groupField = searcher.getSchema().getFieldOrNull(command.getKey()); @@ -128,6 +132,7 @@ private Object[] serializeOneSearchGroup(SortField[] groupSortField, SearchGroup return convertedSortValues; } + @SuppressWarnings({"rawtypes"}) private NamedList serializeSearchGroup(Collection> data, SearchGroupsFieldCommand command) { final NamedList result = new NamedList<>(data.size()); diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/ShardResultTransformer.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/ShardResultTransformer.java index 47e20a0018c3..e604b1f51cae 100644 --- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/ShardResultTransformer.java +++ 
b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/ShardResultTransformer.java @@ -37,6 +37,7 @@ public interface ShardResultTransformer { * @return {@link NamedList} structure * @throws IOException If I/O related errors occur during transforming */ + @SuppressWarnings({"rawtypes"}) NamedList transform(T data) throws IOException; /** @@ -48,6 +49,7 @@ public interface ShardResultTransformer { * @param shard The shard address where the response originated from * @return native structure of the data */ + @SuppressWarnings({"rawtypes"}) R transformToNative(NamedList shardResponse, Sort groupSort, Sort withinGroupSort, String shard); } diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java index fb5bd32ef9ea..047c31ed3d78 100644 --- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java +++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java @@ -54,6 +54,7 @@ * Implementation for transforming {@link TopGroups} and {@link TopDocs} into a {@link NamedList} structure and * vice versa. */ +@SuppressWarnings({"rawtypes"}) public class TopGroupsResultTransformer implements ShardResultTransformer, Map> { private final ResponseBuilder rb; @@ -138,7 +139,7 @@ public NamedList transform(List data) throws IOException { groupDocs.add(new GroupDocs<>(Float.NaN, maxScore, new TotalHits(totalGroupHits.longValue(), TotalHits.Relation.EQUAL_TO), scoreDocs, groupValueRef, null)); } - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked"}) GroupDocs[] groupDocsArr = groupDocs.toArray(new GroupDocs[groupDocs.size()]); TopGroups topGroups = new TopGroups<>( groupSort.getSort(), withinGroupSort.getSort(), totalHitCount, totalGroupedHitCount, groupDocsArr, Float.NaN diff --git a/solr/core/src/java/org/apache/solr/search/grouping/endresulttransformer/GroupedEndResultTransformer.java b/solr/core/src/java/org/apache/solr/search/grouping/endresulttransformer/GroupedEndResultTransformer.java index 3ba3470dc5a9..1b4058c478fd 100644 --- a/solr/core/src/java/org/apache/solr/search/grouping/endresulttransformer/GroupedEndResultTransformer.java +++ b/solr/core/src/java/org/apache/solr/search/grouping/endresulttransformer/GroupedEndResultTransformer.java @@ -68,6 +68,7 @@ public void transform(Map result, ResponseBuilder rb, SolrDocumentSou command.add("ngroups", totalGroupCount); } + @SuppressWarnings({"rawtypes"}) List groups = new ArrayList<>(); SchemaField groupField = searcher.getSchema().getField(entry.getKey()); FieldType groupFieldType = groupField.getType(); diff --git a/solr/core/src/java/org/apache/solr/search/join/BlockJoinChildQParser.java b/solr/core/src/java/org/apache/solr/search/join/BlockJoinChildQParser.java index 2c005acabd38..b24e3e459548 100644 --- a/solr/core/src/java/org/apache/solr/search/join/BlockJoinChildQParser.java +++ b/solr/core/src/java/org/apache/solr/search/join/BlockJoinChildQParser.java @@ -23,7 +23,6 @@ import org.apache.lucene.search.join.ToChildBlockJoinQuery; import org.apache.solr.common.params.SolrParams; import org.apache.solr.request.SolrQueryRequest; -import org.apache.solr.search.SolrConstantScoreQuery; import org.apache.solr.search.SyntaxError; public class BlockJoinChildQParser extends BlockJoinParentQParser { @@ 
-34,7 +33,7 @@ public BlockJoinChildQParser(String qstr, SolrParams localParams, SolrParams par @Override protected Query createQuery(Query parentListQuery, Query query, String scoreMode) { - return new ToChildBlockJoinQuery(query, getFilter(parentListQuery).getFilter()); + return new ToChildBlockJoinQuery(query, getBitSetProducer(parentListQuery)); } @Override @@ -49,8 +48,6 @@ protected Query noClausesQuery() throws SyntaxError { .add(new MatchAllDocsQuery(), Occur.MUST) .add(parents, Occur.MUST_NOT) .build(); - SolrConstantScoreQuery wrapped = new SolrConstantScoreQuery(getFilter(notParents)); - wrapped.setCache(false); - return wrapped; + return new BitSetProducerQuery(getBitSetProducer(notParents)); } } diff --git a/solr/core/src/java/org/apache/solr/search/join/BlockJoinParentQParser.java b/solr/core/src/java/org/apache/solr/search/join/BlockJoinParentQParser.java index 416c9f356ba3..151062f2d10b 100644 --- a/solr/core/src/java/org/apache/solr/search/join/BlockJoinParentQParser.java +++ b/solr/core/src/java/org/apache/solr/search/join/BlockJoinParentQParser.java @@ -20,22 +20,25 @@ import java.util.Objects; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.DocIdSet; +import org.apache.lucene.search.ConstantScoreScorer; +import org.apache.lucene.search.ConstantScoreWeight; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.join.QueryBitSetProducer; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.join.ToParentBlockJoinQuery; -import org.apache.lucene.util.BitDocIdSet; import org.apache.lucene.util.BitSet; -import org.apache.lucene.util.Bits; +import org.apache.lucene.util.BitSetIterator; import org.apache.solr.common.params.SolrParams; import org.apache.solr.request.SolrQueryRequest; -import org.apache.solr.search.BitsFilteredDocIdSet; -import org.apache.solr.search.Filter; +import org.apache.solr.search.ExtendedQueryBase; import org.apache.solr.search.QParser; import org.apache.solr.search.SolrCache; -import org.apache.solr.search.SolrConstantScoreQuery; import org.apache.solr.search.SyntaxError; public class BlockJoinParentQParser extends FiltersQParser { @@ -71,42 +74,26 @@ protected Query wrapSubordinateClause(Query subordinate) throws SyntaxError { @Override protected Query noClausesQuery() throws SyntaxError { - SolrConstantScoreQuery wrapped = new SolrConstantScoreQuery(getFilter(parseParentFilter())); - wrapped.setCache(false); - return wrapped; + return new BitSetProducerQuery(getBitSetProducer(parseParentFilter())); } protected Query createQuery(final Query parentList, Query query, String scoreMode) throws SyntaxError { - return new AllParentsAware(query, getFilter(parentList).filter, ScoreModeParser.parse(scoreMode), parentList); + return new AllParentsAware(query, getBitSetProducer(parentList), ScoreModeParser.parse(scoreMode), parentList); } - BitDocIdSetFilterWrapper getFilter(Query parentList) { - return getCachedFilter(req, parentList); + BitSetProducer getBitSetProducer(Query query) { + return getCachedBitSetProducer(req, query); } - public static BitDocIdSetFilterWrapper getCachedFilter(final SolrQueryRequest request, Query parentList) { + public static BitSetProducer getCachedBitSetProducer(final 
SolrQueryRequest request, Query query) { @SuppressWarnings("unchecked") - SolrCache parentCache = request.getSearcher().getCache(CACHE_NAME); + SolrCache parentCache = request.getSearcher().getCache(CACHE_NAME); // lazily retrieve from solr cache - BitDocIdSetFilterWrapper result; if (parentCache != null) { - Filter filter = parentCache.computeIfAbsent(parentList, - query -> new BitDocIdSetFilterWrapper(createParentFilter(query))); - if (filter instanceof BitDocIdSetFilterWrapper) { - result = (BitDocIdSetFilterWrapper) filter; - } else { - result = new BitDocIdSetFilterWrapper(createParentFilter(parentList)); - // non-atomic update of existing entry to ensure strong-typing - parentCache.put(parentList, result); - } + return parentCache.computeIfAbsent(query, QueryBitSetProducer::new); } else { - result = new BitDocIdSetFilterWrapper(createParentFilter(parentList)); + return new QueryBitSetProducer(query); } - return result; - } - - private static BitSetProducer createParentFilter(Query parentQ) { - return new QueryBitSetProducer(parentQ); } static final class AllParentsAware extends ToParentBlockJoinQuery { @@ -123,49 +110,55 @@ public Query getParentQuery(){ } } - // We need this wrapper since BitDocIdSetFilter does not extend Filter - public static class BitDocIdSetFilterWrapper extends Filter { + /** A constant score query based on a {@link BitSetProducer}. */ + static class BitSetProducerQuery extends ExtendedQueryBase { - private final BitSetProducer filter; + final BitSetProducer bitSetProducer; - BitDocIdSetFilterWrapper(BitSetProducer filter) { - this.filter = filter; + public BitSetProducerQuery(BitSetProducer bitSetProducer) { + this.bitSetProducer = bitSetProducer; + setCache(false); // because we assume the bitSetProducer is itself cached } @Override - public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException { - BitSet set = filter.getBitSet(context); - if (set == null) { - return null; - } - return BitsFilteredDocIdSet.wrap(new BitDocIdSet(set), acceptDocs); + public String toString(String field) { + return getClass().getSimpleName() + "(" + bitSetProducer + ")"; } - public BitSetProducer getFilter() { - return filter; + @Override + public boolean equals(Object other) { + return sameClassAs(other) && Objects.equals(bitSetProducer, getClass().cast(other).bitSetProducer); } @Override - public String toString(String field) { - return getClass().getSimpleName() + "(" + filter + ")"; + public int hashCode() { + return classHash() + bitSetProducer.hashCode(); } @Override - public boolean equals(Object other) { - return sameClassAs(other) && - Objects.equals(filter, getClass().cast(other).getFilter()); + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); } @Override - public int hashCode() { - return classHash() + filter.hashCode(); + public Weight createWeight(IndexSearcher searcher, org.apache.lucene.search.ScoreMode scoreMode, float boost) throws IOException { + return new ConstantScoreWeight(BitSetProducerQuery.this, boost) { + @Override + public Scorer scorer(LeafReaderContext context) throws IOException { + BitSet bitSet = bitSetProducer.getBitSet(context); + if (bitSet == null) { + return null; + } + DocIdSetIterator disi = new BitSetIterator(bitSet, bitSet.approximateCardinality()); + return new ConstantScoreScorer(this, boost, scoreMode, disi); + } + + @Override + public boolean isCacheable(LeafReaderContext ctx) { + return getCache(); + } + }; } } } - - - - - - diff --git 
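[Editor's note: the BlockJoinParentQParser hunk above swaps Solr's old Filter-based wrapper for Lucene's BitSetProducer abstraction and caches the producer itself in the parser's SolrCache via computeIfAbsent. A minimal, hedged sketch of the underlying Lucene API; the field names "type_s"/"color_s" and the child clause are invented for illustration and are not part of the patch:

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermQuery;
    import org.apache.lucene.search.join.BitSetProducer;
    import org.apache.lucene.search.join.QueryBitSetProducer;
    import org.apache.lucene.search.join.ScoreMode;
    import org.apache.lucene.search.join.ToParentBlockJoinQuery;

    // The producer computes (and internally caches) one parent BitSet per
    // segment, so caching the producer is what makes repeated joins cheap.
    BitSetProducer parents = new QueryBitSetProducer(new TermQuery(new Term("type_s", "parent")));
    Query childQuery = new TermQuery(new Term("color_s", "red"));
    Query join = new ToParentBlockJoinQuery(childQuery, parents, ScoreMode.None);

This is also why the new BitSetProducerQuery calls setCache(false): the expensive bitset is assumed to be cached one level down, inside the producer.]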
a/solr/core/src/java/org/apache/solr/search/join/ChildFieldValueSourceParser.java b/solr/core/src/java/org/apache/solr/search/join/ChildFieldValueSourceParser.java index 768c7e3733c5..30be3a320897 100644 --- a/solr/core/src/java/org/apache/solr/search/join/ChildFieldValueSourceParser.java +++ b/solr/core/src/java/org/apache/solr/search/join/ChildFieldValueSourceParser.java @@ -180,8 +180,8 @@ public ValueSource parse(FunctionQParser fp) throws SyntaxError { } bjQ = (AllParentsAware) query; - parentFilter = BlockJoinParentQParser.getCachedFilter(fp.getReq(), bjQ.getParentQuery()).getFilter(); - childFilter = BlockJoinParentQParser.getCachedFilter(fp.getReq(), bjQ.getChildQuery()).getFilter(); + parentFilter = BlockJoinParentQParser.getCachedBitSetProducer(fp.getReq(), bjQ.getParentQuery()); + childFilter = BlockJoinParentQParser.getCachedBitSetProducer(fp.getReq(), bjQ.getChildQuery()); if (sortFieldName==null || sortFieldName.equals("")) { throw new SyntaxError ("field is omitted in "+fp.getString()); diff --git a/solr/core/src/java/org/apache/solr/search/join/GraphTermsCollector.java b/solr/core/src/java/org/apache/solr/search/join/GraphEdgeCollector.java similarity index 59% rename from solr/core/src/java/org/apache/solr/search/join/GraphTermsCollector.java rename to solr/core/src/java/org/apache/solr/search/join/GraphEdgeCollector.java index 6ca02d35728a..02ed123f3a7e 100644 --- a/solr/core/src/java/org/apache/solr/search/join/GraphTermsCollector.java +++ b/solr/core/src/java/org/apache/solr/search/join/GraphEdgeCollector.java @@ -53,7 +53,7 @@ abstract class GraphEdgeCollector extends SimpleCollector implements Collector { // known leaf nodes DocSet leafNodes; - int numHits=0; // number of documents visited + int numHits = 0; // number of documents visited BitSet bits; // if not null, used to collect documents visited int base; @@ -74,8 +74,10 @@ public void setCollectDocs(FixedBitSet target) { } // the number of docs visited - public int getNumHits() { return numHits; } - + public int getNumHits() { + return numHits; + } + public void collect(int segDoc) throws IOException { int doc = segDoc + base; if (skipSet != null && skipSet.exists(doc)) { @@ -91,19 +93,19 @@ public void collect(int segDoc) throws IOException { // Optimization to not look up edges for a document that is a leaf node (i.e. has no outgoing edges) if (leafNodes == null || !leafNodes.exists(doc)) { addEdgeIdsToResult(segDoc); - } + } // Note: tracking links in for each result would be a huge memory hog... so not implementing at this time. } - + abstract void addEdgeIdsToResult(int doc) throws IOException; - + private void addDocToResult(int docWithBase) { // this document is part of the traversal. mark it in our bitmap. bits.set(docWithBase); // increment the hit count so we know how many docs we traversed this time. 
numHits++; } - + @Override public void doSetNextReader(LeafReaderContext context) throws IOException { base = context.docBase; @@ -115,87 +117,90 @@ public void doSetNextReader(LeafReaderContext context) throws IOException { public ScoreMode scoreMode() { return ScoreMode.COMPLETE_NO_SCORES; } - -} -class GraphTermsCollector extends GraphEdgeCollector { - // all the collected terms - private BytesRefHash collectorTerms; - private SortedSetDocValues docTermOrds; + static class GraphTermsCollector extends GraphEdgeCollector { + // all the collected terms + private BytesRefHash collectorTerms; + private SortedSetDocValues docTermOrds; - GraphTermsCollector(SchemaField collectField, DocSet skipSet, DocSet leafNodes) { - super(collectField, skipSet, leafNodes); - this.collectorTerms = new BytesRefHash(); - } - @Override - public void doSetNextReader(LeafReaderContext context) throws IOException { - super.doSetNextReader(context); - // Grab the updated doc values. - docTermOrds = DocValues.getSortedSet(context.reader(), collectField.getName()); - } + GraphTermsCollector(SchemaField collectField, DocSet skipSet, DocSet leafNodes) { + super(collectField, skipSet, leafNodes); + this.collectorTerms = new BytesRefHash(); + } - @Override - void addEdgeIdsToResult(int doc) throws IOException { - // set the doc to pull the edges ids for. - if (doc > docTermOrds.docID()) { - docTermOrds.advance(doc); + @Override + public void doSetNextReader(LeafReaderContext context) throws IOException { + super.doSetNextReader(context); + // Grab the updated doc values. + docTermOrds = DocValues.getSortedSet(context.reader(), collectField.getName()); } - if (doc == docTermOrds.docID()) { - BytesRef edgeValue = new BytesRef(); - long ord; - while ((ord = docTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { - edgeValue = docTermOrds.lookupOrd(ord); - // add the edge id to the collector terms. - collectorTerms.add(edgeValue); + + @Override + void addEdgeIdsToResult(int doc) throws IOException { + // set the doc to pull the edges ids for. + if (doc > docTermOrds.docID()) { + docTermOrds.advance(doc); + } + if (doc == docTermOrds.docID()) { + BytesRef edgeValue = new BytesRef(); + long ord; + while ((ord = docTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { + edgeValue = docTermOrds.lookupOrd(ord); + // add the edge id to the collector terms. + collectorTerms.add(edgeValue); + } } } - } - @Override - public Query getResultQuery(SchemaField matchField, boolean useAutomaton) { - if (collectorTerms == null || collectorTerms.size() == 0) { - // return null if there are no terms (edges) to traverse. - return null; - } else { - // Create a query - Query q = null; - - // TODO: see if we should dynamically select this based on the frontier size. - if (useAutomaton) { - // build an automaton based query for the frontier. - Automaton autn = buildAutomaton(collectorTerms); - AutomatonQuery autnQuery = new AutomatonQuery(new Term(matchField.getName()), autn); - q = autnQuery; + @Override + public Query getResultQuery(SchemaField matchField, boolean useAutomaton) { + if (collectorTerms == null || collectorTerms.size() == 0) { + // return null if there are no terms (edges) to traverse. + return null; } else { - List termList = new ArrayList<>(collectorTerms.size()); - for (int i = 0 ; i < collectorTerms.size(); i++) { - BytesRef ref = new BytesRef(); - collectorTerms.get(i, ref); - termList.add(ref); + // Create a query + Query q = null; + + // TODO: see if we should dynamically select this based on the frontier size. 
+ if (useAutomaton) { + // build an automaton based query for the frontier. + Automaton autn = buildAutomaton(collectorTerms); + AutomatonQuery autnQuery = new AutomatonQuery(new Term(matchField.getName()), autn); + q = autnQuery; + } else { + List termList = new ArrayList<>(collectorTerms.size()); + for (int i = 0; i < collectorTerms.size(); i++) { + BytesRef ref = new BytesRef(); + collectorTerms.get(i, ref); + termList.add(ref); + } + q = (matchField.hasDocValues() && !matchField.indexed()) + ? new DocValuesTermsQuery(matchField.getName(), termList) + : new TermInSetQuery(matchField.getName(), termList); + } - q = (matchField.hasDocValues() && !matchField.indexed()) - ? new DocValuesTermsQuery(matchField.getName(), termList) - : new TermInSetQuery(matchField.getName(), termList); - } - return q; + return q; + } } - } - /** Build an automaton to represent the frontier query */ - private Automaton buildAutomaton(BytesRefHash termBytesHash) { - // need top pass a sorted set of terms to the autn builder (maybe a better way to avoid this?) - final TreeSet terms = new TreeSet(); - for (int i = 0 ; i < termBytesHash.size(); i++) { - BytesRef ref = new BytesRef(); - termBytesHash.get(i, ref); - terms.add(ref); + /** + * Build an automaton to represent the frontier query + */ + private Automaton buildAutomaton(BytesRefHash termBytesHash) { + // need to pass a sorted set of terms to the automaton builder (maybe a better way to avoid this?) + final TreeSet terms = new TreeSet(); + for (int i = 0; i < termBytesHash.size(); i++) { + BytesRef ref = new BytesRef(); + termBytesHash.get(i, ref); + terms.add(ref); + } + final Automaton a = DaciukMihovAutomatonBuilder.build(terms); + return a; } - final Automaton a = DaciukMihovAutomatonBuilder.build(terms); - return a; + } } diff --git a/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java b/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java index 5bec5997dc03..c25679b9b4cd 100644 --- a/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java +++ b/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java @@ -200,7 +200,7 @@ private DocSet getDocSet() throws IOException { // Create the graph result collector for this level GraphEdgeCollector graphResultCollector = collectSchemaField.getType().isPointField() ?
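[Editor's note: for context on the buildAutomaton code above, DaciukMihovAutomatonBuilder.build expects its input terms in sorted order, which is why the collector first copies its BytesRefHash into a TreeSet. A self-contained sketch under that assumption; the terms and the field name "edge_ids" are invented:

    import java.util.TreeSet;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.AutomatonQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.util.BytesRef;
    import org.apache.lucene.util.automaton.Automaton;
    import org.apache.lucene.util.automaton.DaciukMihovAutomatonBuilder;

    // TreeSet keeps the BytesRefs in the sorted order the builder requires.
    TreeSet<BytesRef> frontier = new TreeSet<>();
    frontier.add(new BytesRef("node_17"));
    frontier.add(new BytesRef("node_3"));
    Automaton automaton = DaciukMihovAutomatonBuilder.build(frontier);
    Query frontierQuery = new AutomatonQuery(new Term("edge_ids"), automaton);]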
new GraphPointsCollector(collectSchemaField, new BitDocSet(resultBits), leafNodes) - : new GraphTermsCollector(collectSchemaField, new BitDocSet(resultBits), leafNodes); + : new GraphEdgeCollector.GraphTermsCollector(collectSchemaField, new BitDocSet(resultBits), leafNodes); fromSet = new BitDocSet(new FixedBitSet(capacity)); graphResultCollector.setCollectDocs(fromSet.getBits()); diff --git a/solr/core/src/java/org/apache/solr/search/join/XCJFQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/join/XCJFQParserPlugin.java index d8323915f873..a7300e6b1129 100644 --- a/solr/core/src/java/org/apache/solr/search/join/XCJFQParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/join/XCJFQParserPlugin.java @@ -45,11 +45,11 @@ public QParser createParser(String qstr, SolrParams localParams, SolrParams para } @Override - public void init(NamedList args) { + @SuppressWarnings({"unchecked"}) + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { routerField = (String) args.get("routerField"); solrUrlWhitelist = new HashSet<>(); if (args.get("solrUrl") != null) { - //noinspection unchecked for (String s : (List) args.get("solrUrl")) { if (!StringUtils.isEmpty(s)) solrUrlWhitelist.add(s); diff --git a/solr/core/src/java/org/apache/solr/search/join/XCJFQuery.java b/solr/core/src/java/org/apache/solr/search/join/XCJFQuery.java index f5c464b02fe0..fbe12e8b39dc 100644 --- a/solr/core/src/java/org/apache/solr/search/join/XCJFQuery.java +++ b/solr/core/src/java/org/apache/solr/search/join/XCJFQuery.java @@ -261,7 +261,7 @@ private TupleStream createSolrStream() { } private DocSet getDocSet() throws IOException { - SolrClientCache solrClientCache = new SolrClientCache(); + SolrClientCache solrClientCache = searcher.getCore().getCoreContainer().getSolrClientCache(); TupleStream solrStream; if (zkHost != null || solrUrl == null) { solrStream = createCloudSolrStream(solrClientCache); @@ -299,7 +299,6 @@ private DocSet getDocSet() throws IOException { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); } finally { solrStream.close(); - solrClientCache.close(); } return collector.getDocSet(); diff --git a/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java b/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java index f7a79dc82d16..70ce4e4cb8f2 100644 --- a/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java +++ b/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java @@ -91,6 +91,7 @@ public Query parse() { String[] fieldNames; if (qf != null) { + @SuppressWarnings({"unchecked", "rawtypes"}) ArrayList fields = new ArrayList(); for (String fieldName : qf) { if (!StringUtils.isEmpty(fieldName)) { @@ -106,6 +107,7 @@ public Query parse() { boostFields = SolrPluginUtils.parseFieldBoosts(fields.toArray(new String[0])); fieldNames = boostFields.keySet().toArray(new String[0]); } else { + @SuppressWarnings({"unchecked", "rawtypes"}) ArrayList fields = new ArrayList(); for (String field : doc.getFieldNames()) { // Only use fields that are stored and have an explicit analyzer. 
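[Editor's note: the XCJFQuery hunk above is a resource-lifecycle fix. Instead of constructing and closing a private SolrClientCache on every query, it borrows the container-wide cache, so pooled clients are reused across requests and closed exactly once, with the CoreContainer. A hedged sketch of the pattern; 'core', 'zkHost' and the collection name are illustrative, not from the patch:

    import org.apache.solr.client.solrj.io.SolrClientCache;
    import org.apache.solr.client.solrj.io.stream.CloudSolrStream;
    import org.apache.solr.client.solrj.io.stream.StreamContext;
    import org.apache.solr.client.solrj.io.stream.TupleStream;
    import org.apache.solr.common.params.ModifiableSolrParams;

    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("q", "*:*");
    // Shared, container-owned cache: do NOT close it per request.
    SolrClientCache clientCache = core.getCoreContainer().getSolrClientCache();
    TupleStream stream = new CloudSolrStream(zkHost, "otherCollection", params);
    StreamContext context = new StreamContext();
    context.setSolrClientCache(clientCache);
    stream.setStreamContext(context);]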
@@ -128,6 +130,7 @@ public Query parse() { for (String field : fieldNames) { Collection fieldValues = doc.getFieldValues(field); if (fieldValues != null) { + @SuppressWarnings({"unchecked", "rawtypes"}) Collection values = new ArrayList(); for (Object val : fieldValues) { if (val instanceof IndexableField) { @@ -187,6 +190,7 @@ private SolrDocument getDocument(String id) { }; core.getRequestHandler("/get").handleRequest(request, rsp); + @SuppressWarnings({"rawtypes"}) NamedList response = rsp.getValues(); return (SolrDocument) response.get("doc"); diff --git a/solr/core/src/java/org/apache/solr/search/mlt/SimpleMLTQParser.java b/solr/core/src/java/org/apache/solr/search/mlt/SimpleMLTQParser.java index c57fb0000624..04648e0f9d0e 100644 --- a/solr/core/src/java/org/apache/solr/search/mlt/SimpleMLTQParser.java +++ b/solr/core/src/java/org/apache/solr/search/mlt/SimpleMLTQParser.java @@ -99,6 +99,7 @@ public Query parse() { fieldNames = boostFields.keySet().toArray(new String[0]); } else { Map fieldDefinitions = req.getSearcher().getSchema().getFields(); + @SuppressWarnings({"unchecked", "rawtypes"}) ArrayList fields = new ArrayList(); for (Map.Entry entry : fieldDefinitions.entrySet()) { if (entry.getValue().indexed() && entry.getValue().stored()) diff --git a/solr/core/src/java/org/apache/solr/search/stats/ExactStatsCache.java b/solr/core/src/java/org/apache/solr/search/stats/ExactStatsCache.java index 63690c321642..e0ffee224c2d 100644 --- a/solr/core/src/java/org/apache/solr/search/stats/ExactStatsCache.java +++ b/solr/core/src/java/org/apache/solr/search/stats/ExactStatsCache.java @@ -70,7 +70,9 @@ public class ExactStatsCache extends StatsCache { @Override protected StatsSource doGet(SolrQueryRequest req) { + @SuppressWarnings({"unchecked"}) Map currentGlobalColStats = (Map) req.getContext().getOrDefault(CURRENT_GLOBAL_COL_STATS, Collections.emptyMap()); + @SuppressWarnings({"unchecked"}) Map currentGlobalTermStats = (Map) req.getContext().getOrDefault(CURRENT_GLOBAL_TERM_STATS, Collections.emptyMap()); if (log.isDebugEnabled()) { log.debug("Returning StatsSource. 
Collection stats={}, Term stats size= {}", currentGlobalColStats, currentGlobalTermStats.size()); @@ -127,12 +129,15 @@ protected void doMergeToGlobalStats(SolrQueryRequest req, List re } protected void addToPerShardColStats(SolrQueryRequest req, String shard, Map colStats) { + @SuppressWarnings({"unchecked"}) Map> perShardColStats = (Map>) req.getContext().computeIfAbsent(PER_SHARD_COL_STATS, Utils.NEW_HASHMAP_FUN); perShardColStats.put(shard, colStats); } protected void printStats(SolrQueryRequest req) { + @SuppressWarnings({"unchecked"}) Map> perShardTermStats = (Map>) req.getContext().getOrDefault(PER_SHARD_TERM_STATS, Collections.emptyMap()); + @SuppressWarnings({"unchecked"}) Map> perShardColStats = (Map>) req.getContext().getOrDefault(PER_SHARD_COL_STATS, Collections.emptyMap()); log.debug("perShardColStats={}, perShardTermStats={}", perShardColStats, perShardTermStats); } @@ -140,6 +145,7 @@ protected void printStats(SolrQueryRequest req) { protected void addToPerShardTermStats(SolrQueryRequest req, String shard, String termStatsString) { Map termStats = StatsUtil.termStatsMapFromString(termStatsString); if (termStats != null) { + @SuppressWarnings({"unchecked"}) Map> perShardTermStats = (Map>) req.getContext().computeIfAbsent(PER_SHARD_TERM_STATS, Utils.NEW_HASHMAP_FUN); perShardTermStats.put(shard, termStats); } @@ -275,11 +281,13 @@ protected void doSendGlobalStats(ResponseBuilder rb, ShardRequest outgoing) { } protected Map getPerShardColStats(ResponseBuilder rb, String shard) { + @SuppressWarnings({"unchecked"}) Map> perShardColStats = (Map>) rb.req.getContext().getOrDefault(PER_SHARD_COL_STATS, Collections.emptyMap()); return perShardColStats.get(shard); } protected TermStats getPerShardTermStats(SolrQueryRequest req, String t, String shard) { + @SuppressWarnings({"unchecked"}) Map> perShardTermStats = (Map>) req.getContext().getOrDefault(PER_SHARD_TERM_STATS, Collections.emptyMap()); Map cache = perShardTermStats.get(shard); return (cache != null) ? 
cache.get(t) : null; //Term doesn't exist in shard @@ -309,11 +317,13 @@ protected void doReceiveGlobalStats(SolrQueryRequest req) { protected void addToGlobalColStats(SolrQueryRequest req, Entry e) { + @SuppressWarnings({"unchecked"}) Map currentGlobalColStats = (Map) req.getContext().computeIfAbsent(CURRENT_GLOBAL_COL_STATS, Utils.NEW_HASHMAP_FUN); currentGlobalColStats.put(e.getKey(), e.getValue()); } protected void addToGlobalTermStats(SolrQueryRequest req, Entry e) { + @SuppressWarnings({"unchecked"}) Map currentGlobalTermStats = (Map) req.getContext().computeIfAbsent(CURRENT_GLOBAL_TERM_STATS, Utils.NEW_HASHMAP_FUN); currentGlobalTermStats.put(e.getKey(), e.getValue()); } diff --git a/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java b/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java index 0760fa7584a3..7e94f5651268 100644 --- a/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java +++ b/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java @@ -185,7 +185,9 @@ protected Map getPerShardColStats(ResponseBuilder rb, St protected void addToPerShardTermStats(SolrQueryRequest req, String shard, String termStatsString) { Map termStats = StatsUtil.termStatsMapFromString(termStatsString); if (termStats != null) { + @SuppressWarnings({"unchecked"}) SolrCache cache = perShardTermStats.computeIfAbsent(shard, s -> { + @SuppressWarnings({"rawtypes"}) CaffeineCache c = new CaffeineCache<>(); Map map = new HashMap<>(lruCacheInitArgs); map.put(CommonParams.NAME, s); diff --git a/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java b/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java index 68082a7c8405..acc00ee2201a 100644 --- a/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java +++ b/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java @@ -99,6 +99,7 @@ public abstract class AuditLoggerPlugin implements Closeable, Runnable, SolrInfo * This method removes parameters from config object after consuming, so subclasses can check for config errors. * @param pluginConfig the config for the plugin */ + @SuppressWarnings({"unchecked"}) public void init(Map pluginConfig) { formatter = new JSONAuditEventFormatter(); if (pluginConfig.containsKey(PARAM_EVENT_TYPES)) { @@ -346,6 +347,7 @@ protected void waitForQueueToDrain(int timeoutSeconds) { /** * Set of rules for when audit logging should be muted. 
*/ + @SuppressWarnings({"unchecked", "rawtypes"}) private class MuteRules { private List> rules; diff --git a/solr/core/src/java/org/apache/solr/security/AuthorizationContext.java b/solr/core/src/java/org/apache/solr/security/AuthorizationContext.java index 41236fffd45f..a3545dca2456 100644 --- a/solr/core/src/java/org/apache/solr/security/AuthorizationContext.java +++ b/solr/core/src/java/org/apache/solr/security/AuthorizationContext.java @@ -46,6 +46,7 @@ public String toString() { public abstract String getHttpHeader(String header); + @SuppressWarnings({"rawtypes"}) public abstract Enumeration getHeaderNames(); public abstract String getRemoteAddr(); diff --git a/solr/core/src/java/org/apache/solr/security/AutorizationEditOperation.java b/solr/core/src/java/org/apache/solr/security/AutorizationEditOperation.java index fc8b8771d6a9..a080d724b3d4 100644 --- a/solr/core/src/java/org/apache/solr/security/AutorizationEditOperation.java +++ b/solr/core/src/java/org/apache/solr/security/AutorizationEditOperation.java @@ -54,6 +54,7 @@ public Map edit(Map latestConf, CommandOperation }, SET_PERMISSION("set-permission") { @Override + @SuppressWarnings({"unchecked"}) public Map edit(Map latestConf, CommandOperation op) { Integer index = op.getInt("index", null); Integer beforeIdx = op.getInt("before",null); @@ -76,12 +77,15 @@ public Map edit(Map latestConf, CommandOperation return null; } if(op.hasError()) return null; + @SuppressWarnings({"rawtypes"}) List permissions = getListValue(latestConf, "permissions"); setIndex(permissions); + @SuppressWarnings({"rawtypes"}) List permissionsCopy = new ArrayList<>(); boolean beforeSatisfied = beforeIdx == null; boolean indexSatisfied = index == null; for (int i = 0; i < permissions.size(); i++) { + @SuppressWarnings({"rawtypes"}) Map perm = permissions.get(i); Integer thisIdx = (Integer) perm.get("index"); if (thisIdx.equals(beforeIdx)) { @@ -115,6 +119,7 @@ public Map edit(Map latestConf, CommandOperation }, UPDATE_PERMISSION("update-permission") { @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public Map edit(Map latestConf, CommandOperation op) { Integer index = op.getInt("index"); if (op.hasError()) return null; @@ -134,11 +139,14 @@ public Map edit(Map latestConf, CommandOperation }, DELETE_PERMISSION("delete-permission") { @Override + @SuppressWarnings({"unchecked"}) public Map edit(Map latestConf, CommandOperation op) { Integer id = op.getInt(""); if(op.hasError()) return null; + @SuppressWarnings({"rawtypes"}) List p = getListValue(latestConf, "permissions"); setIndex(p); + @SuppressWarnings({"rawtypes"}) List c = p.stream().filter(map -> !id.equals(map.get("index"))).collect(Collectors.toList()); if(c.size() == p.size()){ op.addError("No such index :"+ id); @@ -166,6 +174,7 @@ public static AutorizationEditOperation get(String name) { return null; } + @SuppressWarnings({"unchecked", "rawtypes"}) static void setIndex(List permissionsCopy) { AtomicInteger counter = new AtomicInteger(0); permissionsCopy.stream().forEach(map -> map.put("index", counter.incrementAndGet())); diff --git a/solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java b/solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java index 81d9bec2c6ae..82405bb0fc7b 100644 --- a/solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java +++ b/solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java @@ -187,7 +187,7 @@ public boolean doAuthenticate(HttpServletRequest request, HttpServletResponse re * @return map of headers */ private Map 
getPromptHeaders(boolean isAjaxRequest) { - Map headers = new HashMap(authenticationProvider.getPromptHeaders()); + Map headers = new HashMap<>(authenticationProvider.getPromptHeaders()); if (isAjaxRequest && headers.containsKey(HttpHeaders.WWW_AUTHENTICATE) && headers.get(HttpHeaders.WWW_AUTHENTICATE).startsWith("Basic ")) { headers.put(HttpHeaders.WWW_AUTHENTICATE, "x" + headers.get(HttpHeaders.WWW_AUTHENTICATE)); diff --git a/solr/core/src/java/org/apache/solr/security/CertAuthPlugin.java b/solr/core/src/java/org/apache/solr/security/CertAuthPlugin.java new file mode 100644 index 000000000000..765aa894f367 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/security/CertAuthPlugin.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.security; + +import org.apache.http.HttpHeaders; + +import javax.servlet.FilterChain; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.security.cert.X509Certificate; +import java.util.Map; + +/** + * An authentication plugin that sets principal based on the certificate subject + */ +public class CertAuthPlugin extends AuthenticationPlugin { + @Override + public void init(Map pluginConfig) { + + } + + @Override + public boolean doAuthenticate(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) throws Exception { + X509Certificate[] certs = (X509Certificate[]) request.getAttribute("javax.servlet.request.X509Certificate"); + if (certs == null || certs.length == 0) { + numMissingCredentials.inc(); + response.setHeader(HttpHeaders.WWW_AUTHENTICATE, "Certificate"); + response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "require certificate"); + return false; + } + + HttpServletRequest wrapped = wrapWithPrincipal(request, certs[0].getSubjectX500Principal()); + numAuthenticated.inc(); + filterChain.doFilter(wrapped, response); + return true; + } +} diff --git a/solr/core/src/java/org/apache/solr/security/ExternalRoleRuleBasedAuthorizationPlugin.java b/solr/core/src/java/org/apache/solr/security/ExternalRoleRuleBasedAuthorizationPlugin.java new file mode 100644 index 000000000000..7575c167d9cc --- /dev/null +++ b/solr/core/src/java/org/apache/solr/security/ExternalRoleRuleBasedAuthorizationPlugin.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.security; + +import java.lang.invoke.MethodHandles; +import java.security.Principal; +import java.util.Collections; +import java.util.Map; +import java.util.Set; + +import org.apache.solr.common.SolrException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Rule Based Authz plugin implementation which reads user roles from the request. This requires + * a Principal implementing VerifiedUserRoles interface, e.g. JWTAuthenticationPlugin + */ +public class ExternalRoleRuleBasedAuthorizationPlugin extends RuleBasedAuthorizationPluginBase { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + @Override + public void init(Map initInfo) { + super.init(initInfo); + if (initInfo.containsKey("user-role")) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Configuration should not contain 'user-role' mappings"); + } + } + + /** + * Pulls roles from the Principal + * @param principal the user Principal which should contain roles + * @return set of roles as strings + */ + @Override + public Set getUserRoles(Principal principal) { + if(principal instanceof VerifiedUserRoles) { + return ((VerifiedUserRoles) principal).getVerifiedRoles(); + } else { + return Collections.emptySet(); + } + } +} diff --git a/solr/core/src/java/org/apache/solr/security/HadoopAuthPlugin.java b/solr/core/src/java/org/apache/solr/security/HadoopAuthPlugin.java index 36667f3272d7..469624112549 100644 --- a/solr/core/src/java/org/apache/solr/security/HadoopAuthPlugin.java +++ b/solr/core/src/java/org/apache/solr/security/HadoopAuthPlugin.java @@ -230,12 +230,10 @@ public String getFilterName() { public boolean doAuthenticate(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) throws Exception { if (TRACE_HTTP) { - log.info("----------HTTP Request---------{}"); + log.info("----------HTTP Request---------"); if (log.isInfoEnabled()) { log.info("{} : {}", request.getMethod(), request.getRequestURI()); - } - if (log.isInfoEnabled()) { - log.info("Query : {}", request.getQueryString()); + log.info("Query : {}", request.getQueryString()); // logOk } log.info("Headers :"); Enumeration headers = request.getHeaderNames(); diff --git a/solr/core/src/java/org/apache/solr/security/JWTAuthPlugin.java b/solr/core/src/java/org/apache/solr/security/JWTAuthPlugin.java index 79875864668a..fb7b9db02c9e 100644 --- a/solr/core/src/java/org/apache/solr/security/JWTAuthPlugin.java +++ b/solr/core/src/java/org/apache/solr/security/JWTAuthPlugin.java @@ -74,6 +74,7 @@ public class JWTAuthPlugin extends AuthenticationPlugin implements SpecProvider, private static final String PARAM_REQUIRE_SUBJECT = "requireSub"; private static final String PARAM_REQUIRE_ISSUER = "requireIss"; private static final String PARAM_PRINCIPAL_CLAIM = "principalClaim"; + private static final String PARAM_ROLES_CLAIM = "rolesClaim"; private static final String PARAM_REQUIRE_EXPIRATIONTIME = "requireExp"; private static final String PARAM_ALG_WHITELIST = "algWhitelist"; private static final String PARAM_JWK_CACHE_DURATION 
= "jwkCacheDur"; @@ -92,7 +93,7 @@ public class JWTAuthPlugin extends AuthenticationPlugin implements SpecProvider, private static final Set PROPS = ImmutableSet.of(PARAM_BLOCK_UNKNOWN, PARAM_REQUIRE_SUBJECT, PARAM_PRINCIPAL_CLAIM, PARAM_REQUIRE_EXPIRATIONTIME, PARAM_ALG_WHITELIST, - PARAM_JWK_CACHE_DURATION, PARAM_CLAIMS_MATCH, PARAM_SCOPE, PARAM_REALM, + PARAM_JWK_CACHE_DURATION, PARAM_CLAIMS_MATCH, PARAM_SCOPE, PARAM_REALM, PARAM_ROLES_CLAIM, PARAM_ADMINUI_SCOPE, PARAM_REDIRECT_URIS, PARAM_REQUIRE_ISSUER, PARAM_ISSUERS, // These keys are supported for now to enable PRIMARY issuer config through top-level keys JWTIssuerConfig.PARAM_JWK_URL, JWTIssuerConfig.PARAM_JWKS_URL, JWTIssuerConfig.PARAM_JWK, JWTIssuerConfig.PARAM_ISSUER, @@ -103,6 +104,7 @@ public class JWTAuthPlugin extends AuthenticationPlugin implements SpecProvider, private boolean requireExpirationTime; private List algWhitelist; private String principalClaim; + private String rolesClaim; private HashMap claimsMatchCompiled; private boolean blockUnknown; private List requiredScopes = new ArrayList<>(); @@ -140,6 +142,8 @@ public void init(Map pluginConfig) { PARAM_REQUIRE_SUBJECT); } principalClaim = (String) pluginConfig.getOrDefault(PARAM_PRINCIPAL_CLAIM, "sub"); + + rolesClaim = (String) pluginConfig.get(PARAM_ROLES_CLAIM); algWhitelist = (List) pluginConfig.get(PARAM_ALG_WHITELIST); realm = (String) pluginConfig.getOrDefault(PARAM_REALM, DEFAULT_AUTH_REALM); @@ -403,6 +407,8 @@ protected JWTAuthenticationResponse authenticate(String authorizationHeader) { // Fail if we require scopes but they don't exist return new JWTAuthenticationResponse(AuthCode.CLAIM_MISMATCH, "Claim " + CLAIM_SCOPE + " is required but does not exist in JWT"); } + + // Find scopes for user Set scopes = Collections.emptySet(); Object scopesObj = jwtClaims.getClaimValue(CLAIM_SCOPE); if (scopesObj != null) { @@ -417,10 +423,27 @@ protected JWTAuthenticationResponse authenticate(String authorizationHeader) { return new JWTAuthenticationResponse(AuthCode.SCOPE_MISSING, "Claim " + CLAIM_SCOPE + " does not contain any of the required scopes: " + requiredScopes); } } - final Set finalScopes = new HashSet<>(scopes); - finalScopes.remove("openid"); // Remove standard scope + } + + // Determine roles of user, either from 'rolesClaim' or from 'scope' as parsed above + final Set finalRoles = new HashSet<>(); + if (rolesClaim == null) { // Pass scopes with principal to signal to any Authorization plugins that user has some verified role claims - return new JWTAuthenticationResponse(AuthCode.AUTHENTICATED, new JWTPrincipalWithUserRoles(principal, jwtCompact, jwtClaims.getClaimsMap(), finalScopes)); + finalRoles.addAll(scopes); + finalRoles.remove("openid"); // Remove standard scope + } else { + // Pull roles from separate claim, either as whitespace separated list or as JSON array + Object rolesObj = jwtClaims.getClaimValue(rolesClaim); + if (rolesObj != null) { + if (rolesObj instanceof String) { + finalRoles.addAll(Arrays.asList(((String) rolesObj).split("\\s+"))); + } else if (rolesObj instanceof List) { + finalRoles.addAll(jwtClaims.getStringListClaimValue(rolesClaim)); + } + } + } + if (finalRoles.size() > 0) { + return new JWTAuthenticationResponse(AuthCode.AUTHENTICATED, new JWTPrincipalWithUserRoles(principal, jwtCompact, jwtClaims.getClaimsMap(), finalRoles)); } else { return new JWTAuthenticationResponse(AuthCode.AUTHENTICATED, new JWTPrincipal(principal, jwtCompact, jwtClaims.getClaimsMap())); } diff --git 
a/solr/core/src/java/org/apache/solr/security/JWTPrincipal.java b/solr/core/src/java/org/apache/solr/security/JWTPrincipal.java index 737f3fa8e4a8..810e49ce83c6 100644 --- a/solr/core/src/java/org/apache/solr/security/JWTPrincipal.java +++ b/solr/core/src/java/org/apache/solr/security/JWTPrincipal.java @@ -17,7 +17,6 @@ package org.apache.solr.security; -import java.io.Serializable; import java.security.Principal; import java.util.Map; import java.util.Objects; @@ -27,8 +26,7 @@ /** * Principal object that carries JWT token and claims for authenticated user. */ -public class JWTPrincipal implements Principal, Serializable { - private static final long serialVersionUID = 4144666467522831388L; +public class JWTPrincipal implements Principal { final String username; String token; Map claims; diff --git a/solr/core/src/java/org/apache/solr/security/KerberosFilter.java b/solr/core/src/java/org/apache/solr/security/KerberosFilter.java index 0937d699e3f0..6dd6c7fd6688 100644 --- a/solr/core/src/java/org/apache/solr/security/KerberosFilter.java +++ b/solr/core/src/java/org/apache/solr/security/KerberosFilter.java @@ -91,7 +91,7 @@ private HttpServletRequest substituteOriginalUserRequest(HttpServletRequest requ if (authzPlugin instanceof RuleBasedAuthorizationPlugin) { RuleBasedAuthorizationPlugin ruleBased = (RuleBasedAuthorizationPlugin) authzPlugin; if (request.getHeader(KerberosPlugin.ORIGINAL_USER_PRINCIPAL_HEADER) != null && - ruleBased.doesUserHavePermission(request.getUserPrincipal().getName(), PermissionNameProvider.Name.ALL)) { + ruleBased.doesUserHavePermission(request.getUserPrincipal(), PermissionNameProvider.Name.ALL)) { request = new HttpServletRequestWrapper(request) { @Override public Principal getUserPrincipal() { diff --git a/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java b/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java index 9a8bda45e93e..7877643b43aa 100644 --- a/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java +++ b/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java @@ -36,6 +36,7 @@ import org.apache.http.HttpRequest; import org.apache.http.protocol.HttpContext; import org.apache.solr.client.solrj.impl.Http2SolrClient; +import org.apache.solr.client.solrj.impl.HttpListenerFactory; import org.apache.solr.client.solrj.impl.Krb5HttpClientBuilder; import org.apache.solr.client.solrj.impl.SolrHttpClientBuilder; import org.apache.solr.cloud.ZkController; @@ -45,6 +46,7 @@ import org.apache.solr.core.CoreContainer; import org.apache.solr.request.SolrRequestInfo; import org.apache.solr.servlet.SolrDispatchFilter; +import org.eclipse.jetty.client.api.Request; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -96,9 +98,10 @@ public void init(Map pluginConfig) { } } + @VisibleForTesting protected FilterConfig getInitFilterConfig(Map pluginConfig, boolean skipKerberosChecking) { - Map params = new HashMap(); + Map params = new HashMap<>(); params.put("type", "kerberos"); putParam(params, "kerberos.name.rules", NAME_RULES_PARAM, "DEFAULT"); putParam(params, "token.valid", TOKEN_VALID_PARAM, "30"); @@ -159,7 +162,7 @@ protected FilterConfig getInitFilterConfig(Map pluginConfig, boo } // check impersonator config - for (Enumeration e = System.getProperties().propertyNames(); e.hasMoreElements();) { + for (@SuppressWarnings({"rawtypes"})Enumeration e = System.getProperties().propertyNames(); e.hasMoreElements();) { String key = e.nextElement().toString(); if (key.startsWith(IMPERSONATOR_PREFIX)) { if 
(!delegationTokenEnabled) { @@ -258,6 +261,22 @@ protected boolean interceptInternodeRequest(HttpRequest httpRequest, HttpContext return false; } + @Override + protected boolean interceptInternodeRequest(Request request) { + SolrRequestInfo info = SolrRequestInfo.getRequestInfo(); + if (info != null && (info.getAction() == SolrDispatchFilter.Action.FORWARD || + info.getAction() == SolrDispatchFilter.Action.REMOTEQUERY)) { + if (info.getUserPrincipal() != null) { + if (log.isInfoEnabled()) { + log.info("Setting original user principal: {}", info.getUserPrincipal().getName()); + } + request.header(ORIGINAL_USER_PRINCIPAL_HEADER, info.getUserPrincipal().getName()); + return true; + } + } + return false; + } + @Override public SolrHttpClientBuilder getHttpClientBuilder(SolrHttpClientBuilder builder) { return kerberosBuilder.getBuilder(builder); @@ -265,6 +284,14 @@ public SolrHttpClientBuilder getHttpClientBuilder(SolrHttpClientBuilder builder) @Override public void setup(Http2SolrClient client) { + final HttpListenerFactory.RequestResponseListener listener = new HttpListenerFactory.RequestResponseListener() { + @Override + public void onQueued(Request request) { + interceptInternodeRequest(request); + } + }; + client.addListenerFactory(() -> listener); + kerberosBuilder.setup(client); } diff --git a/solr/core/src/java/org/apache/solr/security/PKIAuthenticationPlugin.java b/solr/core/src/java/org/apache/solr/security/PKIAuthenticationPlugin.java index 66f8bdf10a3b..bdf298f9acc1 100644 --- a/solr/core/src/java/org/apache/solr/security/PKIAuthenticationPlugin.java +++ b/solr/core/src/java/org/apache/solr/security/PKIAuthenticationPlugin.java @@ -195,6 +195,7 @@ PublicKey getRemotePublicKey(String nodename) { .execute(new HttpGet(uri), HttpClientUtil.createNewHttpClientRequestContext()); entity = rsp.getEntity(); byte[] bytes = EntityUtils.toByteArray(entity); + @SuppressWarnings({"rawtypes"}) Map m = (Map) Utils.fromJSON(bytes); String key = (String) m.get("key"); if (key == null) { diff --git a/solr/core/src/java/org/apache/solr/security/Permission.java b/solr/core/src/java/org/apache/solr/security/Permission.java index 33ae8f72434d..5c47902c6af2 100644 --- a/solr/core/src/java/org/apache/solr/security/Permission.java +++ b/solr/core/src/java/org/apache/solr/security/Permission.java @@ -41,12 +41,14 @@ class Permission { Set path, role, collections, method; Map> params; PermissionNameProvider.Name wellknownName; + @SuppressWarnings({"rawtypes"}) Map originalConfig; private Permission() { } - static Permission load(Map m) { + @SuppressWarnings({"unchecked", "rawtypes"}) + static Permission load(@SuppressWarnings({"rawtypes"})Map m) { Permission p = new Permission(); p.originalConfig = new LinkedHashMap<>(m); String name = (String) m.get(NAME); @@ -106,7 +108,7 @@ static Permission load(Map m) { /** * This checks for the defaults available other rules for the keys */ - private static Set readSetSmart(String permissionName, Map m, String key) { + private static Set readSetSmart(String permissionName, @SuppressWarnings({"rawtypes"})Map m, String key) { if(PermissionNameProvider.values.containsKey(permissionName) && !m.containsKey(key) && "collection".equals(key)) { return PermissionNameProvider.Name.get(permissionName).collName; } @@ -126,7 +128,7 @@ private static Set readSetSmart(String permissionName, Map m, String key * @param m the map from which to lookup * @param key the key with which to do lookup */ - static Set readValueAsSet(Map m, String key) { + static Set 
readValueAsSet(@SuppressWarnings({"rawtypes"})Map m, String key) { Set result = new HashSet<>(); Object val = m.get(key); if (val == null) { @@ -138,6 +140,7 @@ static Set readValueAsSet(Map m, String key) { return null; } if (val instanceof Collection) { + @SuppressWarnings({"rawtypes"}) Collection list = (Collection) val; for (Object o : list) result.add(String.valueOf(o)); } else if (val instanceof String) { diff --git a/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java b/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java index ef6ae263ec36..618a57241ac3 100644 --- a/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java +++ b/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java @@ -62,6 +62,7 @@ enum Name { final String name; final Set collName; + @SuppressWarnings({"unchecked"}) Name(String s, Object collName) { name = s; this.collName = collName instanceof Set? (Set)collName : singleton((String)collName); diff --git a/solr/core/src/java/org/apache/solr/security/RuleBasedAuthorizationPlugin.java b/solr/core/src/java/org/apache/solr/security/RuleBasedAuthorizationPlugin.java index eceb0a1e30a6..78af04010b40 100644 --- a/solr/core/src/java/org/apache/solr/security/RuleBasedAuthorizationPlugin.java +++ b/solr/core/src/java/org/apache/solr/security/RuleBasedAuthorizationPlugin.java @@ -16,329 +16,46 @@ */ package org.apache.solr.security; -import java.io.IOException; import java.lang.invoke.MethodHandles; import java.security.Principal; -import java.util.ArrayList; import java.util.HashMap; -import java.util.HashSet; -import java.util.List; import java.util.Map; import java.util.Set; -import java.util.function.Function; -import org.apache.solr.common.SpecProvider; -import org.apache.solr.common.util.Utils; -import org.apache.solr.common.util.ValidatingJsonMap; -import org.apache.solr.common.util.CommandOperation; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static java.util.Arrays.asList; -import static java.util.Collections.unmodifiableMap; -import static java.util.function.Function.identity; -import static java.util.stream.Collectors.toMap; -import static org.apache.solr.handler.admin.SecurityConfHandler.getListValue; import static org.apache.solr.handler.admin.SecurityConfHandler.getMapValue; - -public class RuleBasedAuthorizationPlugin implements AuthorizationPlugin, ConfigEditablePlugin, SpecProvider { +/** + * Original implementation of Rule Based Authz plugin which configures user/role + * mapping in the security.json configuration + */ +public class RuleBasedAuthorizationPlugin extends RuleBasedAuthorizationPluginBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private final Map> usersVsRoles = new HashMap<>(); - private final Map mapping = new HashMap<>(); - private final List permissions = new ArrayList<>(); - - - private static class WildCardSupportMap extends HashMap> { - final Set wildcardPrefixes = new HashSet<>(); - - @Override - public List put(String key, List value) { - if (key != null && key.endsWith("/*")) { - key = key.substring(0, key.length() - 2); - wildcardPrefixes.add(key); - } - return super.put(key, value); - } - - @Override - public List get(Object key) { - List result = super.get(key); - if (key == null || result != null) return result; - if (!wildcardPrefixes.isEmpty()) { - for (String s : wildcardPrefixes) { - if (key.toString().startsWith(s)) { - List l = super.get(s); - if (l != null) { - result = result 
== null ? new ArrayList<>() : new ArrayList<>(result); - result.addAll(l); - } - } - } - } - return result; - } - } - - @Override - public AuthorizationResponse authorize(AuthorizationContext context) { - List collectionRequests = context.getCollectionRequests(); - if (log.isDebugEnabled()) { - log.debug("Attempting to authorize request to [{}] of type: [{}], associated with collections [{}]", - context.getResource(), context.getRequestType(), collectionRequests); - } - - if (context.getRequestType() == AuthorizationContext.RequestType.ADMIN) { - log.debug("Authorizing an ADMIN request, checking admin permissions"); - MatchStatus flag = checkCollPerm(mapping.get(null), context); - return flag.rsp; - } - - for (AuthorizationContext.CollectionRequest collreq : collectionRequests) { - //check permissions for each collection - log.debug("Authorizing collection-aware request, checking perms applicable to specific collection [{}]", - collreq.collectionName); - MatchStatus flag = checkCollPerm(mapping.get(collreq.collectionName), context); - if (flag != MatchStatus.NO_PERMISSIONS_FOUND) return flag.rsp; - } - - log.debug("Authorizing collection-aware request, checking perms applicable to all (*) collections"); - //check wildcard (all=*) permissions. - MatchStatus flag = checkCollPerm(mapping.get("*"), context); - return flag.rsp; - } - - private MatchStatus checkCollPerm(Map> pathVsPerms, - AuthorizationContext context) { - if (pathVsPerms == null) return MatchStatus.NO_PERMISSIONS_FOUND; - - if (log.isTraceEnabled()) { - log.trace("Following perms are associated with collection"); - for (String pathKey : pathVsPerms.keySet()) { - final List permsAssociatedWithPath = pathVsPerms.get(pathKey); - log.trace("Path: [{}], Perms: [{}]", pathKey, permsAssociatedWithPath); - } - } - - String path = context.getResource(); - MatchStatus flag = checkPathPerm(pathVsPerms.get(path), context); - if (flag != MatchStatus.NO_PERMISSIONS_FOUND) return flag; - return checkPathPerm(pathVsPerms.get(null), context); - } - - private MatchStatus checkPathPerm(List permissions, AuthorizationContext context) { - if (permissions == null || permissions.isEmpty()) { - return MatchStatus.NO_PERMISSIONS_FOUND; - } - Principal principal = context.getUserPrincipal(); - - log.trace("Following perms are associated with this collection and path: [{}]", permissions); - final Permission governingPermission = findFirstGoverningPermission(permissions, context); - if (governingPermission == null) { - if (log.isDebugEnabled()) { - log.debug("No perms configured for the resource {} . 
So allowed to access", context.getResource()); - } - return MatchStatus.NO_PERMISSIONS_FOUND; - } - if (log.isDebugEnabled()) { - log.debug("Found perm [{}] to govern resource [{}]", governingPermission, context.getResource()); - } - - return determineIfPermissionPermitsPrincipal(principal, governingPermission); - } - - private Permission findFirstGoverningPermission(List permissions, AuthorizationContext context) { - for (int i = 0; i < permissions.size(); i++) { - Permission permission = permissions.get(i); - if (permissionAppliesToRequest(permission, context)) return permission; - } - - return null; - } - - private boolean permissionAppliesToRequest(Permission permission, AuthorizationContext context) { - if (log.isTraceEnabled()) { - log.trace("Testing whether permission [{}] applies to request [{}]", permission, context.getResource()); - } - if (PermissionNameProvider.values.containsKey(permission.name)) { - return predefinedPermissionAppliesToRequest(permission, context); - } else { - return customPermissionAppliesToRequest(permission, context); - } - } - - private boolean predefinedPermissionAppliesToRequest(Permission predefinedPermission, AuthorizationContext context) { - log.trace("Permission [{}] is a predefined perm", predefinedPermission); - if (predefinedPermission.wellknownName == PermissionNameProvider.Name.ALL) { - log.trace("'ALL' perm applies to all requests; perm applies."); - return true; //'ALL' applies to everything! - } else if (! (context.getHandler() instanceof PermissionNameProvider)) { - if (log.isTraceEnabled()) { - log.trace("Request handler [{}] is not a PermissionNameProvider, perm doesnt apply", context.getHandler()); - } - return false; // We're not 'ALL', and the handler isn't associated with any other predefined permissions - } else { - PermissionNameProvider handler = (PermissionNameProvider) context.getHandler(); - PermissionNameProvider.Name permissionName = handler.getPermissionName(context); - - boolean applies = permissionName != null && predefinedPermission.name.equals(permissionName.name); - log.trace("Request handler [{}] is associated with predefined perm [{}]? {}", - handler, predefinedPermission.name, applies); - return applies; - } - } - - private boolean customPermissionAppliesToRequest(Permission customPermission, AuthorizationContext context) { - log.trace("Permission [{}] is a custom permission", customPermission); - if (customPermission.method != null && !customPermission.method.contains(context.getHttpMethod())) { - if (log.isTraceEnabled()) { - log.trace("Custom permission requires method [{}] but request had method [{}]; permission doesn't apply", - customPermission.method, context.getHttpMethod()); - } - //this permissions HTTP method does not match this rule. 
try other rules - return false; - } - if (customPermission.params != null) { - for (Map.Entry> e : customPermission.params.entrySet()) { - String[] paramVal = context.getParams().getParams(e.getKey()); - if(!e.getValue().apply(paramVal)) { - if (log.isTraceEnabled()) { - log.trace("Request has param [{}] which is incompatible with custom perm [{}]; perm doesnt apply", - e.getKey(), customPermission); - } - return false; - } - } - } - - log.trace("Perm [{}] matches method and params for request; permission applies", customPermission); - return true; - } - - private MatchStatus determineIfPermissionPermitsPrincipal(Principal principal, Permission governingPermission) { - if (governingPermission.role == null) { - log.debug("Governing permission [{}] has no role; permitting access", governingPermission); - return MatchStatus.PERMITTED; - } - if (principal == null) { - log.debug("Governing permission [{}] has role, but request principal cannot be identified; forbidding access", governingPermission); - return MatchStatus.USER_REQUIRED; - } else if (governingPermission.role.contains("*")) { - log.debug("Governing permission [{}] allows all roles; permitting access", governingPermission); - return MatchStatus.PERMITTED; - } - - Set userRoles = usersVsRoles.get(principal.getName()); - for (String role : governingPermission.role) { - if (userRoles != null && userRoles.contains(role)) { - log.debug("Governing permission [{}] allows access to role [{}]; permitting access", governingPermission, role); - return MatchStatus.PERMITTED; - } - } - log.info("This resource is configured to have a permission {}, The principal {} does not have the right role ", governingPermission, principal); - return MatchStatus.FORBIDDEN; - } - - public Set getRoles(String user) { - Set roles = usersVsRoles.get(user); - return roles; - } - - public boolean doesUserHavePermission(String user, PermissionNameProvider.Name permission) { - Set roles = usersVsRoles.get(user); - if (roles != null) { - for (String role: roles) { - if (mapping.get(null) == null) continue; - List permissions = mapping.get(null).get(null); - if (permissions != null) { - for (Permission p: permissions) { - if (permission.equals(p.wellknownName) && p.role.contains(role)) { - return true; - } - } - } - } - } - return false; - } @Override public void init(Map initInfo) { - mapping.put(null, new WildCardSupportMap()); + super.init(initInfo); Map map = getMapValue(initInfo, "user-role"); for (Object o : map.entrySet()) { + @SuppressWarnings({"rawtypes"}) Map.Entry e = (Map.Entry) o; String roleName = (String) e.getKey(); usersVsRoles.put(roleName, Permission.readValueAsSet(map, roleName)); } - List perms = getListValue(initInfo, "permissions"); - for (Map o : perms) { - Permission p; - try { - p = Permission.load(o); - } catch (Exception exp) { - log.error("Invalid permission ", exp); - continue; - } - permissions.add(p); - add2Mapping(p); - } - } - - //this is to do optimized lookup of permissions for a given collection/path - private void add2Mapping(Permission permission) { - for (String c : permission.collections) { - WildCardSupportMap m = mapping.get(c); - if (m == null) mapping.put(c, m = new WildCardSupportMap()); - for (String path : permission.path) { - List perms = m.get(path); - if (perms == null) m.put(path, perms = new ArrayList<>()); - perms.add(permission); - } - } - } - - - @Override - public void close() throws IOException { } - - enum MatchStatus { - USER_REQUIRED(AuthorizationResponse.PROMPT), - 
NO_PERMISSIONS_FOUND(AuthorizationResponse.OK), - PERMITTED(AuthorizationResponse.OK), - FORBIDDEN(AuthorizationResponse.FORBIDDEN); - - final AuthorizationResponse rsp; - - MatchStatus(AuthorizationResponse rsp) { - this.rsp = rsp; - } } - - - @Override - public Map edit(Map latestConf, List commands) { - for (CommandOperation op : commands) { - AutorizationEditOperation operation = ops.get(op.name); - if (operation == null) { - op.unknownOperation(); - return null; - } - latestConf = operation.edit(latestConf, op); - if (latestConf == null) return null; - - } - return latestConf; - } - - private static final Map ops = unmodifiableMap(asList(AutorizationEditOperation.values()).stream().collect(toMap(AutorizationEditOperation::getOperationName, identity()))); - - + /** + * Look up user's role from the explicit user-role mapping + * + * @param principal the user Principal from the request + * @return set of roles as strings + */ @Override - public ValidatingJsonMap getSpec() { - return Utils.getSpec("cluster.security.RuleBasedAuthorization").getSpec(); - + public Set getUserRoles(Principal principal) { + return usersVsRoles.get(principal.getName()); } } diff --git a/solr/core/src/java/org/apache/solr/security/RuleBasedAuthorizationPluginBase.java b/solr/core/src/java/org/apache/solr/security/RuleBasedAuthorizationPluginBase.java new file mode 100644 index 000000000000..8a485f7b2459 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/security/RuleBasedAuthorizationPluginBase.java @@ -0,0 +1,341 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.security; + +import java.io.IOException; +import java.lang.invoke.MethodHandles; +import java.security.Principal; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; + +import org.apache.solr.common.SpecProvider; +import org.apache.solr.common.util.Utils; +import org.apache.solr.common.util.ValidatingJsonMap; +import org.apache.solr.common.util.CommandOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static java.util.Arrays.asList; +import static java.util.Collections.unmodifiableMap; +import static java.util.function.Function.identity; +import static java.util.stream.Collectors.toMap; +import static org.apache.solr.handler.admin.SecurityConfHandler.getListValue; + +/** + * Base class for rule based authorization plugins + */ +public abstract class RuleBasedAuthorizationPluginBase implements AuthorizationPlugin, ConfigEditablePlugin, SpecProvider { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + private final Map mapping = new HashMap<>(); + private final List permissions = new ArrayList<>(); + + + private static class WildCardSupportMap extends HashMap> { + final Set wildcardPrefixes = new HashSet<>(); + + @Override + public List put(String key, List value) { + if (key != null && key.endsWith("/*")) { + key = key.substring(0, key.length() - 2); + wildcardPrefixes.add(key); + } + return super.put(key, value); + } + + @Override + public List get(Object key) { + List result = super.get(key); + if (key == null || result != null) return result; + if (!wildcardPrefixes.isEmpty()) { + for (String s : wildcardPrefixes) { + if (key.toString().startsWith(s)) { + List l = super.get(s); + if (l != null) { + result = result == null ? new ArrayList<>() : new ArrayList<>(result); + result.addAll(l); + } + } + } + } + return result; + } + } + + @Override + public AuthorizationResponse authorize(AuthorizationContext context) { + List collectionRequests = context.getCollectionRequests(); + if (log.isDebugEnabled()) { + log.debug("Attempting to authorize request to [{}] of type: [{}], associated with collections [{}]", + context.getResource(), context.getRequestType(), collectionRequests); + } + + if (context.getRequestType() == AuthorizationContext.RequestType.ADMIN) { + log.debug("Authorizing an ADMIN request, checking admin permissions"); + MatchStatus flag = checkCollPerm(mapping.get(null), context); + return flag.rsp; + } + + for (AuthorizationContext.CollectionRequest collreq : collectionRequests) { + //check permissions for each collection + log.debug("Authorizing collection-aware request, checking perms applicable to specific collection [{}]", + collreq.collectionName); + MatchStatus flag = checkCollPerm(mapping.get(collreq.collectionName), context); + if (flag != MatchStatus.NO_PERMISSIONS_FOUND) return flag.rsp; + } + + log.debug("Authorizing collection-aware request, checking perms applicable to all (*) collections"); + //check wildcard (all=*) permissions. 
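+      // Permissions registered under the "*" collection apply to every collection;
+      // they are consulted only when none of the collection-specific lookups above
+      // matched a rule for this request.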
+ MatchStatus flag = checkCollPerm(mapping.get("*"), context); + return flag.rsp; + } + + private MatchStatus checkCollPerm(Map> pathVsPerms, + AuthorizationContext context) { + if (pathVsPerms == null) return MatchStatus.NO_PERMISSIONS_FOUND; + + if (log.isTraceEnabled()) { + log.trace("Following perms are associated with collection"); + for (String pathKey : pathVsPerms.keySet()) { + final List permsAssociatedWithPath = pathVsPerms.get(pathKey); + log.trace("Path: [{}], Perms: [{}]", pathKey, permsAssociatedWithPath); + } + } + + String path = context.getResource(); + MatchStatus flag = checkPathPerm(pathVsPerms.get(path), context); + if (flag != MatchStatus.NO_PERMISSIONS_FOUND) return flag; + return checkPathPerm(pathVsPerms.get(null), context); + } + + private MatchStatus checkPathPerm(List permissions, AuthorizationContext context) { + if (permissions == null || permissions.isEmpty()) { + return MatchStatus.NO_PERMISSIONS_FOUND; + } + Principal principal = context.getUserPrincipal(); + + log.trace("Following perms are associated with this collection and path: [{}]", permissions); + final Permission governingPermission = findFirstGoverningPermission(permissions, context); + if (governingPermission == null) { + if (log.isDebugEnabled()) { + log.debug("No perms configured for the resource {} . So allowed to access", context.getResource()); + } + return MatchStatus.NO_PERMISSIONS_FOUND; + } + if (log.isDebugEnabled()) { + log.debug("Found perm [{}] to govern resource [{}]", governingPermission, context.getResource()); + } + + return determineIfPermissionPermitsPrincipal(principal, governingPermission); + } + + private Permission findFirstGoverningPermission(List permissions, AuthorizationContext context) { + for (int i = 0; i < permissions.size(); i++) { + Permission permission = permissions.get(i); + if (permissionAppliesToRequest(permission, context)) return permission; + } + + return null; + } + + private boolean permissionAppliesToRequest(Permission permission, AuthorizationContext context) { + if (log.isTraceEnabled()) { + log.trace("Testing whether permission [{}] applies to request [{}]", permission, context.getResource()); + } + if (PermissionNameProvider.values.containsKey(permission.name)) { + return predefinedPermissionAppliesToRequest(permission, context); + } else { + return customPermissionAppliesToRequest(permission, context); + } + } + + private boolean predefinedPermissionAppliesToRequest(Permission predefinedPermission, AuthorizationContext context) { + log.trace("Permission [{}] is a predefined perm", predefinedPermission); + if (predefinedPermission.wellknownName == PermissionNameProvider.Name.ALL) { + log.trace("'ALL' perm applies to all requests; perm applies."); + return true; //'ALL' applies to everything! + } else if (! (context.getHandler() instanceof PermissionNameProvider)) { + if (log.isTraceEnabled()) { + log.trace("Request handler [{}] is not a PermissionNameProvider, perm doesnt apply", context.getHandler()); + } + return false; // We're not 'ALL', and the handler isn't associated with any other predefined permissions + } else { + PermissionNameProvider handler = (PermissionNameProvider) context.getHandler(); + PermissionNameProvider.Name permissionName = handler.getPermissionName(context); + + boolean applies = permissionName != null && predefinedPermission.name.equals(permissionName.name); + log.trace("Request handler [{}] is associated with predefined perm [{}]? 
{}", + handler, predefinedPermission.name, applies); + return applies; + } + } + + private boolean customPermissionAppliesToRequest(Permission customPermission, AuthorizationContext context) { + log.trace("Permission [{}] is a custom permission", customPermission); + if (customPermission.method != null && !customPermission.method.contains(context.getHttpMethod())) { + if (log.isTraceEnabled()) { + log.trace("Custom permission requires method [{}] but request had method [{}]; permission doesn't apply", + customPermission.method, context.getHttpMethod()); + } + //this permissions HTTP method does not match this rule. try other rules + return false; + } + if (customPermission.params != null) { + for (Map.Entry> e : customPermission.params.entrySet()) { + String[] paramVal = context.getParams().getParams(e.getKey()); + if(!e.getValue().apply(paramVal)) { + if (log.isTraceEnabled()) { + log.trace("Request has param [{}] which is incompatible with custom perm [{}]; perm doesnt apply", + e.getKey(), customPermission); + } + return false; + } + } + } + + log.trace("Perm [{}] matches method and params for request; permission applies", customPermission); + return true; + } + + private MatchStatus determineIfPermissionPermitsPrincipal(Principal principal, Permission governingPermission) { + if (governingPermission.role == null) { + log.debug("Governing permission [{}] has no role; permitting access", governingPermission); + return MatchStatus.PERMITTED; + } + if (principal == null) { + log.debug("Governing permission [{}] has role, but request principal cannot be identified; forbidding access", governingPermission); + return MatchStatus.USER_REQUIRED; + } else if (governingPermission.role.contains("*")) { + log.debug("Governing permission [{}] allows all roles; permitting access", governingPermission); + return MatchStatus.PERMITTED; + } + + Set userRoles = getUserRoles(principal); + for (String role : governingPermission.role) { + if (userRoles != null && userRoles.contains(role)) { + log.debug("Governing permission [{}] allows access to role [{}]; permitting access", governingPermission, role); + return MatchStatus.PERMITTED; + } + } + log.info("This resource is configured to have a permission {}, The principal {} does not have the right role ", governingPermission, principal); + return MatchStatus.FORBIDDEN; + } + + public boolean doesUserHavePermission(Principal principal, PermissionNameProvider.Name permission) { + Set roles = getUserRoles(principal); + if (roles != null) { + for (String role: roles) { + if (mapping.get(null) == null) continue; + List permissions = mapping.get(null).get(null); + if (permissions != null) { + for (Permission p: permissions) { + if (permission.equals(p.wellknownName) && p.role.contains(role)) { + return true; + } + } + } + } + } + return false; + } + + @Override + @SuppressWarnings({"unchecked"}) + public void init(@SuppressWarnings({"rawtypes"})Map initInfo) { + mapping.put(null, new WildCardSupportMap()); + @SuppressWarnings({"rawtypes"}) + List perms = getListValue(initInfo, "permissions"); + for (@SuppressWarnings({"rawtypes"})Map o : perms) { + Permission p; + try { + p = Permission.load(o); + } catch (Exception exp) { + log.error("Invalid permission ", exp); + continue; + } + permissions.add(p); + add2Mapping(p); + } + } + + //this is to do optimized lookup of permissions for a given collection/path + private void add2Mapping(Permission permission) { + for (String c : permission.collections) { + WildCardSupportMap m = mapping.get(c); + if (m == null) 
mapping.put(c, m = new WildCardSupportMap()); + for (String path : permission.path) { + List perms = m.get(path); + if (perms == null) m.put(path, perms = new ArrayList<>()); + perms.add(permission); + } + } + } + + /** + * Finds users roles + * @param principal the user Principal to fetch roles for + * @return set of roles as strings or empty set if no roles found + */ + public abstract Set getUserRoles(Principal principal); + + @Override + public void close() throws IOException { } + + enum MatchStatus { + USER_REQUIRED(AuthorizationResponse.PROMPT), + NO_PERMISSIONS_FOUND(AuthorizationResponse.OK), + PERMITTED(AuthorizationResponse.OK), + FORBIDDEN(AuthorizationResponse.FORBIDDEN); + + final AuthorizationResponse rsp; + + MatchStatus(AuthorizationResponse rsp) { + this.rsp = rsp; + } + } + + + + @Override + public Map edit(Map latestConf, List commands) { + for (CommandOperation op : commands) { + AutorizationEditOperation operation = ops.get(op.name); + if (operation == null) { + op.unknownOperation(); + return null; + } + latestConf = operation.edit(latestConf, op); + if (latestConf == null) return null; + + } + return latestConf; + } + + private static final Map ops = unmodifiableMap(asList(AutorizationEditOperation.values()).stream().collect(toMap(AutorizationEditOperation::getOperationName, identity()))); + + + @Override + public ValidatingJsonMap getSpec() { + return Utils.getSpec("cluster.security.RuleBasedAuthorization").getSpec(); + + } +} diff --git a/solr/core/src/java/org/apache/solr/security/Sha256AuthenticationProvider.java b/solr/core/src/java/org/apache/solr/security/Sha256AuthenticationProvider.java index 4db01eafea2f..a320ca2728b4 100644 --- a/solr/core/src/java/org/apache/solr/security/Sha256AuthenticationProvider.java +++ b/solr/core/src/java/org/apache/solr/security/Sha256AuthenticationProvider.java @@ -49,7 +49,8 @@ public class Sha256AuthenticationProvider implements ConfigEditablePlugin, Basi private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - static void putUser(String user, String pwd, Map credentials) { + @SuppressWarnings({"unchecked"}) + static void putUser(String user, String pwd, @SuppressWarnings({"rawtypes"})Map credentials) { if (user == null || pwd == null) return; String val = getSaltedHashedValue(pwd); credentials.put(user, val); @@ -74,6 +75,7 @@ public void init(Map pluginConfig) { promptHeader = Collections.unmodifiableMap(Collections.singletonMap("WWW-Authenticate", "Basic realm=\"" + realm + "\"")); credentials = new LinkedHashMap<>(); + @SuppressWarnings({"unchecked"}) Map users = (Map) pluginConfig.get("credentials"); if (users == null || users.isEmpty()) { throw new IllegalStateException("No users configured yet. 
At least one user must be configured in security.json"); @@ -129,6 +131,7 @@ public static String sha256(String password, String saltKey) { } @Override + @SuppressWarnings({"unchecked"}) public Map edit(Map latestConf, List commands) { for (CommandOperation cmd : commands) { if (!supported_ops.contains(cmd.name)) { @@ -138,6 +141,7 @@ public Map edit(Map latestConf, List names = cmd.getStrs(""); + @SuppressWarnings({"rawtypes"}) Map map = (Map) latestConf.get("credentials"); if (map == null || !map.keySet().containsAll(names)) { cmd.addError("No such user(s) " +names ); @@ -155,9 +159,12 @@ public Map edit(Map latestConf, List headers = (Map) getReq().getAttribute(AuthenticationPlugin.class.getName()); if (headers != null) { for (Map.Entry e : headers.entrySet()) response.setHeader(e.getKey(), e.getValue()); @@ -563,6 +566,7 @@ public Action call() throws IOException { return RETURN; case REMOTEQUERY: SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, new SolrQueryResponse(), action)); + mustClearSolrRequestInfo = true; remoteQuery(coreUrl + path, resp); return RETURN; case PROCESS: @@ -579,6 +583,7 @@ public Action call() throws IOException { * Content-Type) */ SolrRequestInfo.setRequestInfo(new SolrRequestInfo(solrReq, solrRsp, action)); + mustClearSolrRequestInfo = true; execute(solrRsp); if (shouldAudit()) { EventType eventType = solrRsp.getException() == null ? EventType.COMPLETED : EventType.ERROR; @@ -651,7 +656,9 @@ void destroy() { try { if (core != null) core.close(); } finally { - SolrRequestInfo.clearRequestInfo(); + if (mustClearSolrRequestInfo) { + SolrRequestInfo.clearRequestInfo(); + } } AuthenticationPlugin authcPlugin = cores.getAuthenticationPlugin(); if (authcPlugin != null) authcPlugin.closeRequest(); @@ -726,7 +733,7 @@ private void remoteQuery(String coreUrl, HttpServletResponse resp) throws IOExce if (httpEntity != null) { if (httpEntity.getContentEncoding() != null) - resp.setCharacterEncoding(httpEntity.getContentEncoding().getValue()); + resp.setHeader(httpEntity.getContentEncoding().getName(), httpEntity.getContentEncoding().getValue()); if (httpEntity.getContentType() != null) resp.setContentType(httpEntity.getContentType().getValue()); InputStream is = httpEntity.getContent(); @@ -775,6 +782,7 @@ protected void sendError(Throwable ex) throws IOException { } finally { try { if (exp != null) { + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap info = new SimpleOrderedMap(); int code = ResponseUtils.getErrorInfo(ex, info, log); sendError(code, info.toString()); @@ -885,6 +893,7 @@ private void writeResponse(SolrQueryResponse solrRsp, QueryResponseWriter respon if (null != ct) response.setContentType(ct); if (solrRsp.getException() != null) { + @SuppressWarnings({"rawtypes"}) NamedList info = new SimpleOrderedMap(); int code = ResponseUtils.getErrorInfo(solrRsp.getException(), info, log); solrRsp.add("error", info); @@ -1188,6 +1197,7 @@ public String getRemoteHost() { static final String CONTENT_LENGTH_HEADER = "Content-Length"; List parsedCommands; + @SuppressWarnings({"unchecked"}) public List getCommands(boolean validateInput) { if (parsedCommands == null) { Iterable contentStreams = solrReq.getContentStreams(); @@ -1202,6 +1212,7 @@ protected ValidatingJsonMap getSpec() { return null; } + @SuppressWarnings({"unchecked"}) protected Map getValidators(){ return Collections.EMPTY_MAP; } diff --git a/solr/core/src/java/org/apache/solr/servlet/LoadAdminUiServlet.java b/solr/core/src/java/org/apache/solr/servlet/LoadAdminUiServlet.java index 
44763517b4b4..54d592435d61 100644 --- a/solr/core/src/java/org/apache/solr/servlet/LoadAdminUiServlet.java +++ b/solr/core/src/java/org/apache/solr/servlet/LoadAdminUiServlet.java @@ -15,6 +15,13 @@ * limitations under the License. */ package org.apache.solr.servlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.nio.charset.StandardCharsets; import org.apache.commons.io.IOUtils; import org.apache.commons.io.output.CloseShieldOutputStream; @@ -24,15 +31,6 @@ import org.apache.solr.core.CoreContainer; import org.apache.solr.core.SolrCore; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStreamWriter; -import java.io.Writer; -import java.nio.charset.StandardCharsets; - /** * A simple servlet to load the Solr Admin UI * @@ -40,13 +38,20 @@ */ public final class LoadAdminUiServlet extends BaseSolrServlet { + // check system properties for whether or not admin UI is disabled, default is false + private static final boolean disabled = Boolean.parseBoolean(System.getProperty("disableAdminUI", "false")); + @Override - public void doGet(HttpServletRequest _request, - HttpServletResponse _response) - throws IOException { + public void doGet(HttpServletRequest _request, HttpServletResponse _response) throws IOException { + if(disabled){ + _response.sendError(404, "Solr Admin UI is disabled. To enable it, change the default value of SOLR_ADMIN_UI_" + + "ENABLED in bin/solr.in.sh or solr.in.cmd."); + return; + } HttpServletRequest request = SolrDispatchFilter.closeShield(_request, false); HttpServletResponse response = SolrDispatchFilter.closeShield(_response, false); - + + response.addHeader("X-Frame-Options", "DENY"); // security: SOLR-7966 - avoid clickjacking for admin interface // This attribute is set by the SolrDispatchFilter diff --git a/solr/core/src/java/org/apache/solr/servlet/ResponseUtils.java b/solr/core/src/java/org/apache/solr/servlet/ResponseUtils.java index c1ff02e1948c..8a5f2eb6ecab 100644 --- a/solr/core/src/java/org/apache/solr/servlet/ResponseUtils.java +++ b/solr/core/src/java/org/apache/solr/servlet/ResponseUtils.java @@ -38,7 +38,8 @@ private ResponseUtils() {} *
    * Status codes less than 100 are adjusted to be 500. */ - public static int getErrorInfo(Throwable ex, NamedList info, Logger log) { + @SuppressWarnings({"unchecked"}) + public static int getErrorInfo(Throwable ex, @SuppressWarnings({"rawtypes"})NamedList info, Logger log) { int code = 500; if (ex instanceof SolrException) { SolrException solrExc = (SolrException)ex; diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java index eb2f74a8f863..c18c7a12a7ad 100644 --- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java +++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java @@ -58,7 +58,6 @@ import io.opentracing.SpanContext; import io.opentracing.Tracer; import io.opentracing.tag.Tags; -import org.apache.commons.io.FileCleaningTracker; import org.apache.commons.lang3.StringUtils; import org.apache.http.HttpHeaders; import org.apache.http.client.HttpClient; @@ -79,11 +78,11 @@ import org.apache.solr.metrics.OperatingSystemMetricSet; import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.metrics.SolrMetricProducer; +import org.apache.solr.request.SolrRequestInfo; import org.apache.solr.security.AuditEvent; import org.apache.solr.security.AuthenticationPlugin; import org.apache.solr.security.PKIAuthenticationPlugin; import org.apache.solr.security.PublicKeyHandler; -import org.apache.solr.util.SolrFileCleaningTracker; import org.apache.solr.util.tracing.GlobalTracer; import org.apache.solr.util.StartupLoggingUtils; import org.apache.solr.util.configuration.SSLConfigurationsFactory; @@ -152,8 +151,6 @@ public void init(FilterConfig config) throws ServletException CoreContainer coresInit = null; try{ - SolrRequestParsers.fileCleaningTracker = new SolrFileCleaningTracker(); - StartupLoggingUtils.checkLogDir(); if (log.isInfoEnabled()) { log.info("Using logger factory {}", StartupLoggingUtils.getLoggerImplStr()); @@ -324,19 +321,6 @@ public void close() { CoreContainer cc = cores; cores = null; try { - try { - FileCleaningTracker fileCleaningTracker = SolrRequestParsers.fileCleaningTracker; - if (fileCleaningTracker != null) { - fileCleaningTracker.exitWhenFinished(); - } - } catch (NullPointerException e) { - // okay - } catch (Exception e) { - log.warn("Exception closing FileCleaningTracker", e); - } finally { - SolrRequestParsers.fileCleaningTracker = null; - } - if (metricManager != null) { try { metricManager.unregisterGauges(registryName, metricTag); @@ -456,6 +440,8 @@ public void doFilter(ServletRequest _request, ServletResponse _response, FilterC GlobalTracer.get().clearContext(); consumeInputFully(request, response); + SolrRequestInfo.reset(); + SolrRequestParsers.cleanupMultipartFiles(request); } } diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java b/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java index c8e886acf185..52696baba9eb 100644 --- a/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java +++ b/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java @@ -16,11 +16,14 @@ */ package org.apache.solr.servlet; +import javax.servlet.MultipartConfigElement; import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.Part; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.lang.invoke.MethodHandles; import java.net.URL; import java.nio.ByteBuffer; import 
java.nio.charset.CharacterCodingException; @@ -38,10 +41,6 @@ import java.util.List; import java.util.Map; -import org.apache.commons.fileupload.FileItem; -import org.apache.commons.fileupload.disk.DiskFileItemFactory; -import org.apache.commons.fileupload.servlet.ServletFileUpload; -import org.apache.commons.io.FileCleaningTracker; import org.apache.commons.io.input.CloseShieldInputStream; import org.apache.lucene.util.IOUtils; import org.apache.solr.api.V2HttpCall; @@ -60,14 +59,21 @@ import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrQueryRequestBase; import org.apache.solr.util.RTimerTree; -import org.apache.solr.util.SolrFileCleaningTracker; import org.apache.solr.util.tracing.GlobalTracer; +import org.eclipse.jetty.http.HttpFields; +import org.eclipse.jetty.http.MimeTypes; +import org.eclipse.jetty.server.MultiParts; +import org.eclipse.jetty.server.Request; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.solr.common.params.CommonParams.PATH; -public class SolrRequestParsers -{ +public class SolrRequestParsers { + + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + // Should these constants be in a more public place? public static final String MULTIPART = "multipart"; public static final String FORMDATA = "formdata"; @@ -92,9 +98,7 @@ public class SolrRequestParsers /** Default instance for e.g. admin requests. Limits to 2 MB uploads and does not allow remote streams. */ public static final SolrRequestParsers DEFAULT = new SolrRequestParsers(); - - public static volatile SolrFileCleaningTracker fileCleaningTracker; - + /** * Pass in an xml configuration. A null configuration will enable * everything with maximum values. @@ -167,7 +171,7 @@ public SolrQueryRequest parse( SolrCore core, String path, HttpServletRequest re ArrayList streams = new ArrayList<>(1); SolrParams params = parser.parseParamsAndFillStreams( req, streams ); if (GlobalTracer.get().tracing()) { - GlobalTracer.get().getTracer().activeSpan().setTag("params", params.toString()); + GlobalTracer.getTracer().activeSpan().setTag("params", params.toString()); } SolrQueryRequest sreq = buildRequestFrom(core, params, streams, getRequestTimer(req), req); @@ -534,29 +538,6 @@ public InputStream getStream() throws IOException { } } - - /** - * Wrap a FileItem as a ContentStream - */ - static class FileItemContentStream extends ContentStreamBase - { - private final FileItem item; - - public FileItemContentStream( FileItem f ) - { - item = f; - contentType = item.getContentType(); - name = item.getName(); - sourceInfo = item.getFieldName(); - size = item.getSize(); - } - - @Override - public InputStream getStream() throws IOException { - return item.getInputStream(); - } - } - /** * The raw parser just uses the params directly */ @@ -571,59 +552,99 @@ public SolrParams parseParamsAndFillStreams( } } - - /** * Extract Multipart streams */ static class MultipartRequestParser implements SolrRequestParser { - private final int uploadLimitKB; - private DiskFileItemFactory factory = new DiskFileItemFactory(); - - public MultipartRequestParser(int limit) { - uploadLimitKB = limit; + private final MultipartConfigElement multipartConfigElement; - // Set factory constraints - FileCleaningTracker fct = fileCleaningTracker; - if (fct != null) { - factory.setFileCleaningTracker(fileCleaningTracker); - } - // TODO - configure factory.setSizeThreshold(yourMaxMemorySize); - // TODO - configure 
factory.setRepository(yourTempDirectory); + public MultipartRequestParser(int uploadLimitKB) { + multipartConfigElement = new MultipartConfigElement( + null, // temp dir (null=default) + -1, // maxFileSize (-1=none) + uploadLimitKB * 1024, // maxRequestSize + 100 * 1024 ); // fileSizeThreshold after which will go to disk } @Override public SolrParams parseParamsAndFillStreams( final HttpServletRequest req, ArrayList streams) throws Exception { - if( !ServletFileUpload.isMultipartContent(req) ) { + if (!isMultipart(req)) { throw new SolrException( ErrorCode.BAD_REQUEST, "Not multipart content! "+req.getContentType() ); } - + // Magic way to tell Jetty dynamically we want multi-part processing. "Request" here is a Jetty class + req.setAttribute(Request.MULTIPART_CONFIG_ELEMENT, multipartConfigElement); + MultiMapSolrParams params = parseQueryString( req.getQueryString() ); - // Create a new file upload handler - ServletFileUpload upload = new ServletFileUpload(factory); - upload.setSizeMax( ((long) uploadLimitKB) * 1024L ); + // IMPORTANT: the Parts will all have the delete() method called by cleanupMultipartFiles() - // Parse the request - List items = upload.parseRequest(req); - for (FileItem item : items) { - // If it's a form field, put it in our parameter map - if (item.isFormField()) { + for (Part part : req.getParts()) { + if (part.getSubmittedFileName() == null) { // thus a form field and not file upload + // If it's a form field, put it in our parameter map + String partAsString = org.apache.commons.io.IOUtils.toString(new PartContentStream(part).getReader()); MultiMapSolrParams.addParam( - item.getFieldName().trim(), - item.getString(), params.getMap() ); - } - // Add the stream - else { - streams.add( new FileItemContentStream( item ) ); + part.getName().trim(), + partAsString, params.getMap() ); + } else { // file upload + streams.add(new PartContentStream(part)); } } return params; } + + boolean isMultipart(HttpServletRequest req) { + // Jetty utilities + return MimeTypes.Type.MULTIPART_FORM_DATA.is(HttpFields.valueParameters(req.getContentType(), null)); + } + + /** Wrap a MultiPart-{@link Part} as a {@link ContentStream} */ + static class PartContentStream extends ContentStreamBase { + private final Part part; + + public PartContentStream(Part part ) { + this.part = part; + contentType = part.getContentType(); + name = part.getName(); + sourceInfo = part.getSubmittedFileName(); + size = part.getSize(); + } + + @Override + public InputStream getStream() throws IOException { + return part.getInputStream(); + } + } } + /** Clean up any tmp files created by MultiPartInputStream. 
*/ + static void cleanupMultipartFiles(HttpServletRequest request) { + // See Jetty MultiPartCleanerListener from which we drew inspiration + MultiParts multiParts = (MultiParts) request.getAttribute(Request.MULTIPARTS); + if (multiParts == null || multiParts.getContext() != request.getServletContext()) { + return; + } + + log.debug("Deleting multipart files"); + + Collection parts; + try { + parts = multiParts.getParts(); + } catch (IOException e) { + log.warn("Errors deleting multipart tmp files", e); + return; + } + + for (Part part : parts) { + try { + part.delete(); + } catch (IOException e) { + log.warn("Errors deleting multipart tmp files", e); + } + } + } + /** * Extract application/x-www-form-urlencoded form data for POST requests */ @@ -791,7 +812,7 @@ public SolrParams parseParamsAndFillStreams(final HttpServletRequest req, ArrayL return formdata.parseParamsAndFillStreams(req, streams, input); } - if (ServletFileUpload.isMultipartContent(req)) { + if (multipart.isMultipart(req)) { return multipart.parseParamsAndFillStreams(req, streams); } diff --git a/solr/core/src/java/org/apache/solr/spelling/AbstractLuceneSpellChecker.java b/solr/core/src/java/org/apache/solr/spelling/AbstractLuceneSpellChecker.java index 9fc31105882b..2368aa90b764 100644 --- a/solr/core/src/java/org/apache/solr/spelling/AbstractLuceneSpellChecker.java +++ b/solr/core/src/java/org/apache/solr/spelling/AbstractLuceneSpellChecker.java @@ -80,7 +80,8 @@ public abstract class AbstractLuceneSpellChecker extends SolrSpellChecker { protected StringDistance sd; @Override - public String init(NamedList config, SolrCore core) { + @SuppressWarnings({"unchecked"}) + public String init(@SuppressWarnings({"rawtypes"})NamedList config, SolrCore core) { super.init(config, core); indexDir = (String) config.get(INDEX_DIR); String accuracy = (String) config.get(ACCURACY); diff --git a/solr/core/src/java/org/apache/solr/spelling/DirectSolrSpellChecker.java b/solr/core/src/java/org/apache/solr/spelling/DirectSolrSpellChecker.java index 1b7ac8a9b391..95cf0ea7b50a 100644 --- a/solr/core/src/java/org/apache/solr/spelling/DirectSolrSpellChecker.java +++ b/solr/core/src/java/org/apache/solr/spelling/DirectSolrSpellChecker.java @@ -96,7 +96,8 @@ public class DirectSolrSpellChecker extends SolrSpellChecker { private DirectSpellChecker checker = new DirectSpellChecker(); @Override - public String init(NamedList config, SolrCore core) { + @SuppressWarnings({"unchecked"}) + public String init(@SuppressWarnings({"rawtypes"})NamedList config, SolrCore core) { SolrParams params = config.toSolrParams(); diff --git a/solr/core/src/java/org/apache/solr/spelling/FileBasedSpellChecker.java b/solr/core/src/java/org/apache/solr/spelling/FileBasedSpellChecker.java index b9559f577fd3..c22e7dccc76c 100644 --- a/solr/core/src/java/org/apache/solr/spelling/FileBasedSpellChecker.java +++ b/solr/core/src/java/org/apache/solr/spelling/FileBasedSpellChecker.java @@ -60,7 +60,7 @@ public class FileBasedSpellChecker extends AbstractLuceneSpellChecker { public static final String WORD_FIELD_NAME = "word"; @Override - public String init(NamedList config, SolrCore core) { + public String init(@SuppressWarnings({"rawtypes"})NamedList config, SolrCore core) { super.init(config, core); characterEncoding = (String) config.get(SOURCE_FILE_CHAR_ENCODING); return name; diff --git a/solr/core/src/java/org/apache/solr/spelling/IndexBasedSpellChecker.java b/solr/core/src/java/org/apache/solr/spelling/IndexBasedSpellChecker.java index aa6e0499eb42..d833dd069e96 100644 --- 
a/solr/core/src/java/org/apache/solr/spelling/IndexBasedSpellChecker.java +++ b/solr/core/src/java/org/apache/solr/spelling/IndexBasedSpellChecker.java @@ -48,7 +48,7 @@ public class IndexBasedSpellChecker extends AbstractLuceneSpellChecker { protected IndexReader reader; @Override - public String init(NamedList config, SolrCore core) { + public String init(@SuppressWarnings({"rawtypes"})NamedList config, SolrCore core) { super.init(config, core); threshold = config.get(THRESHOLD_TOKEN_FREQUENCY) == null ? 0.0f : (Float) config.get(THRESHOLD_TOKEN_FREQUENCY); diff --git a/solr/core/src/java/org/apache/solr/spelling/QueryConverter.java b/solr/core/src/java/org/apache/solr/spelling/QueryConverter.java index 3c3a42be2dd5..72fad4b95b3b 100644 --- a/solr/core/src/java/org/apache/solr/spelling/QueryConverter.java +++ b/solr/core/src/java/org/apache/solr/spelling/QueryConverter.java @@ -45,6 +45,7 @@ * @since solr 1.3 */ public abstract class QueryConverter implements NamedListInitializedPlugin { + @SuppressWarnings({"rawtypes"}) private NamedList args; protected Analyzer analyzer; @@ -75,7 +76,7 @@ public abstract class QueryConverter implements NamedListInitializedPlugin { */ public static final int TERM_IN_BOOLEAN_QUERY_FLAG = 131072; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { this.args = args; } diff --git a/solr/core/src/java/org/apache/solr/spelling/SolrSpellChecker.java b/solr/core/src/java/org/apache/solr/spelling/SolrSpellChecker.java index 5543867e03fd..e513ad15a71f 100644 --- a/solr/core/src/java/org/apache/solr/spelling/SolrSpellChecker.java +++ b/solr/core/src/java/org/apache/solr/spelling/SolrSpellChecker.java @@ -56,7 +56,7 @@ public abstract class SolrSpellChecker { protected String field; protected String fieldTypeName; - public String init(NamedList config, SolrCore core) { + public String init(@SuppressWarnings({"rawtypes"})NamedList config, SolrCore core) { name = (String) config.get(DICTIONARY_NAME); if (name == null) { name = DEFAULT_DICTIONARY_NAME; diff --git a/solr/core/src/java/org/apache/solr/spelling/WordBreakSolrSpellChecker.java b/solr/core/src/java/org/apache/solr/spelling/WordBreakSolrSpellChecker.java index f96233fd551d..0a759ad12d3a 100644 --- a/solr/core/src/java/org/apache/solr/spelling/WordBreakSolrSpellChecker.java +++ b/solr/core/src/java/org/apache/solr/spelling/WordBreakSolrSpellChecker.java @@ -117,7 +117,7 @@ public enum BreakSuggestionTieBreaker { private static final Pattern spacePattern = Pattern.compile("\\s+"); @Override - public String init(@SuppressWarnings("unchecked") NamedList config, + public String init(@SuppressWarnings("rawtypes") NamedList config, SolrCore core) { String name = super.init(config, core); combineWords = boolParam(config, PARAM_COMBINE_WORDS); @@ -160,13 +160,13 @@ public String init(@SuppressWarnings("unchecked") NamedList config, return name; } - private String strParam(@SuppressWarnings("unchecked") NamedList config, + private String strParam(@SuppressWarnings("rawtypes") NamedList config, String paramName) { Object o = config.get(paramName); return o == null ? 
null : o.toString(); } - private boolean boolParam(@SuppressWarnings("unchecked") NamedList config, + private boolean boolParam(@SuppressWarnings("rawtypes") NamedList config, String paramName) { String s = strParam(config, paramName); if ("true".equalsIgnoreCase(s) || "on".equalsIgnoreCase(s)) { @@ -175,7 +175,7 @@ private boolean boolParam(@SuppressWarnings("unchecked") NamedList config, return false; } - private int intParam(@SuppressWarnings("unchecked") NamedList config, + private int intParam(@SuppressWarnings("rawtypes") NamedList config, String paramName) { Object o = config.get(paramName); if (o == null) { diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/DictionaryFactory.java b/solr/core/src/java/org/apache/solr/spelling/suggest/DictionaryFactory.java index 1bb6e05b18f0..cae856002315 100644 --- a/solr/core/src/java/org/apache/solr/spelling/suggest/DictionaryFactory.java +++ b/solr/core/src/java/org/apache/solr/spelling/suggest/DictionaryFactory.java @@ -31,10 +31,11 @@ public abstract class DictionaryFactory { /** Default dictionary implementation to use for IndexBasedDictionaries */ public static String DEFAULT_INDEX_BASED_DICT = HighFrequencyDictionaryFactory.class.getName(); + @SuppressWarnings({"rawtypes"}) protected NamedList params; /** Sets the parameters available to SolrSuggester for use in Dictionary creation */ - public void setParams(NamedList params) { + public void setParams(@SuppressWarnings({"rawtypes"})NamedList params) { this.params = params; } diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/LookupFactory.java b/solr/core/src/java/org/apache/solr/spelling/suggest/LookupFactory.java index f6d0312f9cfe..b54162bbd5f5 100644 --- a/solr/core/src/java/org/apache/solr/spelling/suggest/LookupFactory.java +++ b/solr/core/src/java/org/apache/solr/spelling/suggest/LookupFactory.java @@ -37,7 +37,7 @@ public abstract class LookupFactory { * Create a Lookup using config options in params and * current core */ - public abstract Lookup create(NamedList params, SolrCore core); + public abstract Lookup create(@SuppressWarnings({"rawtypes"})NamedList params, SolrCore core); /** *
    Returns the filename in which the in-memory data structure is stored
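Nearly every suggester and spellcheck hunk in this stretch makes the same mechanical change: the legacy create(NamedList, SolrCore) and init(NamedList, ...) signatures keep their raw NamedList for back-compat, and the patch narrows the lint suppression to the single parameter with @SuppressWarnings({"rawtypes"}) rather than annotating whole methods or classes. A minimal self-contained sketch of that pattern follows; NamedListLike, firstValue, and the "threshold" key are hypothetical stand-ins using only the JDK, not code from this patch.

import java.util.HashMap;

// NamedListLike is a hypothetical stand-in for Solr's NamedList; plain JDK only.
class NamedListLike<T> extends HashMap<String, T> {}

public class RawTypesPattern {
  // Annotating the single parameter keeps the rawtypes suppression as narrow as
  // possible while the signature stays raw, so existing callers compile unchanged.
  static String firstValue(@SuppressWarnings({"rawtypes"}) NamedListLike params, String key) {
    Object v = params.get(key); // raw access yields Object; the caller casts or parses
    return v == null ? null : v.toString();
  }

  public static void main(String[] args) {
    NamedListLike<Object> params = new NamedListLike<>();
    params.put("threshold", "0.5");
    System.out.println(firstValue(params, "threshold")); // prints 0.5
  }
}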
    diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/SolrSuggester.java b/solr/core/src/java/org/apache/solr/spelling/suggest/SolrSuggester.java index 984436a2074d..d31cf21f669c 100644 --- a/solr/core/src/java/org/apache/solr/spelling/suggest/SolrSuggester.java +++ b/solr/core/src/java/org/apache/solr/spelling/suggest/SolrSuggester.java @@ -99,6 +99,7 @@ public class SolrSuggester implements Accountable { * Uses the config and the core to initialize the underlying * Lucene suggester * */ + @SuppressWarnings({"unchecked"}) public String init(NamedList config, SolrCore core) { log.info("init: {}", config); diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/Suggester.java b/solr/core/src/java/org/apache/solr/spelling/suggest/Suggester.java index 8b26690e4ffb..c5cd82fbc64a 100644 --- a/solr/core/src/java/org/apache/solr/spelling/suggest/Suggester.java +++ b/solr/core/src/java/org/apache/solr/spelling/suggest/Suggester.java @@ -85,7 +85,7 @@ public class Suggester extends SolrSpellChecker { private LookupFactory factory; @Override - public String init(NamedList config, SolrCore core) { + public String init(@SuppressWarnings({"rawtypes"})NamedList config, SolrCore core) { log.info("init: {}", config); String name = super.init(config, core); threshold = config.get(THRESHOLD_TOKEN_FREQUENCY) == null ? 0.0f diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/fst/AnalyzingInfixLookupFactory.java b/solr/core/src/java/org/apache/solr/spelling/suggest/fst/AnalyzingInfixLookupFactory.java index 68bfaf64de31..1ce2206ba1b2 100644 --- a/solr/core/src/java/org/apache/solr/spelling/suggest/fst/AnalyzingInfixLookupFactory.java +++ b/solr/core/src/java/org/apache/solr/spelling/suggest/fst/AnalyzingInfixLookupFactory.java @@ -82,7 +82,7 @@ public class AnalyzingInfixLookupFactory extends LookupFactory { @Override - public Lookup create(NamedList params, SolrCore core) { + public Lookup create(@SuppressWarnings({"rawtypes"})NamedList params, SolrCore core) { // mandatory parameter Object fieldTypeName = params.get(QUERY_ANALYZER); if (fieldTypeName == null) { diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/fst/AnalyzingLookupFactory.java b/solr/core/src/java/org/apache/solr/spelling/suggest/fst/AnalyzingLookupFactory.java index eb3ab5fdc489..b963cff24861 100644 --- a/solr/core/src/java/org/apache/solr/spelling/suggest/fst/AnalyzingLookupFactory.java +++ b/solr/core/src/java/org/apache/solr/spelling/suggest/fst/AnalyzingLookupFactory.java @@ -75,7 +75,7 @@ public class AnalyzingLookupFactory extends LookupFactory { private static final String FILENAME = "wfsta.bin"; @Override - public Lookup create(NamedList params, SolrCore core) { + public Lookup create(@SuppressWarnings({"rawtypes"})NamedList params, SolrCore core) { // mandatory parameter Object fieldTypeName = params.get(QUERY_ANALYZER); if (fieldTypeName == null) { diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/fst/BlendedInfixLookupFactory.java b/solr/core/src/java/org/apache/solr/spelling/suggest/fst/BlendedInfixLookupFactory.java index 32a0ff0545d3..4e27197e913f 100644 --- a/solr/core/src/java/org/apache/solr/spelling/suggest/fst/BlendedInfixLookupFactory.java +++ b/solr/core/src/java/org/apache/solr/spelling/suggest/fst/BlendedInfixLookupFactory.java @@ -70,7 +70,7 @@ public class BlendedInfixLookupFactory extends AnalyzingInfixLookupFactory { @Override - public Lookup create(NamedList params, SolrCore core) { + public Lookup create(@SuppressWarnings({"rawtypes"})NamedList 
params, SolrCore core) { // mandatory parameter Object fieldTypeName = params.get(QUERY_ANALYZER); if (fieldTypeName == null) { diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/fst/FSTLookupFactory.java b/solr/core/src/java/org/apache/solr/spelling/suggest/fst/FSTLookupFactory.java index ff8d11942f86..7b4d4d859733 100644 --- a/solr/core/src/java/org/apache/solr/spelling/suggest/fst/FSTLookupFactory.java +++ b/solr/core/src/java/org/apache/solr/spelling/suggest/fst/FSTLookupFactory.java @@ -50,7 +50,7 @@ public class FSTLookupFactory extends LookupFactory { public static final String EXACT_MATCH_FIRST = "exactMatchFirst"; @Override - public Lookup create(NamedList params, SolrCore core) { + public Lookup create(@SuppressWarnings({"rawtypes"})NamedList params, SolrCore core) { int buckets = params.get(WEIGHT_BUCKETS) != null ? Integer.parseInt(params.get(WEIGHT_BUCKETS).toString()) : 10; diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/fst/FreeTextLookupFactory.java b/solr/core/src/java/org/apache/solr/spelling/suggest/fst/FreeTextLookupFactory.java index dfd9d66903ec..15bbbae7a990 100644 --- a/solr/core/src/java/org/apache/solr/spelling/suggest/fst/FreeTextLookupFactory.java +++ b/solr/core/src/java/org/apache/solr/spelling/suggest/fst/FreeTextLookupFactory.java @@ -53,7 +53,7 @@ public class FreeTextLookupFactory extends LookupFactory { @Override - public Lookup create(NamedList params, SolrCore core) { + public Lookup create(@SuppressWarnings({"rawtypes"})NamedList params, SolrCore core) { Object fieldTypeName = params.get(QUERY_ANALYZER); if (fieldTypeName == null) { throw new IllegalArgumentException("Error in configuration: " + QUERY_ANALYZER + " parameter is mandatory"); diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/fst/FuzzyLookupFactory.java b/solr/core/src/java/org/apache/solr/spelling/suggest/fst/FuzzyLookupFactory.java index 236bc7b662da..0fb3dbcb191c 100644 --- a/solr/core/src/java/org/apache/solr/spelling/suggest/fst/FuzzyLookupFactory.java +++ b/solr/core/src/java/org/apache/solr/spelling/suggest/fst/FuzzyLookupFactory.java @@ -68,7 +68,7 @@ public class FuzzyLookupFactory extends LookupFactory { private static final String FILENAME = "fwfsta.bin"; @Override - public Lookup create(NamedList params, SolrCore core) { + public Lookup create(@SuppressWarnings({"rawtypes"})NamedList params, SolrCore core) { // mandatory parameter Object fieldTypeName = params.get(AnalyzingLookupFactory.QUERY_ANALYZER); diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/fst/WFSTLookupFactory.java b/solr/core/src/java/org/apache/solr/spelling/suggest/fst/WFSTLookupFactory.java index 4eca516ff1c7..b9b397eec2da 100644 --- a/solr/core/src/java/org/apache/solr/spelling/suggest/fst/WFSTLookupFactory.java +++ b/solr/core/src/java/org/apache/solr/spelling/suggest/fst/WFSTLookupFactory.java @@ -40,7 +40,7 @@ public class WFSTLookupFactory extends LookupFactory { private static final String FILENAME = "wfst.bin"; @Override - public Lookup create(NamedList params, SolrCore core) { + public Lookup create(@SuppressWarnings({"rawtypes"})NamedList params, SolrCore core) { boolean exactMatchFirst = params.get(EXACT_MATCH_FIRST) != null ? 
Boolean.valueOf(params.get(EXACT_MATCH_FIRST).toString()) : true; diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/jaspell/JaspellLookupFactory.java b/solr/core/src/java/org/apache/solr/spelling/suggest/jaspell/JaspellLookupFactory.java index e199376427d9..205760d0b14e 100644 --- a/solr/core/src/java/org/apache/solr/spelling/suggest/jaspell/JaspellLookupFactory.java +++ b/solr/core/src/java/org/apache/solr/spelling/suggest/jaspell/JaspellLookupFactory.java @@ -35,7 +35,7 @@ public class JaspellLookupFactory extends LookupFactory { private static final String FILENAME = "jaspell.dat"; @Override - public Lookup create(NamedList params, SolrCore core) { + public Lookup create(@SuppressWarnings({"rawtypes"})NamedList params, SolrCore core) { log.info("init: {}", params); return new JaspellLookup(); } diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/tst/TSTLookupFactory.java b/solr/core/src/java/org/apache/solr/spelling/suggest/tst/TSTLookupFactory.java index 22d350397c4c..c6bbc9352791 100644 --- a/solr/core/src/java/org/apache/solr/spelling/suggest/tst/TSTLookupFactory.java +++ b/solr/core/src/java/org/apache/solr/spelling/suggest/tst/TSTLookupFactory.java @@ -29,7 +29,7 @@ public class TSTLookupFactory extends LookupFactory { private static final String FILENAME = "tst.dat"; @Override - public Lookup create(NamedList params, SolrCore core) { + public Lookup create(@SuppressWarnings({"rawtypes"})NamedList params, SolrCore core) { return new TSTLookup(getTempDir(), "suggester"); } diff --git a/solr/core/src/java/org/apache/solr/store/blockcache/BlockDirectory.java b/solr/core/src/java/org/apache/solr/store/blockcache/BlockDirectory.java index f9f1f659c9c5..fe19f9269a5d 100644 --- a/solr/core/src/java/org/apache/solr/store/blockcache/BlockDirectory.java +++ b/solr/core/src/java/org/apache/solr/store/blockcache/BlockDirectory.java @@ -173,7 +173,7 @@ public CachedIndexInput(IndexInput source, int blockSize, String name, @Override public IndexInput clone() { CachedIndexInput clone = (CachedIndexInput) super.clone(); - clone.source = (IndexInput) source.clone(); + clone.source = source.clone(); return clone; } diff --git a/solr/core/src/java/org/apache/solr/update/AddUpdateCommand.java b/solr/core/src/java/org/apache/solr/update/AddUpdateCommand.java index a56427ded5ce..1e4384df340e 100644 --- a/solr/core/src/java/org/apache/solr/update/AddUpdateCommand.java +++ b/solr/core/src/java/org/apache/solr/update/AddUpdateCommand.java @@ -255,6 +255,7 @@ private void flattenLabelled(List unwrappedDocs, SolrInputDoc flattenLabelled(unwrappedDocs, ((SolrInputDocument) val)); continue; } + @SuppressWarnings({"unchecked"}) Collection childrenList = ((Collection) val); for (SolrInputDocument child : childrenList) { flattenLabelled(unwrappedDocs, child); diff --git a/solr/core/src/java/org/apache/solr/update/CdcrUpdateLog.java b/solr/core/src/java/org/apache/solr/update/CdcrUpdateLog.java index 19f19478c620..eee312725caf 100644 --- a/solr/core/src/java/org/apache/solr/update/CdcrUpdateLog.java +++ b/solr/core/src/java/org/apache/solr/update/CdcrUpdateLog.java @@ -202,6 +202,7 @@ public void deleteByQuery(DeleteUpdateCommand cmd) { * Creates a new {@link org.apache.solr.update.CdcrUpdateLog.CdcrLogReader} * initialised with the current list of tlogs. 
*/ + @SuppressWarnings({"unchecked", "rawtypes"}) public CdcrLogReader newLogReader() { return new CdcrLogReader(new ArrayList(logs), tlog); } @@ -362,6 +363,7 @@ public void initForRecovery(File bufferedTlog, long offset) { // populate recent deleteByQuery commands for (int i=startingUpdates.deleteByQueryList.size()-1; i>=0; i--) { Update update = startingUpdates.deleteByQueryList.get(i); + @SuppressWarnings({"unchecked"}) List dbq = (List) update.log.lookup(update.pointer); long version = (Long) dbq.get(1); String q = (String) dbq.get(2); @@ -402,6 +404,7 @@ private void copyBufferedUpdates(File tlogSrc, long offsetSrc, long latestVersio Object o = tlogReader.next(); if (o == null) break; // we reached the end of the tlog // should currently be a List + @SuppressWarnings({"rawtypes"}) List entry = (List) o; operationAndFlags = (Integer) entry.get(0); int oper = operationAndFlags & OPERATION_MASK; @@ -729,6 +732,7 @@ private boolean seekTLog(long targetVersion) { * Extracts the version number and converts it to its absolute form. */ private long getVersion(Object o) { + @SuppressWarnings({"rawtypes"}) List entry = (List) o; // version is negative for delete, ensure that we are manipulating absolute version numbers return Math.abs((Long) entry.get(1)); diff --git a/solr/core/src/java/org/apache/solr/update/CommitTracker.java b/solr/core/src/java/org/apache/solr/update/CommitTracker.java index cf8dee6a53c9..0cf62116c3a1 100644 --- a/solr/core/src/java/org/apache/solr/update/CommitTracker.java +++ b/solr/core/src/java/org/apache/solr/update/CommitTracker.java @@ -29,6 +29,7 @@ import org.apache.solr.common.SolrException; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.core.SolrCore; +import org.apache.solr.logging.MDCLoggingContext; import org.apache.solr.request.LocalSolrQueryRequest; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.common.util.SolrNamedThreadFactory; @@ -57,9 +58,12 @@ public final class CommitTracker implements Runnable { private int docsUpperBound; private long timeUpperBound; private long tLogFileSizeUpperBound; - - private final ScheduledExecutorService scheduler = + + // note: can't use ExecutorsUtil because it doesn't have a *scheduled* ExecutorService. + // Not a big deal but it means we must take care of MDC logging here. 
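+  // The task body (run() below) therefore brackets its work with
+  // MDCLoggingContext.setCore(core) and a finally MDCLoggingContext.clear().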
+ private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1, new SolrNamedThreadFactory("commitScheduler")); + @SuppressWarnings({"rawtypes"}) private ScheduledFuture pending; // state @@ -248,9 +252,8 @@ public void run() { pending = null; // allow a new commit to be scheduled } - SolrQueryRequest req = new LocalSolrQueryRequest(core, - new ModifiableSolrParams()); - try { + MDCLoggingContext.setCore(core); + try (SolrQueryRequest req = new LocalSolrQueryRequest(core, new ModifiableSolrParams())) { CommitUpdateCommand command = new CommitUpdateCommand(req, false); command.openSearcher = openSearcher; command.waitSearcher = WAIT_SEARCHER; @@ -271,9 +274,9 @@ public void run() { } catch (Exception e) { SolrException.log(log, "auto commit error...", e); } finally { - // log.info("###done committing"); - req.close(); + MDCLoggingContext.clear(); } + // log.info("###done committing"); } // to facilitate testing: blocks if called during commit diff --git a/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java b/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java index a3ef4e757633..53dcb3e0bccc 100644 --- a/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java +++ b/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java @@ -84,6 +84,7 @@ public final class DefaultSolrCoreState extends SolrCoreState implements Recover private volatile Future cdcrBootstrapFuture; + @SuppressWarnings({"rawtypes"}) private volatile Callable cdcrBootstrapCallable; @Deprecated @@ -448,12 +449,13 @@ public void setCdcrBootstrapFuture(Future cdcrBootstrapFuture) { } @Override + @SuppressWarnings({"rawtypes"}) public Callable getCdcrBootstrapCallable() { return cdcrBootstrapCallable; } @Override - public void setCdcrBootstrapCallable(Callable cdcrBootstrapCallable) { + public void setCdcrBootstrapCallable(@SuppressWarnings({"rawtypes"})Callable cdcrBootstrapCallable) { this.cdcrBootstrapCallable = cdcrBootstrapCallable; } } diff --git a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java index 1f3dc86f1339..523a35d4a6c6 100644 --- a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java +++ b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java @@ -587,7 +587,7 @@ public void prepareCommit(CommitUpdateCommand cmd) throws IOException { boolean error=true; try { - log.info("start {}", cmd); + log.debug("start {}", cmd); RefCounted iw = solrCoreState.getIndexWriter(core); try { SolrIndexWriter.setCommitData(iw.get(), cmd.getVersion()); @@ -596,7 +596,7 @@ public void prepareCommit(CommitUpdateCommand cmd) throws IOException { iw.decref(); } - log.info("end_prepareCommit"); + log.debug("end_prepareCommit"); error=false; } @@ -609,6 +609,7 @@ public void prepareCommit(CommitUpdateCommand cmd) throws IOException { } @Override + @SuppressWarnings({"rawtypes"}) public void commit(CommitUpdateCommand cmd) throws IOException { TestInjection.injectDirectUpdateLatch(); if (cmd.prepareCommit) { @@ -635,7 +636,7 @@ public void commit(CommitUpdateCommand cmd) throws IOException { solrCoreState.getCommitLock().lock(); } - log.info("start {}", cmd); + log.debug("start {}", cmd); // We must cancel pending commits *before* we actually execute the commit. 
@@ -672,7 +673,7 @@ public void commit(CommitUpdateCommand cmd) throws IOException { SolrIndexWriter.setCommitData(writer, cmd.getVersion()); writer.commit(); } else { - log.info("No uncommitted changes. Skipping IW.commit."); + log.debug("No uncommitted changes. Skipping IW.commit."); } // SolrCore.verbose("writer.commit() end"); @@ -721,7 +722,7 @@ public void commit(CommitUpdateCommand cmd) throws IOException { commitTracker.didCommit(); } - log.info("end_commit_flush"); + log.debug("end_commit_flush"); error=false; } @@ -1024,10 +1025,5 @@ public CommitTracker getCommitTracker() { public CommitTracker getSoftCommitTracker() { return softCommitTracker; } - - @Override - public SolrMetricsContext getSolrMetricsContext() { - return solrMetricsContext; - } } diff --git a/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java b/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java index 64c625392acc..aca8e85cdc24 100644 --- a/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java +++ b/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java @@ -159,6 +159,7 @@ public static Document toDocument(SolrInputDocument doc, IndexSchema schema, boo // load each field value boolean hasField = false; try { + @SuppressWarnings({"rawtypes"}) Iterator it = field.iterator(); while (it.hasNext()) { Object v = it.next(); diff --git a/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java b/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java index e2e7a5c37773..957a1cacc7a6 100644 --- a/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java +++ b/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java @@ -164,6 +164,7 @@ public boolean endsWithCommit() throws IOException { return true; } + @SuppressWarnings({"unchecked", "rawtypes"}) private void readHeader(FastInputStream fis) throws IOException { // read existing header boolean closeFis = false; @@ -488,6 +489,7 @@ public Object next() throws IOException, InterruptedException { long lastVersion = Long.MIN_VALUE; while ( (o = super.next()) != null) { + @SuppressWarnings({"rawtypes"}) List entry = (List) o; long version = (Long) entry.get(UpdateLog.VERSION_IDX); version = Math.abs(version); diff --git a/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java b/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java index 50fae873ac2c..f11101e0aea8 100644 --- a/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java +++ b/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java @@ -231,6 +231,7 @@ public void init(UpdateHandler uhandler, SolrCore core) { // populate recent deleteByQuery commands for (int i = startingUpdates.deleteByQueryList.size() - 1; i >= 0; i--) { Update update = startingUpdates.deleteByQueryList.get(i); + @SuppressWarnings({"unchecked"}) List dbq = (List) update.log.lookup(update.pointer); long version = (Long) dbq.get(1); String q = (String) dbq.get(2); diff --git a/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java b/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java index 0b7e655608f7..8e5af2add79e 100644 --- a/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java +++ b/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java @@ -105,10 +105,12 @@ public static IndexFingerprint getFingerprint(SolrCore core, long maxVersion) th } } + @SuppressWarnings({"unchecked"}) public static IndexFingerprint getFingerprint(SolrIndexSearcher searcher, LeafReaderContext ctx, Long maxVersion) throws IOException { 
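// Fingerprint of a single leaf: each document's _version_ is read through a
// ValueSource. ValueSource.newContext() returns a raw Map, which is what forces
// the rawtypes suppression on funcContext below.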
SchemaField versionField = VersionInfo.getAndCheckVersionField(searcher.getSchema()); ValueSource vs = versionField.getType().getValueSource(versionField, null); + @SuppressWarnings({"rawtypes"}) Map funcContext = ValueSource.newContext(searcher); vs.createWeight(funcContext, searcher); @@ -184,7 +186,7 @@ public Map toMap(Map map) { return map; } - private static long getLong(Map m, String key, long def) { + private static long getLong(@SuppressWarnings({"rawtypes"})Map m, String key, long def) { Object oval = m.get(key); return oval != null ? ((Number)oval).longValue() : def; } @@ -194,6 +196,7 @@ private static long getLong(Map m, String key, long def) { */ public static IndexFingerprint fromObject(Object o) { if (o instanceof IndexFingerprint) return (IndexFingerprint) o; + @SuppressWarnings({"rawtypes"}) Map map = null; if (o instanceof Map) { map = (Map) o; diff --git a/solr/core/src/java/org/apache/solr/update/PeerSync.java b/solr/core/src/java/org/apache/solr/update/PeerSync.java index c4df00af25bc..e4b94fc05136 100644 --- a/solr/core/src/java/org/apache/solr/update/PeerSync.java +++ b/solr/core/src/java/org/apache/solr/update/PeerSync.java @@ -216,6 +216,7 @@ public PeerSyncResult sync() { ShardResponse srsp = shardHandler.takeCompletedOrError(); if (srsp == null) break; if (srsp.getException() == null) { + @SuppressWarnings({"unchecked"}) List otherVersions = (List)srsp.getSolrResponse().getResponse().get("versions"); if (otherVersions != null && !otherVersions.isEmpty()) { syncErrors.inc(); @@ -432,6 +433,7 @@ private boolean canHandleVersionRanges(String replica) { private boolean handleVersions(ShardResponse srsp) { // we retrieved the last N updates from the replica + @SuppressWarnings({"unchecked"}) List otherVersions = (List)srsp.getSolrResponse().getResponse().get("versions"); // TODO: how to handle short lists? 
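A pattern repeated throughout this patch: instead of annotating a whole method with @SuppressWarnings, the annotation is pushed down to the single cast or declaration that produces the warning, so any unchecked operation added to the method later still triggers the compiler. A contrived sketch of the difference (the Map-based response shape here is an assumption for illustration, not Solr's NamedList API):

import java.util.List;
import java.util.Map;

public class NarrowSuppression {
  // Too broad: every unchecked operation in the body is silenced.
  @SuppressWarnings("unchecked")
  static List<Long> versionsBroad(Map<String, Object> rsp) {
    return (List<Long>) rsp.get("versions");
  }

  // Narrow: only this one cast is exempt; warnings elsewhere in the method survive.
  static List<Long> versionsNarrow(Map<String, Object> rsp) {
    @SuppressWarnings("unchecked")
    List<Long> versions = (List<Long>) rsp.get("versions");
    return versions;
  }
}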
@@ -516,6 +518,7 @@ private boolean requestUpdates(ShardResponse srsp, String versionsAndRanges, lon private boolean handleUpdates(ShardResponse srsp) { // we retrieved the last N updates from the replica + @SuppressWarnings({"unchecked"}) List updates = (List)srsp.getSolrResponse().getResponse().get("updates"); SyncShardRequest sreq = (SyncShardRequest) srsp.getShardRequest(); @@ -580,7 +583,9 @@ static class Updater { if (!(o1 instanceof List)) return 1; if (!(o2 instanceof List)) return -1; + @SuppressWarnings({"rawtypes"}) List lst1 = (List) o1; + @SuppressWarnings({"rawtypes"}) List lst2 = (List) o2; long l1 = Math.abs((Long) lst1.get(1)); @@ -616,6 +621,7 @@ void applyUpdates(List updates, Object updateFrom) throws Exception { for (Object obj : updates) { // should currently be a List o = obj; + @SuppressWarnings({"unchecked"}) List entry = (List)o; if (debug) { diff --git a/solr/core/src/java/org/apache/solr/update/PeerSyncWithLeader.java b/solr/core/src/java/org/apache/solr/update/PeerSyncWithLeader.java index bdb31e4f9bed..5e81b9d5797b 100644 --- a/solr/core/src/java/org/apache/solr/update/PeerSyncWithLeader.java +++ b/solr/core/src/java/org/apache/solr/update/PeerSyncWithLeader.java @@ -228,6 +228,7 @@ private boolean doSync(List ourUpdates, long ourLowThreshold, long ourHigh private MissedUpdatesRequest buildMissedUpdatesRequest(NamedList rsp) { // we retrieved the last N updates from the replica + @SuppressWarnings({"unchecked"}) List otherVersions = (List)rsp.get("versions"); if (log.isInfoEnabled()) { log.info("{} Received {} versions from {}", msg(), otherVersions.size(), leaderUrl); @@ -264,6 +265,7 @@ private NamedList requestUpdates(MissedUpdatesRequest missedUpdatesReque private boolean handleUpdates(NamedList rsp, long numRequestedUpdates, IndexFingerprint leaderFingerprint) { // missed updates from leader; it does not contain updates from bufferedUpdates + @SuppressWarnings({"unchecked"}) List updates = (List)rsp.get("updates"); if (updates.size() < numRequestedUpdates) { @@ -285,6 +287,7 @@ private boolean handleUpdates(NamedList rsp, long numRequestedUpdates, I // TODO leader should do fingerprint and retrieve recent update versions atomically if (leaderFingerprint != null) { boolean existDBIOrDBQInTheGap = updates.stream().anyMatch(e -> { + @SuppressWarnings({"unchecked"}) List u = (List) e; long version = (Long) u.get(1); int oper = (Integer)u.get(0) & UpdateLog.OPERATION_MASK; @@ -294,6 +297,7 @@ private boolean handleUpdates(NamedList rsp, long numRequestedUpdates, I if (!existDBIOrDBQInTheGap) { // it is safe to use leaderFingerprint.maxVersionEncountered as cut point now.
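// Entries above the leader's fingerprint cutoff are not covered by the fingerprint
// comparison, so they are dropped here instead of being applied blindly.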
updates.removeIf(e -> { + @SuppressWarnings({"unchecked"}) List u = (List) e; long version = (Long) u.get(1); return version > leaderFingerprint.getMaxVersionEncountered(); diff --git a/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java b/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java index 842024276473..9d2377efdeac 100644 --- a/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java +++ b/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java @@ -428,6 +428,7 @@ private int getRfFromResponse(InputStream inputStream) { NamedList nl = brp.processResponse(inputStream, null); Object hdr = nl.get("responseHeader"); if (hdr != null && hdr instanceof NamedList) { + @SuppressWarnings({"unchecked"}) NamedList hdrList = (NamedList) hdr; Object rfObj = hdrList.get(UpdateRequest.REPFACT); if (rfObj != null && rfObj instanceof Integer) { diff --git a/solr/core/src/java/org/apache/solr/update/SolrCoreState.java b/solr/core/src/java/org/apache/solr/update/SolrCoreState.java index 380bc9acca1b..eddd5b795fdf 100644 --- a/solr/core/src/java/org/apache/solr/update/SolrCoreState.java +++ b/solr/core/src/java/org/apache/solr/update/SolrCoreState.java @@ -196,9 +196,10 @@ public CoreIsClosedException(String s) { public abstract void setCdcrBootstrapFuture(Future cdcrBootstrapFuture); + @SuppressWarnings("rawtypes") public abstract Callable getCdcrBootstrapCallable(); - public abstract void setCdcrBootstrapCallable(Callable cdcrBootstrapCallable); + public abstract void setCdcrBootstrapCallable(@SuppressWarnings("rawtypes") Callable cdcrBootstrapCallable); public Throwable getTragicException() throws IOException { RefCounted ref = getIndexWriter(null); diff --git a/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java b/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java index 0bdfe84f073e..e189ad16b46e 100644 --- a/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java +++ b/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java @@ -105,7 +105,7 @@ private SolrIndexConfig(SolrConfig solrConfig) { public SolrIndexConfig(SolrConfig solrConfig, String prefix, SolrIndexConfig def) { if (prefix == null) { prefix = "indexConfig"; - log.debug("Defaulting to prefix \""+prefix+"\" for index configuration"); + log.debug("Defaulting to prefix '{}' for index configuration", prefix); } if (def == null) { @@ -247,6 +247,7 @@ public IndexWriterConfig toIndexWriterConfig(SolrCore core) throws IOException { if (mergedSegmentWarmerInfo != null) { // TODO: add infostream -> normal logging system (there is an issue somewhere) + @SuppressWarnings({"rawtypes"}) IndexReaderWarmer warmer = core.getResourceLoader().newInstance(mergedSegmentWarmerInfo.className, IndexReaderWarmer.class, null, @@ -262,7 +263,7 @@ public IndexWriterConfig toIndexWriterConfig(SolrCore core) throws IOException { * Builds a MergePolicy using the configured MergePolicyFactory * or if no factory is configured uses the configured mergePolicy PluginInfo. */ - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) private MergePolicy buildMergePolicy(SolrResourceLoader resourceLoader, IndexSchema schema) { final String mpfClassName; @@ -285,6 +286,7 @@ private MergePolicy buildMergePolicy(SolrResourceLoader resourceLoader, IndexSch return mpf.getMergePolicy(); } + @SuppressWarnings({"unchecked"}) private MergeScheduler buildMergeScheduler(SolrResourceLoader resourceLoader) { String msClassName = mergeSchedulerInfo == null ? 
SolrIndexConfig.DEFAULT_MERGE_SCHEDULER_CLASSNAME : mergeSchedulerInfo.className; MergeScheduler scheduler = resourceLoader.newInstance(msClassName, MergeScheduler.class); @@ -293,6 +295,7 @@ private MergeScheduler buildMergeScheduler(SolrResourceLoader resourceLoader) { // LUCENE-5080: these two setters are removed, so we have to invoke setMaxMergesAndThreads // if someone has them configured. if (scheduler instanceof ConcurrentMergeScheduler) { + @SuppressWarnings({"rawtypes"}) NamedList args = mergeSchedulerInfo.initArgs.clone(); Integer maxMergeCount = (Integer) args.remove("maxMergeCount"); if (maxMergeCount == null) { diff --git a/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java b/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java index 6cbf54125d4d..0e1806e57a34 100644 --- a/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java +++ b/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java @@ -430,6 +430,7 @@ public void doSplit() throws IOException { } private void openNewSearcher(SolrCore core) throws Exception { + @SuppressWarnings({"rawtypes"}) Future[] waitSearcher = new Future[1]; core.getSearcher(true, false, waitSearcher, true); if (waitSearcher[0] != null) { diff --git a/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java b/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java index 12323dfcce97..aa841f30e0a3 100644 --- a/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java +++ b/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java @@ -179,7 +179,7 @@ private SolrIndexWriter(SolrCore core, String name, String path, Directory direc @SuppressForbidden(reason = "Need currentTimeMillis, commit time should be used only for debugging purposes, " + " but currently suspiciously used for replication as well") public static void setCommitData(IndexWriter iw, long commitCommandVersion) { - log.info("Calling setCommitData with IW:{} commitCommandVersion:{}", iw, commitCommandVersion); + log.debug("Calling setCommitData with IW:{} commitCommandVersion:{}", iw, commitCommandVersion); final Map commitData = new HashMap<>(); commitData.put(COMMIT_TIME_MSEC_KEY, String.valueOf(System.currentTimeMillis())); commitData.put(COMMIT_COMMAND_VERSION, String.valueOf(commitCommandVersion)); diff --git a/solr/core/src/java/org/apache/solr/update/TransactionLog.java b/solr/core/src/java/org/apache/solr/update/TransactionLog.java index 9fd2f2f5bd28..555f0eafd0b8 100644 --- a/solr/core/src/java/org/apache/solr/update/TransactionLog.java +++ b/solr/core/src/java/org/apache/solr/update/TransactionLog.java @@ -69,7 +69,7 @@ public class TransactionLog implements Closeable { private boolean debug = log.isDebugEnabled(); private boolean trace = log.isTraceEnabled(); - public final static String END_MESSAGE="SOLR_TLOG_END"; + public final static String END_MESSAGE = "SOLR_TLOG_END"; long id; File tlogFile; @@ -83,7 +83,7 @@ public class TransactionLog implements Closeable { protected volatile boolean deleteOnClose = true; // we can delete old tlogs since they are currently only used for real-time-get (and in the future, recovery) AtomicInteger refcount = new AtomicInteger(1); - Map globalStringMap = new HashMap<>(); + Map globalStringMap = new HashMap<>(); List globalStringList = new ArrayList<>(); // write a BytesRef as a byte array @@ -91,13 +91,13 @@ public class TransactionLog implements Closeable { @Override public Object resolve(Object o, JavaBinCodec codec) throws IOException { if (o instanceof BytesRef) { - 
BytesRef br = (BytesRef)o; + BytesRef br = (BytesRef) o; codec.writeByteArray(br.bytes, br.offset, br.length); return null; } // Fallback: we have no idea how to serialize this. Be noisy to prevent insidious bugs throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, - "TransactionLog doesn't know how to serialize " + o.getClass() + "; try implementing ObjectResolver?"); + "TransactionLog doesn't know how to serialize " + o.getClass() + "; try implementing ObjectResolver?"); } }; @@ -167,12 +167,12 @@ public boolean writePrimitive(Object val) throws IOException { try { if (debug) { log.debug("New TransactionLog file= {}, exists={}, size={} openExisting={}" , tlogFile, tlogFile.exists(), tlogFile.length(), openExisting); } // Parse tlog id from the filename String filename = tlogFile.getName(); - id = Long.parseLong(filename.substring(filename.lastIndexOf('.')+1)); + id = Long.parseLong(filename.substring(filename.lastIndexOf('.') + 1)); this.tlogFile = tlogFile; raf = new RandomAccessFile(this.tlogFile, "rw"); @@ -197,7 +197,7 @@ public boolean writePrimitive(Object val) throws IOException { log.warn("New transaction log already exists:{} size={}", tlogFile, raf.length()); return; } - + if (start > 0) { raf.setLength(0); } @@ -205,7 +205,7 @@ public boolean writePrimitive(Object val) throws IOException { } success = true; - + assert ObjectReleaseTracker.track(this); } catch (IOException e) { @@ -222,7 +222,8 @@ public boolean writePrimitive(Object val) throws IOException { } // for subclasses - protected TransactionLog() {} + protected TransactionLog() { + } /** Returns the number of records in the log (currently includes the header and an optional commit). * Note: currently returns 0 for reopened existing log files. @@ -241,12 +242,12 @@ public boolean endsWithCommit() throws IOException { } // the end of the file should have the end message (added during a commit) plus a 4 byte size - byte[] buf = new byte[ END_MESSAGE.length() ]; + byte[] buf = new byte[END_MESSAGE.length()]; long pos = size - END_MESSAGE.length() - 4; if (pos < 0) return false; @SuppressWarnings("resource") final ChannelFastInputStream is = new ChannelFastInputStream(channel, pos); is.read(buf); - for (int i=0; i<buf.length; i++) { + for (int i = 0; i < buf.length; i++) { if (buf[i] != END_MESSAGE.charAt(i)) return false; } return true; } ... - globalStringList = (List<String>)header.get("strings"); + globalStringList = (List<String>) header.get("strings"); globalStringMap = new HashMap<>(globalStringList.size()); - for (int i=0; i<globalStringList.size(); i++) { + for (int i = 0; i < globalStringList.size(); i++) { globalStringMap.put(globalStringList.get(i), i + 1); } } protected Collection<String> getGlobalStrings() { ... } } + @SuppressWarnings({"unchecked"}) protected void writeLogHeader(LogCodec codec) throws IOException { long pos = fos.size(); assert pos == 0; - Map header = new LinkedHashMap(); - header.put("SOLR_TLOG",1); // a magic string + version number - header.put("strings",globalStringList); + @SuppressWarnings({"rawtypes"}) + Map header = new LinkedHashMap(); + header.put("SOLR_TLOG", 1); // a magic string + version number + header.put("strings", globalStringList); codec.marshal(header, fos); endRecord(pos); } protected void endRecord(long startRecordPosition) throws IOException { - fos.writeInt((int)(fos.size() - startRecordPosition)); + fos.writeInt((int) (fos.size() - startRecordPosition)); numRecords++; } @@ -347,7 +352,7 @@ protected void checkWriteHeader(LogCodec codec, SolrInputDocument optional) thro * the command to the transaction log.)
* @param cmd The add update command to be written * @return Returns the position pointer of the written update command - * + * * @see #write(AddUpdateCommand, long) */ public long write(AddUpdateCommand cmd) { @@ -357,14 +362,14 @@ public long write(AddUpdateCommand cmd) { /** * Writes an add update command to the transaction log. This should be called only for * writing in-place updates, or else pass -1 as the prevPointer. - * @param cmd The add update command to be written - * @param prevPointer The pointer in the transaction log which this update depends - * on (applicable for in-place updates) + * @param cmd The add update command to be written + * @param prevPointer The pointer in the transaction log which this update depends + * on (applicable for in-place updates) * @return Returns the position pointer of the written update command */ public long write(AddUpdateCommand cmd, long prevPointer) { assert (-1 <= prevPointer && (cmd.isInPlaceUpdate() || (-1 == prevPointer))); - + LogCodec codec = new LogCodec(resolver); SolrInputDocument sdoc = cmd.getSolrInputDocument(); @@ -374,7 +379,7 @@ public long write(AddUpdateCommand cmd, long prevPointer) { // adaptive buffer sizing int bufSize = lastAddSize; // unsynchronized access of lastAddSize should be fine // at least 256 bytes and at most 1 MB - bufSize = Math.min(1024*1024, Math.max(256, bufSize+(bufSize>>3)+256)); + bufSize = Math.min(1024 * 1024, Math.max(256, bufSize + (bufSize >> 3) + 256)); MemOutputStream out = new MemOutputStream(new byte[bufSize]); codec.init(out); @@ -391,7 +396,7 @@ public long write(AddUpdateCommand cmd, long prevPointer) { codec.writeLong(cmd.getVersion()); codec.writeSolrInputDocument(cmd.getSolrInputDocument()); } - lastAddSize = (int)out.size(); + lastAddSize = (int) out.size(); synchronized (this) { long pos = fos.size(); // if we had flushed, this should be equal to channel.position() @@ -465,9 +470,9 @@ public long writeDeleteByQuery(DeleteUpdateCommand cmd) { // fos.flushBuffer(); // flush later return pos; } - } catch (IOException e) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); - } + } catch (IOException e) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); + } } @@ -515,10 +520,10 @@ public Object lookup(long pos) { fos.flushBuffer(); /*** System.out.println("###flushBuffer to " + fos.size() + " raf.length()=" + raf.length() + " pos="+pos); - if (fos.size() != raf.length() || pos >= fos.size() ) { - throw new RuntimeException("ERROR" + "###flushBuffer to " + fos.size() + " raf.length()=" + raf.length() + " pos="+pos); - } - ***/ + if (fos.size() != raf.length() || pos >= fos.size() ) { + throw new RuntimeException("ERROR" + "###flushBuffer to " + fos.size() + " raf.length()=" + raf.length() + " pos="+pos); + } + ***/ } ChannelFastInputStream fis = new ChannelFastInputStream(channel, pos); @@ -633,7 +638,8 @@ public synchronized long getLogSizeFromStream() { /** Returns a reader that can be used while a log is still in use. * Currently only *one* LogReader may be outstanding, and that log may only - * be used from a single thread. */ + * be used from a single thread. 
+ */ public LogReader getReader(long startingPos) { return new LogReader(startingPos); } @@ -744,7 +750,8 @@ public Object next() throws IOException, InterruptedException { long pos = startingPos; long lastVersion = Long.MIN_VALUE; - while ( (o = super.next()) != null) { + while ((o = super.next()) != null) { + @SuppressWarnings({"rawtypes"}) List entry = (List) o; long version = (Long) entry.get(UpdateLog.VERSION_IDX); version = Math.abs(version); @@ -780,10 +787,11 @@ public abstract class ReverseReader { /* returns the position in the log file of the last record returned by next() */ public abstract long position(); + public abstract void close(); @Override - public abstract String toString() ; + public abstract String toString(); } @@ -812,7 +820,7 @@ public FSReverseReader() throws IOException { } fis = new ChannelFastInputStream(channel, 0); - if (sz >=4) { + if (sz >= 4) { // readHeader(fis); // should not be needed prevPos = sz - 4; fis.seek(prevPos); @@ -843,7 +851,7 @@ public Object next() throws IOException { } else { // Position buffer so that this record is at the end. // For small records, this will cause subsequent calls to next() to be within the buffer. - long seekPos = endOfThisRecord - fis.getBufferSize(); + long seekPos = endOfThisRecord - fis.getBufferSize(); seekPos = Math.min(seekPos, prevPos); // seek to the start of the record if it's larger then the block size. seekPos = Math.max(seekPos, 0); fis.seek(seekPos); @@ -880,57 +888,54 @@ public String toString() { } -} - - + static class ChannelFastInputStream extends FastInputStream { + private FileChannel ch; -class ChannelFastInputStream extends FastInputStream { - private FileChannel ch; - - public ChannelFastInputStream(FileChannel ch, long chPosition) { - // super(null, new byte[10],0,0); // a small buffer size for testing purposes - super(null); - this.ch = ch; - super.readFromStream = chPosition; - } + public ChannelFastInputStream(FileChannel ch, long chPosition) { + // super(null, new byte[10],0,0); // a small buffer size for testing purposes + super(null); + this.ch = ch; + super.readFromStream = chPosition; + } - @Override - public int readWrappedStream(byte[] target, int offset, int len) throws IOException { - ByteBuffer bb = ByteBuffer.wrap(target, offset, len); - int ret = ch.read(bb, readFromStream); - return ret; - } + @Override + public int readWrappedStream(byte[] target, int offset, int len) throws IOException { + ByteBuffer bb = ByteBuffer.wrap(target, offset, len); + int ret = ch.read(bb, readFromStream); + return ret; + } - public void seek(long position) throws IOException { - if (position <= readFromStream && position >= getBufferPos()) { - // seek within buffer - pos = (int)(position - getBufferPos()); - } else { - // long currSize = ch.size(); // not needed - underlying read should handle (unless read never done) - // if (position > currSize) throw new EOFException("Read past EOF: seeking to " + position + " on file of size " + currSize + " file=" + ch); - readFromStream = position; - end = pos = 0; + public void seek(long position) throws IOException { + if (position <= readFromStream && position >= getBufferPos()) { + // seek within buffer + pos = (int) (position - getBufferPos()); + } else { + // long currSize = ch.size(); // not needed - underlying read should handle (unless read never done) + // if (position > currSize) throw new EOFException("Read past EOF: seeking to " + position + " on file of size " + currSize + " file=" + ch); + readFromStream = position; + end = pos = 0; + } + 
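// Either way, the stream's logical position must now equal the requested one.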
assert position() == position; } - assert position() == position; - } /** where is the start of the buffer relative to the whole file */ - public long getBufferPos() { - return readFromStream - end; - } + public long getBufferPos() { + return readFromStream - end; + } - public int getBufferSize() { - return buf.length; - } + public int getBufferSize() { + return buf.length; + } - @Override - public void close() throws IOException { - ch.close(); - } + @Override + public void close() throws IOException { + ch.close(); + } - @Override - public String toString() { - return "readFromStream="+readFromStream +" pos="+pos +" end="+end + " bufferPos="+getBufferPos() + " position="+position() ; + @Override + public String toString() { + return "readFromStream=" + readFromStream + " pos=" + pos + " end=" + end + " bufferPos=" + getBufferPos() + " position=" + position(); + } } } diff --git a/solr/core/src/java/org/apache/solr/update/UpdateLog.java b/solr/core/src/java/org/apache/solr/update/UpdateLog.java index 7a6876daf49e..79323c247922 100644 --- a/solr/core/src/java/org/apache/solr/update/UpdateLog.java +++ b/solr/core/src/java/org/apache/solr/update/UpdateLog.java @@ -210,7 +210,7 @@ public String toString() { // keep track of deletes only... this is not updated on an add protected LinkedHashMap oldDeletes = new LinkedHashMap(numDeletesToKeep) { @Override - protected boolean removeEldestEntry(Map.Entry eldest) { + protected boolean removeEldestEntry(@SuppressWarnings({"rawtypes"})Map.Entry eldest) { return size() > numDeletesToKeep; } }; @@ -420,6 +420,7 @@ public void init(UpdateHandler uhandler, SolrCore core) { // populate recent deleteByQuery commands for (int i = startingUpdates.deleteByQueryList.size() - 1; i >= 0; i--) { Update update = startingUpdates.deleteByQueryList.get(i); + @SuppressWarnings({"unchecked"}) List dbq = (List) update.log.lookup(update.pointer); long version = (Long) dbq.get(1); String q = (String) dbq.get(2); @@ -894,14 +895,16 @@ public void postSoftCommit(CommitUpdateCommand cmd) { * @return Returns 0 if a full document was found in the log, -1 if no full document was found. If full document was supposed * to be found in the tlogs, but couldn't be found (because the logs were rotated) then the prevPointer is returned. 
*/ + @SuppressWarnings({"unchecked"}) synchronized public long applyPartialUpdates(BytesRef id, long prevPointer, long prevVersion, - Set onlyTheseFields, SolrDocumentBase latestPartialDoc) { + Set onlyTheseFields, @SuppressWarnings({"rawtypes"})SolrDocumentBase latestPartialDoc) { SolrInputDocument partialUpdateDoc = null; List lookupLogs = Arrays.asList(tlog, prevMapLog, prevMapLog2); while (prevPointer >= 0) { //go through each partial update and apply it on the incoming doc one after another + @SuppressWarnings({"rawtypes"}) List entry; entry = getEntryFromTLog(prevPointer, prevVersion, lookupLogs); if (entry == null) { @@ -942,7 +945,7 @@ synchronized public long applyPartialUpdates(BytesRef id, long prevPointer, long /** * Add all fields from olderDoc into newerDoc if not already present in newerDoc */ - private void applyOlderUpdates(SolrDocumentBase newerDoc, SolrInputDocument olderDoc, Set mergeFields) { + private void applyOlderUpdates(@SuppressWarnings({"rawtypes"})SolrDocumentBase newerDoc, SolrInputDocument olderDoc, Set mergeFields) { for (String fieldName : olderDoc.getFieldNames()) { // if the newerDoc has this field, then this field from olderDoc can be ignored if (!newerDoc.containsKey(fieldName) && (mergeFields == null || mergeFields.contains(fieldName))) { @@ -959,6 +962,7 @@ private void applyOlderUpdates(SolrDocumentBase newerDoc, SolrInputDocument olde * * @return The entry if found, otherwise null */ + @SuppressWarnings({"rawtypes"}) private synchronized List getEntryFromTLog(long lookupPointer, long lookupVersion, List lookupLogs) { for (TransactionLog lookupLog : lookupLogs) { if (lookupLog != null && lookupLog.getLogSize() > lookupPointer) { @@ -1255,6 +1259,7 @@ public void copyOverOldUpdates(long commitVersion, TransactionLog oldTlog) { try { while ( (o = logReader.next()) != null ) { try { + @SuppressWarnings({"rawtypes"}) List entry = (List)o; int operationAndFlags = (Integer) entry.get(0); int oper = operationAndFlags & OPERATION_MASK; @@ -1491,6 +1496,7 @@ private void update() { if (o==null) break; // should currently be a List + @SuppressWarnings({"rawtypes"}) List entry = (List)o; // TODO: refactor this out so we get common error handling @@ -1875,6 +1881,7 @@ public void doReplay(TransactionLog translog) { try { // should currently be a List + @SuppressWarnings({"rawtypes"}) List entry = (List) o; operationAndFlags = (Integer) entry.get(UpdateLog.FLAGS_IDX); int oper = operationAndFlags & OPERATION_MASK; @@ -2082,7 +2089,8 @@ private void execute(UpdateCommand cmd, OrderedExecutor executor, * if it is UPDATE_INPLACE then the previous version will also be read from the entry * @param version Version already obtained from the entry. 
*/ - public static AddUpdateCommand convertTlogEntryToAddUpdateCommand(SolrQueryRequest req, List entry, + public static AddUpdateCommand convertTlogEntryToAddUpdateCommand(SolrQueryRequest req, + @SuppressWarnings({"rawtypes"})List entry, int operation, long version) { assert operation == UpdateLog.ADD || operation == UpdateLog.UPDATE_INPLACE; SolrInputDocument sdoc = (SolrInputDocument) entry.get(entry.size()-1); diff --git a/solr/core/src/java/org/apache/solr/update/VersionInfo.java b/solr/core/src/java/org/apache/solr/update/VersionInfo.java index a0cdcda39e98..b97f8129ae78 100644 --- a/solr/core/src/java/org/apache/solr/update/VersionInfo.java +++ b/solr/core/src/java/org/apache/solr/update/VersionInfo.java @@ -208,6 +208,7 @@ public Long lookupVersion(BytesRef idBytes) { * Returns the latest version from the index, searched by the given id (bytes) as seen from the realtime searcher. * Returns null if no document can be found in the index for the given id. */ + @SuppressWarnings({"unchecked"}) public Long getVersionFromIndex(BytesRef idBytes) { // TODO: we could cache much of this and invalidate during a commit. // TODO: most DocValues classes are threadsafe - expose which. @@ -219,6 +220,7 @@ public Long getVersionFromIndex(BytesRef idBytes) { if (lookup < 0) return null; // this means the doc doesn't exist in the index yet ValueSource vs = versionField.getType().getValueSource(versionField, null); + @SuppressWarnings({"rawtypes"}) Map context = ValueSource.newContext(searcher); vs.createWeight(context, searcher); FunctionValues fv = vs.getValues(context, searcher.getTopReaderContext().leaves().get((int) (lookup >> 32))); @@ -237,6 +239,7 @@ public Long getVersionFromIndex(BytesRef idBytes) { /** * Returns the highest version from the index, or 0L if no versions can be found in the index. 
*/ + @SuppressWarnings({"unchecked"}) public Long getMaxVersionFromIndex(IndexSearcher searcher) throws IOException { final String versionFieldName = versionField.getName(); @@ -254,6 +257,7 @@ public Long getMaxVersionFromIndex(IndexSearcher searcher) throws IOException { long maxVersionInIndex = 0L; ValueSource vs = versionField.getType().getValueSource(versionField, null); + @SuppressWarnings({"rawtypes"}) Map funcContext = ValueSource.newContext(searcher); vs.createWeight(funcContext, searcher); // TODO: multi-thread this diff --git a/solr/core/src/java/org/apache/solr/update/processor/AbstractDefaultValueUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/AbstractDefaultValueUpdateProcessorFactory.java index 2186e2d9ed23..89f6af6dddc6 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/AbstractDefaultValueUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/AbstractDefaultValueUpdateProcessorFactory.java @@ -46,7 +46,7 @@ public abstract class AbstractDefaultValueUpdateProcessorFactory @SuppressWarnings("unchecked") @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { Object obj = args.remove("fieldName"); if (null == obj && null == fieldName) { diff --git a/solr/core/src/java/org/apache/solr/update/processor/AddSchemaFieldsUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/AddSchemaFieldsUpdateProcessorFactory.java index b6239f6787fa..2ded9c7ac947 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/AddSchemaFieldsUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/AddSchemaFieldsUpdateProcessorFactory.java @@ -158,7 +158,7 @@ public UpdateRequestProcessor getInstance(SolrQueryRequest req, } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { inclusions = FieldMutatingUpdateProcessorFactory.parseSelectorParams(args); validateSelectorParams(inclusions); inclusions.fieldNameMatchesSchemaField = false; // Explicitly (non-configurably) require unknown field names @@ -192,8 +192,9 @@ public void inform(SolrCore core) { } } - private static List parseTypeMappings(NamedList args) { + private static List parseTypeMappings(@SuppressWarnings({"rawtypes"})NamedList args) { List typeMappings = new ArrayList<>(); + @SuppressWarnings({"unchecked"}) List typeMappingsParams = args.getAll(TYPE_MAPPING_PARAM); for (Object typeMappingObj : typeMappingsParams) { if (null == typeMappingObj) { @@ -202,6 +203,7 @@ private static List parseTypeMappings(NamedList args) { if ( ! (typeMappingObj instanceof NamedList) ) { throw new SolrException(SERVER_ERROR, "'" + TYPE_MAPPING_PARAM + "' init param must be a "); } + @SuppressWarnings({"rawtypes"}) NamedList typeMappingNamedList = (NamedList)typeMappingObj; Object fieldTypeObj = typeMappingNamedList.remove(FIELD_TYPE_PARAM); @@ -218,6 +220,7 @@ private static List parseTypeMappings(NamedList args) { } String fieldType = fieldTypeObj.toString(); + @SuppressWarnings({"unchecked"}) Collection valueClasses = typeMappingNamedList.removeConfigArgs(VALUE_CLASS_PARAM); if (valueClasses.isEmpty()) { @@ -245,6 +248,7 @@ private static List parseTypeMappings(NamedList args) { if ( ! 
(copyFieldObj instanceof NamedList)) { throw new SolrException(SERVER_ERROR, "'" + COPY_FIELD_PARAM + "' init param must be a <lst>"); } + @SuppressWarnings({"rawtypes"}) NamedList copyFieldNamedList = (NamedList)copyFieldObj; // dest Object destObj = copyFieldNamedList.remove(DEST_PARAM); @@ -556,6 +560,7 @@ private FieldNameSelector buildSelector(IndexSchema schema) { } private boolean isImmutableConfigSet(SolrCore core) { + @SuppressWarnings({"rawtypes"}) NamedList args = core.getConfigSetProperties(); Object immutable = args != null ? args.get(IMMUTABLE_CONFIGSET_ARG) : null; return immutable != null ? Boolean.parseBoolean(immutable.toString()) : false; diff --git a/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java b/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java index 9ebcde56210d..673c4fae191d 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java +++ b/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java @@ -97,6 +97,7 @@ public static boolean isAtomicUpdate(final AddUpdateCommand cmd) { * @param toDoc the final SolrInputDocument that will be mutated with the values from the fromDoc atomic commands * @return toDoc with mutated values */ + @SuppressWarnings({"unchecked"}) public SolrInputDocument merge(final SolrInputDocument fromDoc, SolrInputDocument toDoc) { for (SolrInputField sif : fromDoc.values()) { Object val = sif.getValue(); @@ -164,6 +165,7 @@ public static boolean isSupportedFieldForInPlaceUpdate(SchemaField schemaField) * * @return Return a set of fields that can be in-place updated. */ + @SuppressWarnings({"unchecked"}) public static Set computeInPlaceUpdatableFields(AddUpdateCommand cmd) throws IOException { SolrInputDocument sdoc = cmd.getSolrInputDocument(); IndexSchema schema = cmd.getReq().getSchema(); @@ -300,7 +302,7 @@ public static boolean isDerivedFromDoc(SolrInputDocument fullDoc, SolrInputDocum /** * * @param completeHierarchy SolrInputDocument that represents the nested document hierarchy from its root - * @param fieldPath the path to fetch, seperated by a '/' e.g. /children/grandChildren + * @param fieldPath the path to fetch, separated by a '/' e.g. /children/grandChildren * @return the SolrInputField of fieldPath */ public static SolrInputField getFieldFromHierarchy(SolrInputDocument completeHierarchy, String fieldPath) { @@ -353,7 +355,7 @@ public boolean doInPlaceUpdateMerge(AddUpdateCommand cmd, Set updatedFie // This can happen, despite requesting for these fields in the call to RTGC.getInputDocument, if the document was // fetched from the tlog and had all these fields (possibly because it was a full document ADD operation).
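// Trim the old document down to id, _version_ and the fields being updated;
// anything else on it is stale here and must not leak into the in-place update.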
if (updatedFields != null) { - Collection names = new HashSet(oldDocument.getFieldNames()); + Collection names = new HashSet<>(oldDocument.getFieldNames()); for (String fieldName: names) { if (fieldName.equals(CommonParams.VERSION_FIELD)==false && fieldName.equals(ID)==false && updatedFields.contains(fieldName)==false) { oldDocument.remove(fieldName); @@ -415,7 +417,9 @@ public SolrInputDocument mergeChildDoc(SolrInputDocument sdoc, SolrInputDocument * @param updateDoc the document that was sent as part of the Add Update Command * @return updated SolrInputDocument */ + @SuppressWarnings({"unchecked"}) public SolrInputDocument updateDocInSif(SolrInputField updateSif, SolrInputDocument cmdDocWChildren, SolrInputDocument updateDoc) { + @SuppressWarnings({"rawtypes"}) List sifToReplaceValues = (List) updateSif.getValues(); final boolean wasList = updateSif.getValue() instanceof Collection; int index = getDocIndexFromCollection(cmdDocWChildren, sifToReplaceValues); @@ -446,33 +450,33 @@ protected void doAddDistinct(SolrInputDocument toDoc, SolrInputField sif, Object final String name = sif.getName(); SolrInputField existingField = toDoc.get(name); + // throws exception if field doesn't exist SchemaField sf = schema.getField(name); - if (sf != null) { - Collection original = existingField != null ? - existingField.getValues() : - new ArrayList<>(); + Collection original = existingField != null ? + existingField.getValues() : + new ArrayList<>(); - int initialSize = original.size(); - if (fieldVal instanceof Collection) { - for (Object object : (Collection) fieldVal) { - if (!original.contains(object)) { - original.add(object); - } - } - } else { - Object object = sf.getType().toNativeType(fieldVal); - if (!original.contains(object)) { - original.add(object); + int initialSize = original.size(); + if (fieldVal instanceof Collection) { + for (Object object : (Collection) fieldVal) { + Object obj = sf.getType().toNativeType(object); + if (!original.contains(obj)) { + original.add(obj); } } + } else { + Object object = sf.getType().toNativeType(fieldVal); + if (!original.contains(object)) { + original.add(object); + } + } - if (original.size() > initialSize) { // update only if more are added - if (original.size() == 1) { // if single value, pass the value instead of List - doAdd(toDoc, sif, original.toArray()[0]); - } else { - toDoc.setField(name, original); - } + if (original.size() > initialSize) { // update only if more are added + if (original.size() == 1) { // if single value, pass the value instead of List + doAdd(toDoc, sif, original.toArray()[0]); + } else { + toDoc.setField(name, original); } } } @@ -511,6 +515,7 @@ protected void doRemove(SolrInputDocument toDoc, SolrInputField sif, Object fiel final String name = sif.getName(); SolrInputField existingField = toDoc.get(name); if (existingField == null) return; + @SuppressWarnings({"rawtypes"}) final Collection original = existingField.getValues(); if (fieldVal instanceof Collection) { for (Object object : (Collection) fieldVal) { @@ -546,6 +551,7 @@ protected void doRemoveRegex(SolrInputDocument toDoc, SolrInputField sif, Object private Collection preparePatterns(Object fieldVal) { final Collection patterns = new LinkedHashSet<>(1); if (fieldVal instanceof Collection) { + @SuppressWarnings({"unchecked"}) Collection patternVals = (Collection) fieldVal; for (Object patternVal : patternVals) { patterns.add(Pattern.compile(patternVal.toString())); @@ -568,6 +574,7 @@ private static boolean isChildDoc(Object obj) { if(!(obj instanceof 
Collection)) { return obj instanceof SolrDocumentBase; } + @SuppressWarnings({"rawtypes"}) Collection objValues = (Collection) obj; if(objValues.size() == 0) { return false; @@ -575,7 +582,7 @@ private static boolean isChildDoc(Object obj) { return objValues.iterator().next() instanceof SolrDocumentBase; } - private void removeObj(Collection original, Object toRemove, String fieldName) { + private void removeObj(@SuppressWarnings({"rawtypes"})Collection original, Object toRemove, String fieldName) { if(isChildDoc(toRemove)) { removeChildDoc(original, (SolrInputDocument) toRemove); } else { @@ -583,7 +590,8 @@ private void removeObj(Collection original, Object toRemove, String fieldName) { } } - private static void removeChildDoc(Collection original, SolrInputDocument docToRemove) { + @SuppressWarnings({"unchecked"}) + private static void removeChildDoc(@SuppressWarnings({"rawtypes"})Collection original, SolrInputDocument docToRemove) { for(SolrInputDocument doc: (Collection) original) { if(isDerivedFromDoc(doc, docToRemove)) { original.remove(doc); diff --git a/solr/core/src/java/org/apache/solr/update/processor/CdcrUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/CdcrUpdateProcessorFactory.java index 6de642078277..08cec4f41e48 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/CdcrUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/CdcrUpdateProcessorFactory.java @@ -31,7 +31,7 @@ public class CdcrUpdateProcessorFactory implements DistributingUpdateProcessorFactory { @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { } diff --git a/solr/core/src/java/org/apache/solr/update/processor/ClassificationUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/ClassificationUpdateProcessorFactory.java index d252d691bce3..c4a9ce3d3a8a 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/ClassificationUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/ClassificationUpdateProcessorFactory.java @@ -63,7 +63,7 @@ public enum Algorithm {KNN, BAYES} private ClassificationUpdateProcessorParams classificationParams; @Override - public void init(final NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})final NamedList args) { if (args != null) { params = args.toSolrParams(); classificationParams = new ClassificationUpdateProcessorParams(); diff --git a/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java index 3cffc11479a8..11fe324afb16 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java @@ -198,7 +198,7 @@ protected final FieldNameSelector getSourceSelector() { @SuppressWarnings("unchecked") @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { // high level (loose) check for which type of config we have. // @@ -227,7 +227,7 @@ public void init(NamedList args) { * "source" and "dest" init params do not exist. 
*/ @SuppressWarnings("unchecked") - private void initSimpleRegexReplacement(NamedList args) { + private void initSimpleRegexReplacement(@SuppressWarnings({"rawtypes"})NamedList args) { // The syntactic sugar for the case where there is only one regex pattern for source and the same pattern // is used for the destination pattern... // @@ -283,7 +283,7 @@ private void initSimpleRegexReplacement(NamedList args) { * "source" and "dest" init params do exist. */ @SuppressWarnings("unchecked") - private void initSourceSelectorSyntax(NamedList args) { + private void initSourceSelectorSyntax(@SuppressWarnings({"rawtypes"})NamedList args) { // Full and complete syntax where source and dest are mandatory. // // source may be a single string or a selector. @@ -307,6 +307,7 @@ private void initSourceSelectorSyntax(NamedList args) { if (1 == sources.size()) { if (sources.get(0) instanceof NamedList) { // nested set of selector options + @SuppressWarnings({"rawtypes"}) NamedList selectorConfig = (NamedList) args.remove(SOURCE_PARAM); srcInclusions = parseSelectorParams(selectorConfig); @@ -322,6 +323,7 @@ private void initSourceSelectorSyntax(NamedList args) { throw new SolrException(SERVER_ERROR, "Init param '" + SOURCE_PARAM + "' child 'exclude' must be "); } + @SuppressWarnings({"rawtypes"}) NamedList exc = (NamedList) excObj; srcExclusions.add(parseSelectorParams(exc)); if (0 < exc.size()) { @@ -353,6 +355,7 @@ private void initSourceSelectorSyntax(NamedList args) { } if (d instanceof NamedList) { + @SuppressWarnings({"rawtypes"}) NamedList destList = (NamedList) d; Object patt = destList.remove(PATTERN_PARAM); @@ -469,7 +472,7 @@ public void processAdd(AddUpdateCommand cmd) throws IOException { } /** macro */ - private static SelectorParams parseSelectorParams(NamedList args) { + private static SelectorParams parseSelectorParams(@SuppressWarnings({"rawtypes"})NamedList args) { return FieldMutatingUpdateProcessorFactory.parseSelectorParams(args); } diff --git a/solr/core/src/java/org/apache/solr/update/processor/ConcatFieldUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/ConcatFieldUpdateProcessorFactory.java index bcddd1eb1240..d20d3b585c89 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/ConcatFieldUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/ConcatFieldUpdateProcessorFactory.java @@ -67,7 +67,7 @@ public final class ConcatFieldUpdateProcessorFactory extends FieldMutatingUpdate @SuppressWarnings("unchecked") @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { Object d = args.remove("delimiter"); if (null != d) delimiter = d.toString(); diff --git a/solr/core/src/java/org/apache/solr/update/processor/DefaultValueUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/DefaultValueUpdateProcessorFactory.java index 852e5d3bcb3e..55ec6ccb3acd 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/DefaultValueUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/DefaultValueUpdateProcessorFactory.java @@ -54,7 +54,7 @@ public class DefaultValueUpdateProcessorFactory @SuppressWarnings("unchecked") @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { Object obj = args.remove("value"); if (null == obj) { diff --git a/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java 
b/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java index 158900d65d01..8da2df75fb48 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java +++ b/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java @@ -131,7 +131,7 @@ private boolean isCloneRequiredOnLeader(UpdateRequestProcessor next) { while (nextInChain != null) { Class klass = nextInChain.getClass(); if (klass != LogUpdateProcessorFactory.LogUpdateProcessor.class - && klass != RunUpdateProcessor.class + && klass != RunUpdateProcessorFactory.RunUpdateProcessor.class && klass != TolerantUpdateProcessor.class) { shouldClone = true; break; diff --git a/solr/core/src/java/org/apache/solr/update/processor/DocBasedVersionConstraintsProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/DocBasedVersionConstraintsProcessor.java index 8e78a5519186..6fdb8e05f67e 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/DocBasedVersionConstraintsProcessor.java +++ b/solr/core/src/java/org/apache/solr/update/processor/DocBasedVersionConstraintsProcessor.java @@ -325,7 +325,9 @@ protected boolean versionInUpdateIsAcceptable(Object[] newUserVersions, * @param userVersionFieldName Field name of the user versions being compared * @return True if acceptable, false if not. */ - protected boolean newUpdateComparePasses(Comparable newUserVersion, Comparable oldUserVersion, String userVersionFieldName) { + @SuppressWarnings({"unchecked"}) + protected boolean newUpdateComparePasses(@SuppressWarnings({"rawtypes"})Comparable newUserVersion, + @SuppressWarnings({"rawtypes"})Comparable oldUserVersion, String userVersionFieldName) { return oldUserVersion.compareTo(newUserVersion) < 0; } @@ -351,10 +353,12 @@ private static FunctionValues[] getManyFunctionValues(LeafReaderContext segmentC return values; } + @SuppressWarnings({"unchecked"}) private static FunctionValues getFunctionValues(LeafReaderContext segmentContext, SchemaField field, SolrIndexSearcher searcher) throws IOException { ValueSource vs = field.getType().getValueSource(field, null); + @SuppressWarnings({"rawtypes"}) Map context = ValueSource.newContext(searcher); vs.createWeight(context, searcher); return vs.getValues(context, segmentContext); diff --git a/solr/core/src/java/org/apache/solr/update/processor/DocBasedVersionConstraintsProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/DocBasedVersionConstraintsProcessorFactory.java index fba694a2d3d0..1292d2f18de9 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/DocBasedVersionConstraintsProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/DocBasedVersionConstraintsProcessorFactory.java @@ -105,7 +105,7 @@ public class DocBasedVersionConstraintsProcessorFactory extends UpdateRequestPro @SuppressWarnings("unchecked") @Override - public void init( NamedList args ) { + public void init( @SuppressWarnings({"rawtypes"})NamedList args ) { Object tmp = args.remove("versionField"); if (null == tmp) { diff --git a/solr/core/src/java/org/apache/solr/update/processor/DocExpirationUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/DocExpirationUpdateProcessorFactory.java index 5305fe1ca94f..5e709f319606 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/DocExpirationUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/DocExpirationUpdateProcessorFactory.java @@ -195,7 +195,8 @@ private 
SolrException confErr(final String msg) { private SolrException confErr(final String msg, SolrException root) { return new SolrException(SERVER_ERROR, this.getClass().getSimpleName()+": "+msg, root); } - private String removeArgStr(final NamedList args, final String arg, final String def, + private String removeArgStr(@SuppressWarnings({"rawtypes"})final NamedList args, + final String arg, final String def, final String errMsg) { if (args.indexOf(arg,0) < 0) return def; @@ -210,7 +211,7 @@ private String removeArgStr(final NamedList args, final String arg, final String @SuppressWarnings("unchecked") @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { deleteChainName = removeArgStr(args, DEL_CHAIN_NAME_CONF, null, "must be a <str> or <null/> for default chain"); diff --git a/solr/core/src/java/org/apache/solr/update/processor/FieldLengthUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/FieldLengthUpdateProcessorFactory.java index 0b35400be925..2189f9dacd58 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/FieldLengthUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/FieldLengthUpdateProcessorFactory.java @@ -53,7 +53,7 @@ public final class FieldLengthUpdateProcessorFactory extends FieldMutatingUpdate @SuppressWarnings("unchecked") @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { // no length specific init args super.init(args); } diff --git a/solr/core/src/java/org/apache/solr/update/processor/FieldMutatingUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/FieldMutatingUpdateProcessor.java index d009b17ec16e..ed43b323f4ee 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/FieldMutatingUpdateProcessor.java +++ b/solr/core/src/java/org/apache/solr/update/processor/FieldMutatingUpdateProcessor.java @@ -209,12 +209,14 @@ public static FieldNameSelector wrap(final FieldNameSelector includes, private static final class ConfigurableFieldNameSelectorHelper { final SelectorParams params; + @SuppressWarnings({"rawtypes"}) final Collection classes; private ConfigurableFieldNameSelectorHelper(final SolrResourceLoader loader, final SelectorParams params) { this.params = params; + @SuppressWarnings({"rawtypes"}) final Collection classes = new ArrayList<>(params.typeClass.size()); for (String t : params.typeClass) { @@ -267,8 +269,9 @@ public boolean shouldMutateBasedOnSchema(final String fieldName, IndexSchema sch * returns true if the Object 'o' is an instance of any class in * the Collection */ - private static boolean instanceOfAny(Object o, Collection classes) { - for (Class c : classes) { + private static boolean instanceOfAny(Object o, + @SuppressWarnings({"rawtypes"})Collection classes) { + for (@SuppressWarnings({"rawtypes"})Class c : classes) { if ( c.isInstance(o) ) return true; } return false; } diff --git a/solr/core/src/java/org/apache/solr/update/processor/FieldMutatingUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/FieldMutatingUpdateProcessorFactory.java index 541b1f0b0549..9e316df58633 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/FieldMutatingUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/FieldMutatingUpdateProcessorFactory.java @@ -138,7 +138,9 @@ protected final FieldNameSelector getSelector() { "selector was never initialized, inform(SolrCore) never called???"); }
- public static SelectorParams parseSelectorParams(NamedList args) { + + @SuppressWarnings({"unchecked"}) + public static SelectorParams parseSelectorParams(@SuppressWarnings({"rawtypes"})NamedList args) { SelectorParams params = new SelectorParams(); params.fieldName = new HashSet<>(args.removeConfigArgs("fieldName")); @@ -168,8 +170,10 @@ public static SelectorParams parseSelectorParams(NamedList args) { return params; } - public static Collection parseSelectorExclusionParams(NamedList args) { + public static Collection parseSelectorExclusionParams( + @SuppressWarnings({"rawtypes"})NamedList args) { Collection exclusions = new ArrayList<>(); + @SuppressWarnings({"unchecked"}) List excList = args.getAll("exclude"); for (Object excObj : excList) { if (null == excObj) { @@ -180,6 +184,7 @@ public static Collection parseSelectorExclusionParams(NamedList throw new SolrException (SolrException.ErrorCode.SERVER_ERROR, "'exclude' init param must be <lst/>"); } + @SuppressWarnings({"rawtypes"}) NamedList exc = (NamedList) excObj; exclusions.add(parseSelectorParams(exc)); if (0 < exc.size()) { @@ -203,7 +208,7 @@ public static Collection parseSelectorExclusionParams(NamedList */ @SuppressWarnings("unchecked") @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { inclusions = parseSelectorParams(args); exclusions = parseSelectorExclusionParams(args); diff --git a/solr/core/src/java/org/apache/solr/update/processor/FieldNameMutatingUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/FieldNameMutatingUpdateProcessorFactory.java index 03afdf87fcaf..16d3671ef4c1 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/FieldNameMutatingUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/FieldNameMutatingUpdateProcessorFactory.java @@ -83,7 +83,7 @@ public void processDelete(DeleteUpdateCommand cmd) throws IOException { } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { sourcePattern = (String) args.get("pattern"); replacement = (String) args.get("replacement"); if(sourcePattern ==null || replacement == null) { diff --git a/solr/core/src/java/org/apache/solr/update/processor/IgnoreCommitOptimizeUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/IgnoreCommitOptimizeUpdateProcessorFactory.java index 315d6cd2bcce..66d45e75d472 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/IgnoreCommitOptimizeUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/IgnoreCommitOptimizeUpdateProcessorFactory.java @@ -49,7 +49,7 @@ public class IgnoreCommitOptimizeUpdateProcessorFactory extends UpdateRequestPro protected boolean ignoreOptimizeOnly = false; // default behavior is to ignore commits and optimize @Override - public void init(final NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})final NamedList args) { SolrParams params = (args != null) ?
args.toSolrParams() : null; if (params == null) { errorCode = ErrorCode.FORBIDDEN; // default is 403 error diff --git a/solr/core/src/java/org/apache/solr/update/processor/IgnoreLargeDocumentProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/IgnoreLargeDocumentProcessorFactory.java index 3354ca6af49a..9ef92821765c 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/IgnoreLargeDocumentProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/IgnoreLargeDocumentProcessorFactory.java @@ -48,7 +48,7 @@ public class IgnoreLargeDocumentProcessorFactory extends UpdateRequestProcessorF private long maxDocumentSize = 1024 * 1024; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { maxDocumentSize = args.toSolrParams().required().getLong(LIMIT_SIZE_PARAM); args.remove(LIMIT_SIZE_PARAM); @@ -125,6 +125,7 @@ static long estimate(SolrInputDocument doc) { return size; } + @SuppressWarnings({"unchecked", "rawtypes"}) static long estimate(Object obj) { if (obj instanceof SolrInputDocument) { return estimate((SolrInputDocument) obj); @@ -162,7 +163,7 @@ private static long estimate(Map map) { return size; } - private static long estimate(Collection collection) { + private static long estimate(@SuppressWarnings({"rawtypes"})Collection collection) { if (collection.isEmpty()) return 0; long size = 0; for (Object obj : collection) { diff --git a/solr/core/src/java/org/apache/solr/update/processor/LastFieldValueUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/LastFieldValueUpdateProcessorFactory.java index 35d9f3fd2bb5..a1ebe3f4e23b 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/LastFieldValueUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/LastFieldValueUpdateProcessorFactory.java @@ -59,6 +59,7 @@ public Collection pickSubset(Collection values) { if (values instanceof List) { // optimize index lookup + @SuppressWarnings({"rawtypes"}) List l = (List)values; result = l.get(l.size()-1); } else if (values instanceof SortedSet) { diff --git a/solr/core/src/java/org/apache/solr/update/processor/LogUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/LogUpdateProcessorFactory.java index 98d685616bab..2be93c344eb6 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/LogUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/LogUpdateProcessorFactory.java @@ -52,7 +52,7 @@ public class LogUpdateProcessorFactory extends UpdateRequestProcessorFactory imp int maxNumToLog = 10; int slowUpdateThresholdMillis = -1; @Override - public void init( final NamedList args ) { + public void init( @SuppressWarnings({"rawtypes"})final NamedList args ) { if( args != null ) { SolrParams params = args.toSolrParams(); maxNumToLog = params.getInt( "maxNumToLog", maxNumToLog ); @@ -62,7 +62,8 @@ public void init( final NamedList args ) { @Override public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) { - return log.isInfoEnabled() ? new LogUpdateProcessor(req, rsp, this, next) : next; + return (log.isInfoEnabled() || slowUpdateThresholdMillis >= 0) ? 
+ new LogUpdateProcessor(req, rsp, this, next) : next; } static class LogUpdateProcessor extends UpdateRequestProcessor { diff --git a/solr/core/src/java/org/apache/solr/update/processor/MaxFieldValueUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/MaxFieldValueUpdateProcessorFactory.java index fe872b0a8fe6..06f69ddd8b18 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/MaxFieldValueUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/MaxFieldValueUpdateProcessorFactory.java @@ -55,8 +55,9 @@ public final class MaxFieldValueUpdateProcessorFactory extends FieldValueSubsetUpdateProcessorFactory { @Override - @SuppressWarnings("unchecked") - public Collection pickSubset(Collection values) { + @SuppressWarnings({"unchecked"}) + public Collection pickSubset(@SuppressWarnings({"rawtypes"})Collection values) { + @SuppressWarnings({"rawtypes"}) Collection result = values; try { // NOTE: the extra cast to Object is needed to prevent compile diff --git a/solr/core/src/java/org/apache/solr/update/processor/MinFieldValueUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/MinFieldValueUpdateProcessorFactory.java index b64d16239b49..8ef1602b2d00 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/MinFieldValueUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/MinFieldValueUpdateProcessorFactory.java @@ -55,8 +55,9 @@ public final class MinFieldValueUpdateProcessorFactory extends FieldValueSubsetUpdateProcessorFactory { @Override - @SuppressWarnings("unchecked") - public Collection pickSubset(Collection values) { + @SuppressWarnings({"unchecked"}) + public Collection pickSubset(@SuppressWarnings({"rawtypes"})Collection values) { + @SuppressWarnings({"rawtypes"}) Collection result = values; try { // NOTE: the extra cast to Object is needed to prevent compile diff --git a/solr/core/src/java/org/apache/solr/update/processor/ParseBooleanFieldUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/ParseBooleanFieldUpdateProcessorFactory.java index b017e2dd97d1..ea1863f0bb73 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/ParseBooleanFieldUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/ParseBooleanFieldUpdateProcessorFactory.java @@ -103,7 +103,7 @@ protected Object mutateValue(Object srcVal) { } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { Object caseSensitiveParam = args.remove(CASE_SENSITIVE_PARAM); if (null != caseSensitiveParam) { if (caseSensitiveParam instanceof Boolean) { @@ -113,6 +113,7 @@ public void init(NamedList args) { } } + @SuppressWarnings({"unchecked"}) Collection trueValuesParam = args.removeConfigArgs(TRUE_VALUES_PARAM); if ( ! trueValuesParam.isEmpty()) { trueValues.clear(); @@ -121,6 +122,7 @@ public void init(NamedList args) { } } + @SuppressWarnings({"unchecked"}) Collection falseValuesParam = args.removeConfigArgs(FALSE_VALUES_PARAM); if ( ! 
falseValuesParam.isEmpty()) { falseValues.clear(); diff --git a/solr/core/src/java/org/apache/solr/update/processor/ParseDateFieldUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/ParseDateFieldUpdateProcessorFactory.java index 2d28e14ed6fb..bb5a88cc7723 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/ParseDateFieldUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/ParseDateFieldUpdateProcessorFactory.java @@ -166,7 +166,7 @@ protected Object mutateValue(Object srcVal) { } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { Locale locale; String localeParam = (String)args.remove(LOCALE_PARAM); @@ -182,6 +182,7 @@ public void init(NamedList args) { defaultTimeZone = ZoneId.of(defaultTimeZoneParam.toString()); } + @SuppressWarnings({"unchecked"}) Collection formatsParam = args.removeConfigArgs(FORMATS_PARAM); if (null != formatsParam) { for (String value : formatsParam) { diff --git a/solr/core/src/java/org/apache/solr/update/processor/ParseNumericFieldUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/ParseNumericFieldUpdateProcessorFactory.java index e00fd081e73b..489970c7ae23 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/ParseNumericFieldUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/ParseNumericFieldUpdateProcessorFactory.java @@ -50,7 +50,7 @@ public abstract class ParseNumericFieldUpdateProcessorFactory extends FieldMutat protected Locale locale = Locale.ROOT; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { String localeParam = (String)args.remove(LOCALE_PARAM); if (null != localeParam) { locale = LocaleUtils.toLocale(localeParam); diff --git a/solr/core/src/java/org/apache/solr/update/processor/PreAnalyzedUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/PreAnalyzedUpdateProcessorFactory.java index 66b794517170..f40732447624 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/PreAnalyzedUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/PreAnalyzedUpdateProcessorFactory.java @@ -107,7 +107,7 @@ public class PreAnalyzedUpdateProcessorFactory extends FieldMutatingUpdateProces private String parserImpl; @Override - public void init(final NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})final NamedList args) { parserImpl = (String)args.get("parser"); args.remove("parser"); // initialize inclusion / exclusion patterns diff --git a/solr/core/src/java/org/apache/solr/update/processor/RegexReplaceProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/RegexReplaceProcessorFactory.java index 9e0e0e7682a2..edeb43d8557c 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/RegexReplaceProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/RegexReplaceProcessorFactory.java @@ -79,7 +79,7 @@ public final class RegexReplaceProcessorFactory extends FieldMutatingUpdateProce @SuppressWarnings("unchecked") @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { Object patternParam = args.remove(PATTERN_PARAM); diff --git a/solr/core/src/java/org/apache/solr/update/processor/RegexpBoostProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/RegexpBoostProcessor.java index 
9c58432a5dce..57e16739a6c4 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/RegexpBoostProcessor.java +++ b/solr/core/src/java/org/apache/solr/update/processor/RegexpBoostProcessor.java @@ -88,6 +88,7 @@ public class RegexpBoostProcessor extends UpdateRequestProcessor { try { synchronized (sharedObjectCache) { + @SuppressWarnings({"unchecked"}) List cachedBoostEntries = (List) sharedObjectCache.get(BOOST_ENTRIES_CACHE_KEY); diff --git a/solr/core/src/java/org/apache/solr/update/processor/RemoveBlankFieldUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/RemoveBlankFieldUpdateProcessorFactory.java index 6c6c2a88bd09..baf3d89925d1 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/RemoveBlankFieldUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/RemoveBlankFieldUpdateProcessorFactory.java @@ -49,7 +49,7 @@ public final class RemoveBlankFieldUpdateProcessorFactory extends FieldMutatingU @SuppressWarnings("unchecked") @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { // no trim specific init args super.init(args); } diff --git a/solr/core/src/java/org/apache/solr/update/processor/RunUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/RunUpdateProcessorFactory.java index d49ab2719213..a208d41cc9bd 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/RunUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/RunUpdateProcessorFactory.java @@ -33,14 +33,12 @@ * @since solr 1.3 * @see DistributingUpdateProcessorFactory */ -public class RunUpdateProcessorFactory extends UpdateRequestProcessorFactory -{ +public class RunUpdateProcessorFactory extends UpdateRequestProcessorFactory { public static final String PRE_RUN_CHAIN_NAME = "_preRun_"; @Override - public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) - { + public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) { RunUpdateProcessor runUpdateProcessor = new RunUpdateProcessor(req, next); UpdateRequestProcessorChain preRun = req.getCore().getUpdateProcessingChain(PRE_RUN_CHAIN_NAME); if (preRun != null) { @@ -49,82 +47,79 @@ public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryRespons return runUpdateProcessor; } } -} -class RunUpdateProcessor extends UpdateRequestProcessor -{ - private final SolrQueryRequest req; - private final UpdateHandler updateHandler; - private boolean changesSinceCommit = false; + static class RunUpdateProcessor extends UpdateRequestProcessor { + private final SolrQueryRequest req; + private final UpdateHandler updateHandler; - public RunUpdateProcessor(SolrQueryRequest req, UpdateRequestProcessor next) { - super( next ); - this.req = req; - this.updateHandler = req.getCore().getUpdateHandler(); - } + private boolean changesSinceCommit = false; - @Override - public void processAdd(AddUpdateCommand cmd) throws IOException { - - if (AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) { - throw new SolrException - (SolrException.ErrorCode.BAD_REQUEST, - "RunUpdateProcessor has received an AddUpdateCommand containing a document that appears to still contain Atomic document update operations, most likely because DistributedUpdateProcessorFactory was explicitly disabled from this updateRequestProcessorChain"); + public RunUpdateProcessor(SolrQueryRequest req, 
UpdateRequestProcessor next) { + super(next); + this.req = req; + this.updateHandler = req.getCore().getUpdateHandler(); } - updateHandler.addDoc(cmd); - super.processAdd(cmd); - changesSinceCommit = true; - } + @Override + public void processAdd(AddUpdateCommand cmd) throws IOException { - @Override - public void processDelete(DeleteUpdateCommand cmd) throws IOException { - if( cmd.isDeleteById()) { - updateHandler.delete(cmd); + if (AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) { + throw new SolrException + (SolrException.ErrorCode.BAD_REQUEST, + "RunUpdateProcessor has received an AddUpdateCommand containing a document that appears to still contain Atomic document update operations, most likely because DistributedUpdateProcessorFactory was explicitly disabled from this updateRequestProcessorChain"); + } + + updateHandler.addDoc(cmd); + super.processAdd(cmd); + changesSinceCommit = true; } - else { - updateHandler.deleteByQuery(cmd); + + @Override + public void processDelete(DeleteUpdateCommand cmd) throws IOException { + if (cmd.isDeleteById()) { + updateHandler.delete(cmd); + } else { + updateHandler.deleteByQuery(cmd); + } + super.processDelete(cmd); + changesSinceCommit = true; } - super.processDelete(cmd); - changesSinceCommit = true; - } - @Override - public void processMergeIndexes(MergeIndexesCommand cmd) throws IOException { - updateHandler.mergeIndexes(cmd); - super.processMergeIndexes(cmd); - } + @Override + public void processMergeIndexes(MergeIndexesCommand cmd) throws IOException { + updateHandler.mergeIndexes(cmd); + super.processMergeIndexes(cmd); + } - @Override - public void processCommit(CommitUpdateCommand cmd) throws IOException - { - updateHandler.commit(cmd); - super.processCommit(cmd); - if (!cmd.softCommit) { - // a hard commit means we don't need to flush the transaction log - changesSinceCommit = false; + @Override + public void processCommit(CommitUpdateCommand cmd) throws IOException { + updateHandler.commit(cmd); + super.processCommit(cmd); + if (!cmd.softCommit) { + // a hard commit means we don't need to flush the transaction log + changesSinceCommit = false; + } } - } - /** - * @since Solr 1.4 - */ - @Override - public void processRollback(RollbackUpdateCommand cmd) throws IOException - { - updateHandler.rollback(cmd); - super.processRollback(cmd); - changesSinceCommit = false; - } + /** + * @since Solr 1.4 + */ + @Override + public void processRollback(RollbackUpdateCommand cmd) throws IOException { + updateHandler.rollback(cmd); + super.processRollback(cmd); + changesSinceCommit = false; + } - @Override - public void finish() throws IOException { - if (changesSinceCommit && updateHandler.getUpdateLog() != null) { - updateHandler.getUpdateLog().finish(null); + @Override + public void finish() throws IOException { + if (changesSinceCommit && updateHandler.getUpdateLog() != null) { + updateHandler.getUpdateLog().finish(null); + } + super.finish(); } - super.finish(); } } diff --git a/solr/core/src/java/org/apache/solr/update/processor/SignatureUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/SignatureUpdateProcessorFactory.java index 7ea021942f44..a6cceaee9f3f 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/SignatureUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/SignatureUpdateProcessorFactory.java @@ -53,7 +53,7 @@ public class SignatureUpdateProcessorFactory private SolrParams params; @Override - public void init(final NamedList args) { + public void 
init(@SuppressWarnings({"rawtypes"})final NamedList args) { if (args != null) { SolrParams params = args.toSolrParams(); boolean enabled = params.getBool("enabled", true); diff --git a/solr/core/src/java/org/apache/solr/update/processor/SimpleUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/SimpleUpdateProcessorFactory.java index caa8c1702dfc..faf61c219375 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/SimpleUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/SimpleUpdateProcessorFactory.java @@ -31,6 +31,7 @@ */ public abstract class SimpleUpdateProcessorFactory extends UpdateRequestProcessorFactory { private String myName; // if classname==XyzUpdateProcessorFactory myName=Xyz + @SuppressWarnings({"rawtypes"}) protected NamedList initArgs = new NamedList(); private static ThreadLocal REQ = new ThreadLocal<>(); @@ -39,7 +40,7 @@ protected SimpleUpdateProcessorFactory() { } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init(args); this.initArgs = args; diff --git a/solr/core/src/java/org/apache/solr/update/processor/SkipExistingDocumentsProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/SkipExistingDocumentsProcessorFactory.java index 7fc33d7abadd..f2f119b8794d 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/SkipExistingDocumentsProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/SkipExistingDocumentsProcessorFactory.java @@ -97,7 +97,7 @@ public class SkipExistingDocumentsProcessorFactory extends UpdateRequestProcesso private boolean skipUpdateIfMissing = true; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { Object tmp = args.remove(PARAM_SKIP_INSERT_IF_EXISTS); if (null != tmp) { if (! 
(tmp instanceof Boolean) ) { diff --git a/solr/core/src/java/org/apache/solr/update/processor/StatelessScriptUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/StatelessScriptUpdateProcessorFactory.java index 10f82ad679de..d2f5a07173fb 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/StatelessScriptUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/StatelessScriptUpdateProcessorFactory.java @@ -177,8 +177,9 @@ public class StatelessScriptUpdateProcessorFactory extends UpdateRequestProcesso private ScriptEngineCustomizer scriptEngineCustomizer; @Override - public void init(NamedList args) { - Collection scripts = + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { + @SuppressWarnings({"unchecked"}) + Collection scripts = args.removeConfigArgs(SCRIPT_ARG); if (scripts.isEmpty()) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, diff --git a/solr/core/src/java/org/apache/solr/update/processor/TrimFieldUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/TrimFieldUpdateProcessorFactory.java index 54614eb5fe50..ab37d39ace20 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/TrimFieldUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/TrimFieldUpdateProcessorFactory.java @@ -47,7 +47,7 @@ public final class TrimFieldUpdateProcessorFactory extends FieldMutatingUpdatePr @SuppressWarnings("unchecked") @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { // no trim specific init args super.init(args); } diff --git a/solr/core/src/java/org/apache/solr/update/processor/TruncateFieldUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/TruncateFieldUpdateProcessorFactory.java index 8084b3ebf33c..f97e65899aa9 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/TruncateFieldUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/TruncateFieldUpdateProcessorFactory.java @@ -54,7 +54,7 @@ public final class TruncateFieldUpdateProcessorFactory @SuppressWarnings("unchecked") @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { Object lengthParam = args.remove(MAX_LENGTH_PARAM); if (null == lengthParam) { diff --git a/solr/core/src/java/org/apache/solr/update/processor/URLClassifyProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/URLClassifyProcessor.java index a3697e202c0d..9d727c78fdad 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/URLClassifyProcessor.java +++ b/solr/core/src/java/org/apache/solr/update/processor/URLClassifyProcessor.java @@ -33,14 +33,70 @@ import org.slf4j.LoggerFactory; /** + *

<p>
+ * Update processor which examines a URL and outputs to various other fields
+ * characteristics of that URL, including length, number of path levels, whether
+ * it is a top level URL (levels==0), whether it looks like a landing/index page,
+ * a canonical representation of the URL (e.g. stripping index.html), the domain
+ * and path parts of the URL etc.
+ * </p>
+ *
+ * <p>
+ * This processor is intended to be used in connection with processing web resources,
+ * and helping to produce values which may be used for boosting or filtering later.
+ * </p>
+ *
+ * <p>
+ * In the example configuration below, we construct a custom
+ * <code>updateRequestProcessorChain</code> and then instruct the
+ * <code>/update</code> requesthandler to use it for every incoming document.
+ * </p>
+ *
+ * <pre class="prettyprint">
+ * &lt;updateRequestProcessorChain name="urlProcessor"&gt;
+ *   &lt;processor class="org.apache.solr.update.processor.URLClassifyProcessorFactory"&gt;
+ *     &lt;bool name="enabled"&gt;true&lt;/bool&gt;
+ *     &lt;str name="inputField"&gt;id&lt;/str&gt;
+ *     &lt;str name="domainOutputField"&gt;hostname&lt;/str&gt;
+ *   &lt;/processor&gt;
+ *   &lt;processor class="solr.RunUpdateProcessorFactory" /&gt;
+ * &lt;/updateRequestProcessorChain&gt;
+ *
+ * &lt;requestHandler name="/update" class="solr.UpdateRequestHandler"&gt;
+ * &lt;lst name="defaults"&gt;
+ * &lt;str name="update.chain"&gt;urlProcessor&lt;/str&gt;
+ * &lt;/lst&gt;
+ * &lt;/requestHandler&gt;
+ * </pre>
+ *
+ * <p>
+ * Then, at index time, Solr will look at the <code>id</code> field value and extract
+ * its domain portion into a new <code>hostname</code> field. By default, the
+ * following fields will also be added:
+ * </p>
+ *
+ * <ul>
+ * <li>url_length</li>
+ * <li>url_levels</li>
+ * <li>url_toplevel</li>
+ * <li>url_landingpage</li>
+ * </ul>
+ *
+ * <p>
+ * For example, adding the following document
+ * </p>
+ * <pre>
+ * { "id":"http://wwww.mydomain.com/subpath/document.html" }
+ * </pre>
+ *
+ * <p>
+ * will result in this document in Solr:
+ * </p>
+ * <pre>
+ * {
+ *  "id":"http://wwww.mydomain.com/subpath/document.html",
+ *  "url_length":["46"],
+ *  "url_levels":["2"],
+ *  "url_toplevel":["0"],
+ *  "url_landingpage":["0"],
+ *  "hostname":["wwww.mydomain.com"],
+ *  "_version_":1603193062117343232
+ * }
+ * </pre>
    */ public class URLClassifyProcessor extends UpdateRequestProcessor { diff --git a/solr/core/src/java/org/apache/solr/update/processor/UUIDUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/UUIDUpdateProcessorFactory.java index 96d726367d23..9e02384869e2 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/UUIDUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/UUIDUpdateProcessorFactory.java @@ -77,7 +77,7 @@ public class UUIDUpdateProcessorFactory extends UpdateRequestProcessorFactory { protected String fieldName = null; @SuppressWarnings("unchecked") - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { Object obj = args.remove(FIELD_PARAM); if (null != obj) { diff --git a/solr/core/src/java/org/apache/solr/update/processor/UniqFieldsUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/UniqFieldsUpdateProcessorFactory.java index c9e27c242d2f..d315aa44c167 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/UniqFieldsUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/UniqFieldsUpdateProcessorFactory.java @@ -55,8 +55,7 @@ public FieldNameSelector getDefaultSelector(SolrCore core) { } @Override - @SuppressWarnings("unchecked") - public Collection pickSubset(Collection values) { + public Collection pickSubset(@SuppressWarnings({"rawtypes"})Collection values) { Set uniqs = new HashSet<>(); List result = new ArrayList<>(values.size()); for (Object o : values) { diff --git a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java index cdb304c9a9c6..c05822ee1c10 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java +++ b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java @@ -121,6 +121,7 @@ public UpdateRequestProcessorChain(SolrCore solrCore) { * @see DistributedUpdateProcessorFactory */ @Override + @SuppressWarnings({"rawtypes"}) public void init(PluginInfo info) { final String infomsg = "updateRequestProcessorChain \"" + (null != info.name ? 
info.name : "") + "\"" + @@ -173,6 +174,7 @@ public void init(PluginInfo info) { } + @SuppressWarnings({"rawtypes"}) private List createProcessors(PluginInfo info) { List processors = info.getChildren("processor"); return processors.stream().map(it -> { @@ -275,7 +277,8 @@ public static UpdateRequestProcessorChain constructChain(UpdateRequestProcessorC return result; } - private static void insertBefore(LinkedList urps, List newFactories, Class klas, int idx) { + private static void insertBefore(LinkedList urps, List newFactories, + @SuppressWarnings({"rawtypes"})Class klas, int idx) { if (newFactories.isEmpty()) return; for (int i = 0; i < urps.size(); i++) { if (klas.isInstance(urps.get(i))) { @@ -306,6 +309,7 @@ static List getReqProcessors(String processor, So p = core.getUpdateProcessors().get(s); } if (p == null) { + @SuppressWarnings({"unchecked"}) Class factoryClass = implicits.get(s); if(factoryClass != null) { PluginInfo pluginInfo = new PluginInfo("updateProcessor", @@ -356,7 +360,7 @@ public boolean equals(Object obj) { public static class LazyUpdateProcessorFactoryHolder extends PluginBag.PluginHolder { private volatile UpdateRequestProcessorFactory lazyFactory; - public LazyUpdateProcessorFactoryHolder(final PluginBag.PluginHolder holder) { + public LazyUpdateProcessorFactoryHolder(@SuppressWarnings({"rawtypes"})final PluginBag.PluginHolder holder) { super(holder.getPluginInfo()); lazyFactory = new LazyUpdateRequestProcessorFactory(holder); } @@ -370,6 +374,7 @@ public UpdateRequestProcessorFactory get() { public static class LazyUpdateRequestProcessorFactory extends UpdateRequestProcessorFactory { private final PluginBag.PluginHolder holder; + @SuppressWarnings({"unchecked", "rawtypes"}) public LazyUpdateRequestProcessorFactory(PluginBag.PluginHolder holder) { this.holder = holder; } @@ -384,6 +389,7 @@ public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryRespons } } } + @SuppressWarnings({"unchecked", "rawtypes"}) public static final Map implicits = new ImmutableMap.Builder() .put(TemplateUpdateProcessorFactory.NAME, TemplateUpdateProcessorFactory.class) .put(AtomicUpdateProcessorFactory.NAME, AtomicUpdateProcessorFactory.class) diff --git a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorFactory.java index 5e3a4738a4cf..7c8fdcc75d87 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorFactory.java @@ -41,7 +41,7 @@ public abstract class UpdateRequestProcessorFactory implements NamedListInitiali public interface RunAlways {} @Override - public void init( NamedList args ) + public void init( @SuppressWarnings({"rawtypes"})NamedList args ) { // could process the Node } diff --git a/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java b/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java index e8107d8eb44a..677c94653a51 100644 --- a/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java +++ b/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java @@ -417,7 +417,7 @@ private void markAndSweepByCacheSize() { int wantToKeep = lowerWaterMark; int wantToRemove = sz - lowerWaterMark; - @SuppressWarnings("unchecked") // generic array's are annoying + @SuppressWarnings({"unchecked", "rawtypes"}) CacheEntry[] eset = new CacheEntry[sz]; int eSize = 0; @@ -552,6 +552,7 @@ private void 
markAndSweepByCacheSize() { // this loop so far. queue.myMaxSize = sz - lowerWaterMark - numRemoved; while (queue.size() > queue.myMaxSize && queue.size() > 0) { + @SuppressWarnings({"rawtypes"}) CacheEntry otherEntry = queue.pop(); newOldestEntry = Math.min(otherEntry.lastAccessedCopy, newOldestEntry); } @@ -595,7 +596,8 @@ Iterable> getValues() { } @Override - protected boolean lessThan(CacheEntry a, CacheEntry b) { + protected boolean lessThan(@SuppressWarnings({"rawtypes"})CacheEntry a, + @SuppressWarnings({"rawtypes"})CacheEntry b) { // reverse the parameter order so that the queue keeps the oldest items return b.lastAccessedCopy < a.lastAccessedCopy; } @@ -859,17 +861,19 @@ public static interface EvictionListener{ } private static class CleanupThread extends Thread { + @SuppressWarnings({"rawtypes"}) private WeakReference cache; private boolean stop = false; - public CleanupThread(ConcurrentLRUCache c) { + public CleanupThread(@SuppressWarnings({"rawtypes"})ConcurrentLRUCache c) { cache = new WeakReference<>(c); } @Override public void run() { while (true) { + @SuppressWarnings({"rawtypes"}) ConcurrentLRUCache c = cache.get(); if(c == null) break; synchronized (this) { diff --git a/solr/core/src/java/org/apache/solr/util/DOMUtil.java b/solr/core/src/java/org/apache/solr/util/DOMUtil.java index 29dab931aa7d..48d2f41c0789 100644 --- a/solr/core/src/java/org/apache/solr/util/DOMUtil.java +++ b/solr/core/src/java/org/apache/solr/util/DOMUtil.java @@ -107,6 +107,7 @@ public static NamedList childNodesToNamedList(Node nd) { return nodesToNamedList(nd.getChildNodes()); } + @SuppressWarnings({"rawtypes"}) public static List childNodesToList(Node nd) { return nodesToList(nd.getChildNodes()); } @@ -119,6 +120,7 @@ public static NamedList nodesToNamedList(NodeList nlst) { return clst; } + @SuppressWarnings({"rawtypes"}) public static List nodesToList(NodeList nlst) { List lst = new ArrayList(); for (int i=0; i") - .create("out"), - OptionBuilder + .required(false) + .desc("file name . defaults to collection-name.") + .build(), + Option.builder("format") .hasArg() - .isRequired(false) - .withDescription("format json/javabin, default to json. file extension would be .json") - .create("format"), - OptionBuilder + .required(false) + .desc("format json/javabin, default to json. file extension would be .json") + .build(), + Option.builder("limit") .hasArg() - .isRequired(false) - .withDescription("Max number of docs to download. default = 100, use -1 for all docs") - .create("limit"), - OptionBuilder + .required(false) + .desc("Max number of docs to download. default = 100, use -1 for all docs") + .build(), + Option.builder("query") .hasArg() - .isRequired(false) - .withDescription("A custom query, default is *:*") - .create("query"), - OptionBuilder + .required(false) + .desc("A custom query, default is *:*") + .build(), + Option.builder("fields") .hasArg() - .isRequired(false) - .withDescription("Comma separated fields. By default all fields are fetched") - .create("fields") + .required(false) + .desc("Comma separated fields. 
By default all fields are fetched") + .build() }; static class JsonSink extends DocsSink { @@ -274,6 +273,7 @@ public void end() throws IOException { } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public synchronized void accept(SolrDocument doc) throws IOException { charArr.reset(); Map m = new LinkedHashMap(doc.size()); @@ -301,7 +301,7 @@ public synchronized void accept(SolrDocument doc) throws IOException { super.accept(doc); } - private boolean hasdate(List list) { + private boolean hasdate(@SuppressWarnings({"rawtypes"})List list) { boolean hasDate = false; for (Object o : list) { if(o instanceof Date){ @@ -378,10 +378,10 @@ public synchronized void accept(SolrDocument doc) throws IOException { static class MultiThreadedRunner extends Info { ExecutorService producerThreadpool, consumerThreadpool; - ArrayBlockingQueue queue = new ArrayBlockingQueue(1000); + ArrayBlockingQueue queue = new ArrayBlockingQueue<>(1000); SolrDocument EOFDOC = new SolrDocument(); volatile boolean failed = false; - Map corehandlers = new HashMap(); + Map corehandlers = new HashMap<>(); private long startTime ; @SuppressForbidden(reason = "Need to print out time") diff --git a/solr/core/src/java/org/apache/solr/util/MockSearchableSolrClient.java b/solr/core/src/java/org/apache/solr/util/MockSearchableSolrClient.java index 646d00880175..3dcb1617f5ac 100644 --- a/solr/core/src/java/org/apache/solr/util/MockSearchableSolrClient.java +++ b/solr/core/src/java/org/apache/solr/util/MockSearchableSolrClient.java @@ -50,7 +50,8 @@ public void clear() { } @Override - public synchronized NamedList request(SolrRequest request, String coll) throws SolrServerException, IOException { + public synchronized NamedList request(@SuppressWarnings({"rawtypes"})SolrRequest request, + String coll) throws SolrServerException, IOException { if (coll == null) { if (request.getParams() != null) { coll = request.getParams().get("collection"); diff --git a/solr/core/src/java/org/apache/solr/util/PackageTool.java b/solr/core/src/java/org/apache/solr/util/PackageTool.java index 49d476f6a374..96d84ad7b125 100644 --- a/solr/core/src/java/org/apache/solr/util/PackageTool.java +++ b/solr/core/src/java/org/apache/solr/util/PackageTool.java @@ -26,7 +26,6 @@ import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Option; -import org.apache.commons.cli.OptionBuilder; import org.apache.commons.io.FileUtils; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.logging.log4j.Level; @@ -215,7 +214,7 @@ protected void runImpl(CommandLine cli) throws Exception { * @return A pair of package name (first) and version (second) */ private Pair parsePackageVersion(String arg) { - String splits[] = arg.split(":"); + String[] splits = arg.split(":"); if (splits.length > 2) { throw new SolrException(ErrorCode.BAD_REQUEST, "Invalid package name: " + arg + ". Didn't match the pattern: : or "); @@ -223,51 +222,50 @@ private Pair parsePackageVersion(String arg) { String packageName = splits[0]; String version = splits.length == 2? 
splits[1]: null; - return new Pair(packageName, version); + return new Pair<>(packageName, version); } - @SuppressWarnings("static-access") public Option[] getOptions() { return new Option[] { - OptionBuilder - .withArgName("URL") + Option.builder("solrUrl") + .argName("URL") .hasArg() - .isRequired(true) - .withDescription("Address of the Solr Web application, defaults to: " + SolrCLI.DEFAULT_SOLR_URL) - .create("solrUrl"), + .required(true) + .desc("Address of the Solr Web application, defaults to: " + SolrCLI.DEFAULT_SOLR_URL) + .build(), - OptionBuilder - .withArgName("COLLECTIONS") + Option.builder("collections") + .argName("COLLECTIONS") .hasArg() - .isRequired(false) - .withDescription("List of collections. Run './solr package help' for more details.") - .create("collections"), + .required(false) + .desc("List of collections. Run './solr package help' for more details.") + .build(), - OptionBuilder - .withArgName("PARAMS") + Option.builder("p") + .argName("PARAMS") .hasArgs() - .isRequired(false) - .withDescription("List of parameters to be used with deploy command. Run './solr package help' for more details.") - .withLongOpt("param") - .create("p"), + .required(false) + .desc("List of parameters to be used with deploy command. Run './solr package help' for more details.") + .longOpt("param") + .build(), - OptionBuilder - .isRequired(false) - .withDescription("If a deployment is an update over a previous deployment. Run './solr package help' for more details.") - .withLongOpt("update") - .create("u"), + Option.builder("u") + .required(false) + .desc("If a deployment is an update over a previous deployment. Run './solr package help' for more details.") + .longOpt("update") + .build(), - OptionBuilder - .isRequired(false) - .withDescription("Run './solr package help' for more details.") - .withLongOpt("collection") - .create("c"), + Option.builder("c") + .required(false) + .desc("Run './solr package help' for more details.") + .longOpt("collection") + .build(), - OptionBuilder - .isRequired(false) - .withDescription("Run './solr package help' for more details.") - .withLongOpt("noprompt") - .create("y") + Option.builder("y") + .required(false) + .desc("Run './solr package help' for more details.") + .longOpt("noprompt") + .build() }; } @@ -285,6 +283,7 @@ private String getZkHost(CommandLine cli) throws Exception { // convert raw JSON into user-friendly output StatusTool statusTool = new StatusTool(); Map status = statusTool.reportStatus(solrUrl+"/", systemInfo, httpClient); + @SuppressWarnings({"unchecked"}) Map cloud = (Map)status.get("cloud"); if (cloud != null) { String zookeeper = (String) cloud.get("ZooKeeper"); diff --git a/solr/core/src/java/org/apache/solr/util/RTimerTree.java b/solr/core/src/java/org/apache/solr/util/RTimerTree.java index 7e27669995d1..712bc4ab1b34 100644 --- a/solr/core/src/java/org/apache/solr/util/RTimerTree.java +++ b/solr/core/src/java/org/apache/solr/util/RTimerTree.java @@ -70,6 +70,7 @@ public String toString() { return asNamedList().toString(); } + @SuppressWarnings({"rawtypes"}) public NamedList asNamedList() { NamedList m = new SimpleOrderedMap<>(); m.add( "time", getTime() ); diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java index a263693ad69a..7260ec1031a7 100755 --- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java +++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java @@ -65,7 +65,6 @@ import org.apache.commons.cli.GnuParser; import 
org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; -import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.exec.DefaultExecuteResultHandler; @@ -107,7 +106,6 @@ import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.impl.HttpClientUtil; import org.apache.solr.client.solrj.impl.HttpSolrClient; -import org.apache.solr.client.solrj.impl.HttpSolrClient.Builder; import org.apache.solr.client.solrj.impl.SolrClientCloudManager; import org.apache.solr.client.solrj.impl.ZkClientClusterStateProvider; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -253,25 +251,24 @@ protected abstract void runCloudTool(CloudSolrClient cloudSolrClient, CommandLin public static final String DEFAULT_SOLR_URL = "http://localhost:8983/solr"; public static final String ZK_HOST = "localhost:9983"; - @SuppressWarnings("static-access") public static Option[] cloudOptions = new Option[] { - OptionBuilder - .withArgName("HOST") - .hasArg() - .isRequired(false) - .withDescription("Address of the Zookeeper ensemble; defaults to: "+ZK_HOST) - .create("zkHost"), - OptionBuilder - .withArgName("COLLECTION") - .hasArg() - .isRequired(false) - .withDescription("Name of collection; no default") - .withLongOpt("collection") - .create("c"), - OptionBuilder - .isRequired(false) - .withDescription("Enable more verbose command output.") - .create("verbose") + Option.builder("zkHost") + .argName("HOST") + .hasArg() + .required(false) + .desc("Address of the Zookeeper ensemble; defaults to: "+ZK_HOST) + .build(), + Option.builder("c") + .argName("COLLECTION") + .hasArg() + .required(false) + .desc("Name of collection; no default") + .longOpt("collection") + .build(), + Option.builder("verbose") + .required(false) + .desc("Enable more verbose command output.") + .build() }; private static void exit(int exitStatus) { @@ -330,7 +327,7 @@ public static CommandLine parseCmdLine(String[] args, Option[] toolOptions) thro CommandLine cli = processCommandLineArgs(joinCommonAndToolOptions(toolOptions), toolArgs); - List argList = cli.getArgList(); + List argList = cli.getArgList(); argList.addAll(dashDList); // for SSL support, try to accommodate relative paths set for SSL store props @@ -549,7 +546,7 @@ private static List> findToolClassesInPackage(String packageName) { Enumeration resources = classLoader.getResources(path); Set classes = new TreeSet(); while (resources.hasMoreElements()) { - URL resource = (URL) resources.nextElement(); + URL resource = resources.nextElement(); classes.addAll(findClasses(resource.getFile(), packageName)); } @@ -594,9 +591,9 @@ public static boolean checkCommunicationError(Exception exc) { Throwable rootCause = SolrException.getRootCause(exc); boolean wasCommError = (rootCause instanceof ConnectException || - rootCause instanceof ConnectTimeoutException || - rootCause instanceof NoHttpResponseException || - rootCause instanceof SocketException); + rootCause instanceof ConnectTimeoutException || + rootCause instanceof NoHttpResponseException || + rootCause instanceof SocketException); return wasCommError; } @@ -698,6 +695,7 @@ public static Map getJson(HttpClient httpClient, String getUrl, i return json; } + @SuppressWarnings("unchecked") private static class SolrResponseHandler implements ResponseHandler> { public Map handleResponse(HttpResponse response) throws ClientProtocolException, IOException { HttpEntity entity = 
response.getEntity(); @@ -711,7 +709,6 @@ public Map handleResponse(HttpResponse response) throws ClientPro throw new ClientProtocolException("Expected JSON response from server but received: "+respBody+ "\nTypically, this indicates a problem with the Solr server; check the Solr server logs for more information."); } - if (resp != null && resp instanceof Map) { return (Map)resp; } else { @@ -812,7 +809,7 @@ public static T pathAs(Class clazz, String jsonPath, Map j } else { // no ok if it's not null and of a different type throw new IllegalStateException("Expected a " + clazz.getName() + " at path "+ - jsonPath+" but found "+obj+" instead! "+json); + jsonPath+" but found "+obj+" instead! "+json); } } // it's ok if it is null return val; @@ -832,7 +829,7 @@ public static Object atPath(String jsonPath, Map json) { if (!jsonPath.startsWith("/")) throw new IllegalArgumentException("Invalid JSON path: "+ - jsonPath+"! Must start with a /"); + jsonPath+"! Must start with a /"); Map parent = json; Object result = null; @@ -878,75 +875,75 @@ public AutoscalingTool(PrintStream stdout) { @Override public Option[] getOptions() { return new Option[] { - OptionBuilder - .withArgName("HOST") + Option.builder("zkHost") + .argName("HOST") .hasArg() - .isRequired(false) - .withDescription("Address of the Zookeeper ensemble; defaults to: "+ZK_HOST) - .create("zkHost"), - OptionBuilder - .withArgName("CONFIG") + .required(false) + .desc("Address of the Zookeeper ensemble; defaults to: "+ZK_HOST) + .build(), + Option.builder("a") + .argName("CONFIG") .hasArg() - .isRequired(false) - .withDescription("Autoscaling config file, defaults to the one deployed in the cluster.") - .withLongOpt("config") - .create("a"), - OptionBuilder - .withDescription("Show calculated suggestions") - .withLongOpt("suggestions") - .create("s"), - OptionBuilder - .withDescription("Show ClusterState (collections layout)") - .withLongOpt("clusterState") - .create("c"), - OptionBuilder - .withDescription("Show calculated diagnostics") - .withLongOpt("diagnostics") - .create("d"), - OptionBuilder - .withDescription("Show sorted nodes with diagnostics") - .withLongOpt("sortedNodes") - .create("n"), - OptionBuilder - .withDescription("Redact node and collection names (original names will be consistently randomized)") - .withLongOpt("redact") - .create("r"), - OptionBuilder - .withDescription("Show summarized collection & node statistics.") - .create("stats"), - OptionBuilder - .withDescription("Store autoscaling snapshot of the current cluster.") - .withArgName("DIR") + .required(false) + .desc("Autoscaling config file, defaults to the one deployed in the cluster.") + .longOpt("config") + .build(), + Option.builder("s") + .desc("Show calculated suggestions") + .longOpt("suggestions") + .build(), + Option.builder("c") + .desc("Show ClusterState (collections layout)") + .longOpt("clusterState") + .build(), + Option.builder("d") + .desc("Show calculated diagnostics") + .longOpt("diagnostics") + .build(), + Option.builder("n") + .desc("Show sorted nodes with diagnostics") + .longOpt("sortedNodes") + .build(), + Option.builder("r") + .desc("Redact node and collection names (original names will be consistently randomized)") + .longOpt("redact") + .build(), + Option.builder("stats") + .desc("Show summarized collection & node statistics.") + .build(), + Option.builder("save") + .desc("Store autoscaling snapshot of the current cluster.") + .argName("DIR") .hasArg() - .create("save"), - OptionBuilder - .withDescription("Load autoscaling snapshot of 
the cluster instead of using the real one.") - .withArgName("DIR") + .build(), + Option.builder("load") + .desc("Load autoscaling snapshot of the cluster instead of using the real one.") + .argName("DIR") .hasArg() - .create("load"), - OptionBuilder - .withDescription("Simulate execution of all suggestions.") - .create("simulate"), - OptionBuilder - .withDescription("Max number of simulation iterations.") - .withArgName("NUMBER") + .build(), + Option.builder("simulate") + .desc("Simulate execution of all suggestions.") + .build(), + Option.builder("i") + .desc("Max number of simulation iterations.") + .argName("NUMBER") .hasArg() - .withLongOpt("iterations") - .create("i"), - OptionBuilder - .withDescription("Save autoscaling snapshots at each step of simulated execution.") - .withArgName("DIR") - .withLongOpt("saveSimulated") + .longOpt("iterations") + .build(), + Option.builder("ss") + .desc("Save autoscaling snapshots at each step of simulated execution.") + .argName("DIR") + .longOpt("saveSimulated") .hasArg() - .create("ss"), - OptionBuilder - .withDescription("Execute a scenario from a file (and ignore all other options).") - .withArgName("FILE") + .build(), + Option.builder("scenario") + .desc("Execute a scenario from a file (and ignore all other options).") + .argName("FILE") .hasArg() - .create("scenario"), - OptionBuilder - .withDescription("Turn on all options to get all available information.") - .create("all") + .build(), + Option.builder("all") + .desc("Turn on all options to get all available information.") + .build() }; } @@ -1085,7 +1082,6 @@ private Map prepareResults(SolrCloudManager clientCloudManager, Map results = new LinkedHashMap<>(); if (withClusterState) { Map map = new LinkedHashMap<>(); - map.put("znodeVersion", clusterState.getZNodeVersion()); map.put("liveNodes", new TreeSet<>(clusterState.getLiveNodes())); map.put("collections", clusterState.getCollectionsMap()); results.put("CLUSTERSTATE", map); @@ -1162,7 +1158,7 @@ private void simulate(SolrCloudManager cloudManager, perStep.put("opDetails", perStepOps); } for (Suggester.SuggestionInfo suggestion : suggestions) { - SolrRequest operation = suggestion.getOperation(); + SolrRequest operation = suggestion.getOperation(); if (operation == null) { unresolvedCount++; if (suggestion.getViolation() == null) { @@ -1244,21 +1240,20 @@ public String getName() { return "status"; } - @SuppressWarnings("static-access") public Option[] getOptions() { return new Option[] { - OptionBuilder - .withArgName("URL") - .hasArg() - .isRequired(false) - .withDescription("Address of the Solr Web application, defaults to: "+DEFAULT_SOLR_URL) - .create("solr"), - OptionBuilder - .withArgName("SECS") - .hasArg() - .isRequired(false) - .withDescription("Wait up to the specified number of seconds to see Solr running.") - .create("maxWaitSecs") + Option.builder("solr") + .argName("URL") + .hasArg() + .required(false) + .desc("Address of the Solr Web application, defaults to: "+DEFAULT_SOLR_URL) + .build(), + Option.builder("maxWaitSecs") + .argName("SECS") + .hasArg() + .required(false) + .desc("Wait up to the specified number of seconds to see Solr running.") + .build() }; } @@ -1299,7 +1294,7 @@ public Map waitToSeeSolrUp(String solrUrl, int maxWaitSecs) throw try { return getStatus(solrUrl); } catch (SSLPeerUnverifiedException exc) { - throw exc; + throw exc; } catch (Exception exc) { if (exceptionIsAuthRelated(exc)) { throw exc; @@ -1394,15 +1389,14 @@ public String getName() { return "api"; } - @SuppressWarnings("static-access") 
public Option[] getOptions() { return new Option[] { - OptionBuilder - .withArgName("URL") - .hasArg() - .isRequired(false) - .withDescription("Send a GET request to a Solr API endpoint") - .create("get") + Option.builder("get") + .argName("URL") + .hasArg() + .required(false) + .desc("Send a GET request to a Solr API endpoint") + .build() }; } @@ -1451,7 +1445,7 @@ static class ReplicaHealth implements Comparable { String memory; ReplicaHealth(String shard, String name, String url, String status, - long numDocs, boolean isLeader, String uptime, String memory) { + long numDocs, boolean isLeader, String uptime, String memory) { this.shard = shard; this.name = name; this.url = url; @@ -1703,64 +1697,64 @@ protected void runCloudTool(CloudSolrClient cloudSolrClient, CommandLine cli) th } // end HealthcheckTool private static final Option[] CREATE_COLLECTION_OPTIONS = new Option[] { - OptionBuilder - .withArgName("HOST") - .hasArg() - .isRequired(false) - .withDescription("Address of the Zookeeper ensemble; defaults to: " + ZK_HOST) - .create("zkHost"), - OptionBuilder - .withArgName("HOST") - .hasArg() - .isRequired(false) - .withDescription("Base Solr URL, which can be used to determine the zkHost if that's not known") - .create("solrUrl"), - OptionBuilder - .withArgName("NAME") - .hasArg() - .isRequired(true) - .withDescription("Name of collection to create.") - .create(NAME), - OptionBuilder - .withArgName("#") - .hasArg() - .isRequired(false) - .withDescription("Number of shards; default is 1") - .create("shards"), - OptionBuilder - .withArgName("#") - .hasArg() - .isRequired(false) - .withDescription("Number of copies of each document across the collection (replicas per shard); default is 1") - .create("replicationFactor"), - OptionBuilder - .withArgName("#") - .hasArg() - .isRequired(false) - .withDescription("Maximum number of shards per Solr node; default is determined based on the number of shards, replication factor, and live nodes.") - .create("maxShardsPerNode"), - OptionBuilder - .withArgName("NAME") - .hasArg() - .isRequired(false) - .withDescription("Configuration directory to copy when creating the new collection; default is "+DEFAULT_CONFIG_SET) - .create("confdir"), - OptionBuilder - .withArgName("NAME") - .hasArg() - .isRequired(false) - .withDescription("Configuration name; default is the collection name") - .create("confname"), - OptionBuilder - .withArgName("DIR") - .hasArg() - .isRequired(true) - .withDescription("Path to configsets directory on the local system.") - .create("configsetsDir"), - OptionBuilder - .isRequired(false) - .withDescription("Enable more verbose command output.") - .create("verbose") + Option.builder("zkHost") + .argName("HOST") + .hasArg() + .required(false) + .desc("Address of the Zookeeper ensemble; defaults to: " + ZK_HOST) + .build(), + Option.builder("solrUrl") + .argName("HOST") + .hasArg() + .required(false) + .desc("Base Solr URL, which can be used to determine the zkHost if that's not known") + .build(), + Option.builder(NAME) + .argName("NAME") + .hasArg() + .required(true) + .desc("Name of collection to create.") + .build(), + Option.builder("shards") + .argName("#") + .hasArg() + .required(false) + .desc("Number of shards; default is 1") + .build(), + Option.builder("replicationFactor") + .argName("#") + .hasArg() + .required(false) + .desc("Number of copies of each document across the collection (replicas per shard); default is 1") + .build(), + Option.builder("maxShardsPerNode") + .argName("#") + .hasArg() + .required(false) + 
.desc("Maximum number of shards per Solr node; default is determined based on the number of shards, replication factor, and live nodes.") + .build(), + Option.builder("confdir") + .argName("NAME") + .hasArg() + .required(false) + .desc("Configuration directory to copy when creating the new collection; default is "+DEFAULT_CONFIG_SET) + .build(), + Option.builder("confname") + .argName("NAME") + .hasArg() + .required(false) + .desc("Configuration name; default is the collection name") + .build(), + Option.builder("configsetsDir") + .argName("DIR") + .hasArg() + .required(true) + .desc("Path to configsets directory on the local system.") + .build(), + Option.builder("verbose") + .required(false) + .desc("Enable more verbose command output.") + .build() }; @@ -1814,6 +1808,7 @@ public static String getZkHost(CommandLine cli) throws Exception { // convert raw JSON into user-friendly output StatusTool statusTool = new StatusTool(); Map status = statusTool.reportStatus(solrUrl, systemInfo, httpClient); + @SuppressWarnings("unchecked") Map cloud = (Map)status.get("cloud"); if (cloud != null) { String zookeeper = (String) cloud.get("ZooKeeper"); @@ -1833,6 +1828,7 @@ public static boolean safeCheckCollectionExists(String url, String collection) { boolean exists = false; try { Map existsCheckResult = getJson(url); + @SuppressWarnings("unchecked") List collections = (List) existsCheckResult.get("collections"); exists = collections != null && collections.contains(collection); } catch (Exception exc) { @@ -1852,8 +1848,11 @@ public static boolean safeCheckCoreExists(String coreStatusUrl, String coreName) Thread.sleep(clamPeriodForStatusPollMs); } Map existsCheckResult = getJson(coreStatusUrl); + @SuppressWarnings("unchecked") Map status = (Map)existsCheckResult.get("status"); + @SuppressWarnings("unchecked") Map coreStatus = (Map)status.get(coreName); + @SuppressWarnings("unchecked") Map failureStatus = (Map)existsCheckResult.get("initFailures"); String errorMsg = (String) failureStatus.get(coreName); final boolean hasName = coreStatus != null && coreStatus.containsKey(NAME); @@ -1884,7 +1883,6 @@ public String getName() { return "create_collection"; } - @SuppressWarnings("static-access") public Option[] getOptions() { return CREATE_COLLECTION_OPTIONS; } @@ -2013,37 +2011,36 @@ public String getName() { return "create_core"; } - @SuppressWarnings("static-access") public Option[] getOptions() { return new Option[] { - OptionBuilder - .withArgName("URL") + Option.builder("solrUrl") + .argName("URL") .hasArg() - .isRequired(false) - .withDescription("Base Solr URL, default is " + DEFAULT_SOLR_URL) - .create("solrUrl"), - OptionBuilder - .withArgName("NAME") + .required(false) + .desc("Base Solr URL, default is " + DEFAULT_SOLR_URL) + .build(), + Option.builder(NAME) + .argName("NAME") .hasArg() - .isRequired(true) - .withDescription("Name of the core to create.") - .create(NAME), - OptionBuilder - .withArgName("CONFIG") + .required(true) + .desc("Name of the core to create.") + .build(), + Option.builder("confdir") + .argName("CONFIG") .hasArg() - .isRequired(false) - .withDescription("Configuration directory to copy when creating the new core; default is "+DEFAULT_CONFIG_SET) - .create("confdir"), - OptionBuilder - .withArgName("DIR") + .required(false) + .desc("Configuration directory to copy when creating the new core; default is "+DEFAULT_CONFIG_SET) + .build(), + Option.builder("configsetsDir") + .argName("DIR") .hasArg() - .isRequired(true) - .withDescription("Path to configsets directory on the 
local system.") - .create("configsetsDir"), - OptionBuilder - .isRequired(false) - .withDescription("Enable more verbose command output.") - .create("verbose") + .required(true) + .desc("Path to configsets directory on the local system.") + .build(), + Option.builder("verbose") + .required(false) + .desc("Enable more verbose command output.") + .build() }; } @@ -2153,7 +2150,6 @@ public String getName() { return "create"; } - @SuppressWarnings("static-access") public Option[] getOptions() { return CREATE_COLLECTION_OPTIONS; } @@ -2193,37 +2189,36 @@ public ConfigSetUploadTool(PrintStream stdout) { super(stdout); } - @SuppressWarnings("static-access") public Option[] getOptions() { return new Option[]{ - OptionBuilder - .withArgName("confname") // Comes out in help message + Option.builder("confname") + .argName("confname") // Comes out in help message .hasArg() // Has one sub-argument - .isRequired(true) // confname argument must be present - .withDescription("Configset name on Zookeeper") - .create("confname"), // passed as -confname value - OptionBuilder - .withArgName("confdir") + .required(true) // confname argument must be present + .desc("Configset name on Zookeeper") + .build(), // passed as -confname value + Option.builder("confdir") + .argName("confdir") .hasArg() - .isRequired(true) - .withDescription("Local directory with configs") - .create("confdir"), - OptionBuilder - .withArgName("configsetsDir") + .required(true) + .desc("Local directory with configs") + .build(), + Option.builder("configsetsDir") + .argName("configsetsDir") .hasArg() - .isRequired(false) - .withDescription("Parent directory of example configsets") - .create("configsetsDir"), - OptionBuilder - .withArgName("HOST") + .required(false) + .desc("Parent directory of example configsets") + .build(), + Option.builder("zkHost") + .argName("HOST") .hasArg() - .isRequired(true) - .withDescription("Address of the Zookeeper ensemble; defaults to: " + ZK_HOST) - .create("zkHost"), - OptionBuilder - .isRequired(false) - .withDescription("Enable more verbose command output.") - .create("verbose") + .required(true) + .desc("Address of the Zookeeper ensemble; defaults to: " + ZK_HOST) + .build(), + Option.builder("verbose") + .required(false) + .desc("Enable more verbose command output.") + .build() }; } @@ -2266,31 +2261,30 @@ public ConfigSetDownloadTool(PrintStream stdout) { super(stdout); } - @SuppressWarnings("static-access") public Option[] getOptions() { return new Option[]{ - OptionBuilder - .withArgName("confname") + Option.builder("confname") + .argName("confname") .hasArg() - .isRequired(true) - .withDescription("Configset name on Zookeeper") - .create("confname"), - OptionBuilder - .withArgName("confdir") + .required(true) + .desc("Configset name on Zookeeper") + .build(), + Option.builder("confdir") + .argName("confdir") .hasArg() - .isRequired(true) - .withDescription("Local directory with configs") - .create("confdir"), - OptionBuilder - .withArgName("HOST") + .required(true) + .desc("Local directory with configs") + .build(), + Option.builder("zkHost") + .argName("HOST") .hasArg() - .isRequired(true) - .withDescription("Address of the Zookeeper ensemble; defaults to: " + ZK_HOST) - .create("zkHost"), - OptionBuilder - .isRequired(false) - .withDescription("Enable more verbose command output.") - .create("verbose") + .required(true) + .desc("Address of the Zookeeper ensemble; defaults to: " + ZK_HOST) + .build(), + Option.builder("verbose") + .required(false) + .desc("Enable more verbose command output.") + 
.build() }; } @@ -2336,37 +2330,36 @@ public static class ZkRmTool extends ToolBase { public ZkRmTool() { this(CLIO.getOutStream()); - } + } public ZkRmTool(PrintStream stdout) { super(stdout); - } + } - @SuppressWarnings("static-access") public Option[] getOptions() { return new Option[]{ - OptionBuilder - .withArgName("path") + Option.builder("path") + .argName("path") .hasArg() - .isRequired(true) - .withDescription("Path to remove") - .create("path"), - OptionBuilder - .withArgName("recurse") + .required(true) + .desc("Path to remove") + .build(), + Option.builder("recurse") + .argName("recurse") .hasArg() - .isRequired(false) - .withDescription("Recurse (true|false, default is false)") - .create("recurse"), - OptionBuilder - .withArgName("HOST") + .required(false) + .desc("Recurse (true|false, default is false)") + .build(), + Option.builder("zkHost") + .argName("HOST") .hasArg() - .isRequired(true) - .withDescription("Address of the Zookeeper ensemble; defaults to: " + ZK_HOST) - .create("zkHost"), - OptionBuilder - .isRequired(false) - .withDescription("Enable more verbose command output.") - .create("verbose") + .required(true) + .desc("Address of the Zookeeper ensemble; defaults to: " + ZK_HOST) + .build(), + Option.builder("verbose") + .required(false) + .desc("Enable more verbose command output.") + .build() }; } @@ -2419,31 +2412,30 @@ public ZkLsTool(PrintStream stdout) { super(stdout); } - @SuppressWarnings("static-access") public Option[] getOptions() { return new Option[]{ - OptionBuilder - .withArgName("path") + Option.builder("path") + .argName("path") .hasArg() - .isRequired(true) - .withDescription("Path to list") - .create("path"), - OptionBuilder - .withArgName("recurse") + .required(true) + .desc("Path to list") + .build(), + Option.builder("recurse") + .argName("recurse") .hasArg() - .isRequired(false) - .withDescription("Recurse (true|false, default is false)") - .create("recurse"), - OptionBuilder - .withArgName("HOST") + .required(false) + .desc("Recurse (true|false, default is false)") + .build(), + Option.builder("zkHost") + .argName("HOST") .hasArg() - .isRequired(true) - .withDescription("Address of the Zookeeper ensemble; defaults to: " + ZK_HOST) - .create("zkHost"), - OptionBuilder - .isRequired(false) - .withDescription("Enable more verbose command output.") - .create("verbose") + .required(true) + .desc("Address of the Zookeeper ensemble; defaults to: " + ZK_HOST) + .build(), + Option.builder("verbose") + .required(false) + .desc("Enable more verbose command output.") + .build() }; } @@ -2487,25 +2479,24 @@ public ZkMkrootTool(PrintStream stdout) { super(stdout); } - @SuppressWarnings("static-access") public Option[] getOptions() { return new Option[]{ - OptionBuilder - .withArgName("path") + Option.builder("path") + .argName("path") .hasArg() - .isRequired(true) - .withDescription("Path to create") - .create("path"), - OptionBuilder - .withArgName("HOST") + .required(true) + .desc("Path to create") + .build(), + Option.builder("zkHost") + .argName("HOST") .hasArg() - .isRequired(true) - .withDescription("Address of the Zookeeper ensemble; defaults to: " + ZK_HOST) - .create("zkHost"), - OptionBuilder - .isRequired(false) - .withDescription("Enable more verbose command output.") - .create("verbose") + .required(true) + .desc("Address of the Zookeeper ensemble; defaults to: " + ZK_HOST) + .build(), + Option.builder("verbose") + .required(false) + .desc("Enable more verbose command output.") + .build() }; } @@ -2549,37 +2540,36 @@ public 
ZkCpTool(PrintStream stdout) { super(stdout); } - @SuppressWarnings("static-access") public Option[] getOptions() { return new Option[]{ - OptionBuilder - .withArgName("src") + Option.builder("src") + .argName("src") .hasArg() - .isRequired(true) - .withDescription("Source file or directory, may be local or a Znode") - .create("src"), - OptionBuilder - .withArgName("dst") + .required(true) + .desc("Source file or directory, may be local or a Znode") + .build(), + Option.builder("dst") + .argName("dst") .hasArg() - .isRequired(true) - .withDescription("Destination of copy, may be local or a Znode.") - .create("dst"), - OptionBuilder - .withArgName("recurse") + .required(true) + .desc("Destination of copy, may be local or a Znode.") + .build(), + Option.builder("recurse") + .argName("recurse") .hasArg() - .isRequired(false) - .withDescription("Recurse (true|false, default is false)") - .create("recurse"), - OptionBuilder - .withArgName("HOST") + .required(false) + .desc("Recurse (true|false, default is false)") + .build(), + Option.builder("zkHost") + .argName("HOST") .hasArg() - .isRequired(true) - .withDescription("Address of the Zookeeper ensemble; defaults to: " + ZK_HOST) - .create("zkHost"), - OptionBuilder - .isRequired(false) - .withDescription("Enable more verbose command output.") - .create("verbose") + .required(true) + .desc("Address of the Zookeeper ensemble; defaults to: " + ZK_HOST) + .build(), + Option.builder("verbose") + .required(false) + .desc("Enable more verbose command output.") + .build() }; } @@ -2639,31 +2629,30 @@ public ZkMvTool(PrintStream stdout) { super(stdout); } - @SuppressWarnings("static-access") public Option[] getOptions() { return new Option[]{ - OptionBuilder - .withArgName("src") + Option.builder("src") + .argName("src") .hasArg() - .isRequired(true) - .withDescription("Source Znode to movej from.") - .create("src"), + .required(true) + .desc("Source Znode to move from.") + .build(), - OptionBuilder - .withArgName("dst") + Option.builder("dst") + .argName("dst") .hasArg() - .isRequired(true) - .withDescription("Destination Znode to move to.") - .create("dst"), - OptionBuilder - .withArgName("HOST") + .required(true) + .desc("Destination Znode to move to.") + .build(), + Option.builder("zkHost") + .argName("HOST") .hasArg() - .isRequired(true) - .withDescription("Address of the Zookeeper ensemble; defaults to: " + ZK_HOST) - .create("zkHost"), - OptionBuilder - .isRequired(false) - .withDescription("Enable more verbose command output.") - .create("verbose") + .required(true) + .desc("Address of the Zookeeper ensemble; defaults to: " + ZK_HOST) + .build(), + Option.builder("verbose") + .required(false) + .desc("Enable more verbose command output.") + .build() }; } @@ -2719,41 +2708,40 @@ public String getName() { return "delete"; } - @SuppressWarnings("static-access") public Option[] getOptions() { return new Option[]{ - OptionBuilder - .withArgName("URL") + Option.builder("solrUrl") + .argName("URL") .hasArg() - .isRequired(false) - .withDescription("Base Solr URL, default is " + DEFAULT_SOLR_URL) - .create("solrUrl"), - OptionBuilder - .withArgName("NAME") + .required(false) + .desc("Base Solr URL, default is " + DEFAULT_SOLR_URL) + .build(), + Option.builder(NAME) + .argName("NAME") .hasArg() - .isRequired(true) - .withDescription("Name of the core / collection to delete.") - .create(NAME), - OptionBuilder - .withArgName("true|false") + .required(true) + .desc("Name of the core / collection to delete.") + .build(), + Option.builder("deleteConfig") + 
.argName("true|false") .hasArg() - .isRequired(false) - .withDescription("Flag to indicate if the underlying configuration directory for a collection should also be deleted; default is true") - .create("deleteConfig"), - OptionBuilder - .isRequired(false) - .withDescription("Skip safety checks when deleting the configuration directory used by a collection") - .create("forceDeleteConfig"), - OptionBuilder - .withArgName("HOST") + .required(false) + .desc("Flag to indicate if the underlying configuration directory for a collection should also be deleted; default is true") + .build(), + Option.builder("forceDeleteConfig") + .required(false) + .desc("Skip safety checks when deleting the configuration directory used by a collection") + .build(), + Option.builder("zkHost") + .argName("HOST") .hasArg() - .isRequired(false) - .withDescription("Address of the Zookeeper ensemble; defaults to: "+ZK_HOST) - .create("zkHost"), - OptionBuilder - .isRequired(false) - .withDescription("Enable more verbose command output.") - .create("verbose") + .required(false) + .desc("Address of the Zookeeper ensemble; defaults to: "+ZK_HOST) + .build(), + Option.builder("verbose") + .required(false) + .desc("Enable more verbose command output.") + .build() }; } @@ -2851,7 +2839,7 @@ protected void deleteCollection(CloudSolrClient cloudSolrClient, CommandLine cli zkStateReader.getZkClient().clean(configZnode); } catch (Exception exc) { echo("\nWARNING: Failed to delete configuration directory "+configZnode+" in ZooKeeper due to: "+ - exc.getMessage()+"\nYou'll need to manually delete this znode using the zkcli script."); + exc.getMessage()+"\nYou'll need to manually delete this znode using the zkcli script."); } } @@ -2903,55 +2891,54 @@ public String getName() { return "config"; } - @SuppressWarnings("static-access") @Override public Option[] getOptions() { Option[] configOptions = new Option[] { - OptionBuilder - .withArgName("ACTION") + Option.builder("action") + .argName("ACTION") .hasArg() - .isRequired(false) - .withDescription("Config API action, one of: set-property, unset-property; default is set-property") - .create("action"), - OptionBuilder - .withArgName("PROP") + .required(false) + .desc("Config API action, one of: set-property, unset-property; default is set-property") + .build(), + Option.builder("property") + .argName("PROP") .hasArg() - .isRequired(true) - .withDescription("Name of the Config API property to apply the action to, such as: updateHandler.autoSoftCommit.maxTime") - .create("property"), - OptionBuilder - .withArgName("VALUE") + .required(true) + .desc("Name of the Config API property to apply the action to, such as: updateHandler.autoSoftCommit.maxTime") + .build(), + Option.builder("value") + .argName("VALUE") .hasArg() - .isRequired(false) - .withDescription("Set the property to this value; accepts JSON objects and strings") - .create("value"), - OptionBuilder - .withArgName("HOST") + .required(false) + .desc("Set the property to this value; accepts JSON objects and strings") + .build(), + Option.builder("solrUrl") + .argName("HOST") .hasArg() - .isRequired(false) - .withDescription("Base Solr URL, which can be used to determine the zkHost if that's not known") - .create("solrUrl"), - OptionBuilder - .withArgName("HOST") + .required(false) + .desc("Base Solr URL, which can be used to determine the zkHost if that's not known") + .build(), + Option.builder("z") + .argName("HOST") .hasArg() - .isRequired(false) - .withDescription("Address of the Zookeeper ensemble") - .withLongOpt("zkHost") 
- .create('z'), - OptionBuilder - .withArgName("PORT") + .required(false) + .desc("Address of the Zookeeper ensemble") + .longOpt("zkHost") + .build(), + Option.builder("p") + .argName("PORT") .hasArg() - .isRequired(false) - .withDescription("The port of the Solr node to use when applying configuration change") - .withLongOpt("port") - .create('p'), - OptionBuilder - .withArgName("SCHEME") + .required(false) + .desc("The port of the Solr node to use when applying configuration change") + .longOpt("port") + .build(), + Option.builder("s") + .argName("SCHEME") .hasArg() - .isRequired(false) - .withDescription("The scheme for accessing Solr. Accepted values: http or https. Default: http") - .withLongOpt("scheme") - .create('s') + .required(false) + .desc("The scheme for accessing Solr. Accepted values: http or https. Default: http") + .longOpt("scheme") + .build() }; return joinOptions(configOptions, cloudOptions); } @@ -2993,7 +2980,7 @@ protected void runImpl(CommandLine cli) throws Exception { echo("\nPOSTing request to Config API: " + solrUrl + updatePath); echo(jsonBody); - try (SolrClient solrClient = new Builder(solrUrl).build()) { + try (SolrClient solrClient = new HttpSolrClient.Builder(solrUrl).build()) { NamedList result = postJsonToSolr(solrClient, updatePath, jsonBody); Integer statusCode = (Integer)((NamedList)result.get("responseHeader")).get("status"); if (statusCode == 0) { @@ -3044,89 +3031,88 @@ public String getName() { return "run_example"; } - @SuppressWarnings("static-access") public Option[] getOptions() { return new Option[] { - OptionBuilder - .isRequired(false) - .withDescription("Don't prompt for input; accept all defaults when running examples that accept user input") - .create("noprompt"), - OptionBuilder - .withArgName("NAME") - .hasArg() - .isRequired(true) - .withDescription("Name of the example to launch, one of: cloud, techproducts, dih, schemaless") - .withLongOpt("example") - .create('e'), - OptionBuilder - .withArgName("PATH") - .hasArg() - .isRequired(false) - .withDescription("Path to the bin/solr script") - .create("script"), - OptionBuilder - .withArgName("DIR") - .hasArg() - .isRequired(true) - .withDescription("Path to the Solr server directory.") - .withLongOpt("serverDir") - .create('d'), - OptionBuilder - .withArgName("FORCE") - .withDescription("Force option in case Solr is run as root") - .create("force"), - OptionBuilder - .withArgName("DIR") - .hasArg() - .isRequired(false) - .withDescription("Path to the Solr example directory; if not provided, ${serverDir}/../example is expected to exist.") - .create("exampleDir"), - OptionBuilder - .withArgName("SCHEME") - .hasArg() - .isRequired(false) - .withDescription("Solr URL scheme: http or https, defaults to http if not specified") - .create("urlScheme"), - OptionBuilder - .withArgName("PORT") - .hasArg() - .isRequired(false) - .withDescription("Specify the port to start the Solr HTTP listener on; default is 8983") - .withLongOpt("port") - .create('p'), - OptionBuilder - .withArgName("HOSTNAME") - .hasArg() - .isRequired(false) - .withDescription("Specify the hostname for this Solr instance") - .withLongOpt("host") - .create('h'), - OptionBuilder - .withArgName("ZKHOST") - .hasArg() - .isRequired(false) - .withDescription("ZooKeeper connection string; only used when running in SolrCloud mode using -c") - .withLongOpt("zkhost") - .create('z'), - OptionBuilder - .isRequired(false) - .withDescription("Start Solr in SolrCloud mode; if -z not supplied, an embedded ZooKeeper instance is started on Solr 
port+1000, such as 9983 if Solr is bound to 8983") - .withLongOpt("cloud") - .create('c'), - OptionBuilder - .withArgName("MEM") - .hasArg() - .isRequired(false) - .withDescription("Sets the min (-Xms) and max (-Xmx) heap size for the JVM, such as: -m 4g results in: -Xms4g -Xmx4g; by default, this script sets the heap size to 512m") - .withLongOpt("memory") - .create('m'), - OptionBuilder - .withArgName("OPTS") - .hasArg() - .isRequired(false) - .withDescription("Additional options to be passed to the JVM when starting example Solr server(s)") - .withLongOpt("addlopts") - .create('a') + Option.builder("noprompt") + .required(false) + .desc("Don't prompt for input; accept all defaults when running examples that accept user input") + .build(), + Option.builder("e") + .argName("NAME") + .hasArg() + .required(true) + .desc("Name of the example to launch, one of: cloud, techproducts, dih, schemaless") + .longOpt("example") + .build(), + Option.builder("script") + .argName("PATH") + .hasArg() + .required(false) + .desc("Path to the bin/solr script") + .build(), + Option.builder("d") + .argName("DIR") + .hasArg() + .required(true) + .desc("Path to the Solr server directory.") + .longOpt("serverDir") + .build(), + Option.builder("force") + .argName("FORCE") + .desc("Force option in case Solr is run as root") + .build(), + Option.builder("exampleDir") + .argName("DIR") + .hasArg() + .required(false) + .desc("Path to the Solr example directory; if not provided, ${serverDir}/../example is expected to exist.") + .build(), + Option.builder("urlScheme") + .argName("SCHEME") + .hasArg() + .required(false) + .desc("Solr URL scheme: http or https, defaults to http if not specified") + .build(), + Option.builder("p") + .argName("PORT") + .hasArg() + .required(false) + .desc("Specify the port to start the Solr HTTP listener on; default is 8983") + .longOpt("port") + .build(), + Option.builder("h") + .argName("HOSTNAME") + .hasArg() + .required(false) + .desc("Specify the hostname for this Solr instance") + .longOpt("host") + .build(), + Option.builder("z") + .argName("ZKHOST") + .hasArg() + .required(false) + .desc("ZooKeeper connection string; only used when running in SolrCloud mode using -c") + .longOpt("zkhost") + .build(), + Option.builder("c") + .required(false) + .desc("Start Solr in SolrCloud mode; if -z not supplied, an embedded ZooKeeper instance is started on Solr port+1000, such as 9983 if Solr is bound to 8983") + .longOpt("cloud") + .build(), + Option.builder("m") + .argName("MEM") + .hasArg() + .required(false) + .desc("Sets the min (-Xms) and max (-Xmx) heap size for the JVM, such as: -m 4g results in: -Xms4g -Xmx4g; by default, this script sets the heap size to 512m") + .longOpt("memory") + .build(), + Option.builder("a") + .argName("OPTS") + .hasArg() + .required(false) + .desc("Additional options to be passed to the JVM when starting example Solr server(s)") + .longOpt("addlopts") + .build() }; } @@ -3342,6 +3328,7 @@ protected void runCloudExample(CommandLine cli) throws Exception { startSolr(new File(node1Dir,"solr"), true, cli, cloudPorts[0], zkHost, 30); if (zkHost == null) { + @SuppressWarnings("unchecked") Map cloudStatus = (Map)nodeStatus.get("cloud"); if (cloudStatus != null) { String zookeeper = (String)cloudStatus.get("ZooKeeper"); @@ -3809,76 +3796,75 @@ public String getName() { return "assert"; } - @SuppressWarnings("static-access") public Option[] getOptions() { return new Option[] { - OptionBuilder - .withDescription("Asserts that we are NOT the root user") - 
.withLongOpt("not-root") - .create("R"), - OptionBuilder - .withDescription("Asserts that we are the root user") - .withLongOpt("root") - .create("r"), - OptionBuilder - .withDescription("Asserts that Solr is NOT running on a certain URL. Default timeout is 1000ms") - .withLongOpt("not-started") + Option.builder("R") + .desc("Asserts that we are NOT the root user") + .longOpt("not-root") + .build(), + Option.builder("r") + .desc("Asserts that we are the root user") + .longOpt("root") + .build(), + Option.builder("S") + .desc("Asserts that Solr is NOT running on a certain URL. Default timeout is 1000ms") + .longOpt("not-started") .hasArg(true) - .withArgName("url") - .create("S"), - OptionBuilder - .withDescription("Asserts that Solr is running on a certain URL. Default timeout is 1000ms") - .withLongOpt("started") + .argName("url") + .build(), + Option.builder("s") + .desc("Asserts that Solr is running on a certain URL. Default timeout is 1000ms") + .longOpt("started") .hasArg(true) - .withArgName("url") - .create("s"), - OptionBuilder - .withDescription("Asserts that we run as same user that owns ") - .withLongOpt("same-user") + .argName("url") + .build(), + Option.builder("u") + .desc("Asserts that we run as same user that owns ") + .longOpt("same-user") .hasArg(true) - .withArgName("directory") - .create("u"), - OptionBuilder - .withDescription("Asserts that directory exists") - .withLongOpt("exists") + .argName("directory") + .build(), + Option.builder("x") + .desc("Asserts that directory exists") + .longOpt("exists") .hasArg(true) - .withArgName("directory") - .create("x"), - OptionBuilder - .withDescription("Asserts that directory does NOT exist") - .withLongOpt("not-exists") + .argName("directory") + .build(), + Option.builder("X") + .desc("Asserts that directory does NOT exist") + .longOpt("not-exists") .hasArg(true) - .withArgName("directory") - .create("X"), - OptionBuilder - .withDescription("Asserts that Solr is running in cloud mode. Also fails if Solr not running. URL should be for root Solr path.") - .withLongOpt("cloud") + .argName("directory") + .build(), + Option.builder("c") + .desc("Asserts that Solr is running in cloud mode. Also fails if Solr not running. URL should be for root Solr path.") + .longOpt("cloud") .hasArg(true) - .withArgName("url") - .create("c"), - OptionBuilder - .withDescription("Asserts that Solr is not running in cloud mode. Also fails if Solr not running. URL should be for root Solr path.") - .withLongOpt("not-cloud") + .argName("url") + .build(), + Option.builder("C") + .desc("Asserts that Solr is not running in cloud mode. Also fails if Solr not running. 
URL should be for root Solr path.") + .longOpt("not-cloud") .hasArg(true) - .withArgName("url") - .create("C"), - OptionBuilder - .withDescription("Exception message to be used in place of the default error message") - .withLongOpt("message") + .argName("url") + .build(), + Option.builder("m") + .desc("Exception message to be used in place of the default error message") + .longOpt("message") .hasArg(true) - .withArgName("message") - .create("m"), - OptionBuilder - .withDescription("Timeout in ms for commands supporting a timeout") - .withLongOpt("timeout") + .argName("message") + .build(), + Option.builder("t") + .desc("Timeout in ms for commands supporting a timeout") + .longOpt("timeout") .hasArg(true) - .withType(Long.class) - .withArgName("ms") - .create("t"), - OptionBuilder - .withDescription("Return an exit code instead of printing error message on assert fail.") - .withLongOpt("exitcode") - .create("e") + .type(Long.class) + .argName("ms") + .build(), + Option.builder("e") + .desc("Return an exit code instead of printing error message on assert fail.") + .longOpt("exitcode") + .build() }; } @@ -4132,65 +4118,64 @@ public String getName() { List authenticationVariables = Arrays.asList("SOLR_AUTHENTICATION_CLIENT_BUILDER", "SOLR_AUTH_TYPE", "SOLR_AUTHENTICATION_OPTS"); - @SuppressWarnings("static-access") public Option[] getOptions() { return new Option[]{ - OptionBuilder - .withArgName("type") - .hasArg() - .withDescription("The authentication mechanism to enable (basicAuth or kerberos). Defaults to 'basicAuth'.") - .create("type"), - OptionBuilder - .withArgName("credentials") - .hasArg() - .withDescription("Credentials in the format username:password. Example: -credentials solr:SolrRocks") - .create("credentials"), - OptionBuilder - .withArgName("prompt") - .hasArg() - .withDescription("Prompts the user to provide the credentials. Use either -credentials or -prompt, not both") - .create("prompt"), - OptionBuilder - .withArgName("config") - .hasArgs() - .withDescription("Configuration parameters (Solr startup parameters). Required for Kerberos authentication") - .create("config"), - OptionBuilder - .withArgName("blockUnknown") - .withDescription("Blocks all access for unknown users (requires authentication for all endpoints)") - .hasArg() - .create("blockUnknown"), - OptionBuilder - .withArgName("solrIncludeFile") - .hasArg() - .withDescription("The Solr include file which contains overridable environment variables for configuring Solr configurations") - .create("solrIncludeFile"), - OptionBuilder - .withArgName("updateIncludeFileOnly") - .withDescription("Only update the solr.in.sh or solr.in.cmd file, and skip actual enabling/disabling" - + " authentication (i.e. don't update security.json)") - .hasArg() - .create("updateIncludeFileOnly"), - OptionBuilder - .withArgName("authConfDir") - .hasArg() - .isRequired() - .withDescription("This is where any authentication related configuration files, if any, would be placed.") - .create("authConfDir"), - OptionBuilder - .withArgName("solrUrl") - .hasArg() - .withDescription("Solr URL") - .create("solrUrl"), - OptionBuilder - .withArgName("zkHost") - .hasArg() - .withDescription("ZooKeeper host") - .create("zkHost"), - OptionBuilder - .isRequired(false) - .withDescription("Enable more verbose command output.") - .create("verbose") + Option.builder("type") + .argName("type") + .hasArg() + .desc("The authentication mechanism to enable (basicAuth or kerberos). 
Defaults to 'basicAuth'.") + .build(), + Option.builder("credentials") + .argName("credentials") + .hasArg() + .desc("Credentials in the format username:password. Example: -credentials solr:SolrRocks") + .build(), + Option.builder("prompt") + .argName("prompt") + .hasArg() + .desc("Prompts the user to provide the credentials. Use either -credentials or -prompt, not both") + .build(), + Option.builder("config") + .argName("config") + .hasArgs() + .desc("Configuration parameters (Solr startup parameters). Required for Kerberos authentication") + .build(), + Option.builder("blockUnknown") + .argName("blockUnknown") + .desc("Blocks all access for unknown users (requires authentication for all endpoints)") + .hasArg() + .build(), + Option.builder("solrIncludeFile") + .argName("solrIncludeFile") + .hasArg() + .desc("The Solr include file which contains overridable environment variables for configuring Solr configurations") + .build(), + Option.builder("updateIncludeFileOnly") + .argName("updateIncludeFileOnly") + .desc("Only update the solr.in.sh or solr.in.cmd file, and skip actual enabling/disabling" + + " authentication (i.e. don't update security.json)") + .hasArg() + .build(), + Option.builder("authConfDir") + .argName("authConfDir") + .hasArg() + .required() + .desc("This is where any authentication related configuration files, if any, would be placed.") + .build(), + Option.builder("solrUrl") + .argName("solrUrl") + .hasArg() + .desc("Solr URL") + .build(), + Option.builder("zkHost") + .argName("zkHost") + .hasArg() + .desc("ZooKeeper host") + .build(), + Option.builder("verbose") + .required(false) + .desc("Enable more verbose command output.") + .build() }; } @@ -4233,10 +4218,10 @@ private int handleKerberos(CommandLine cli) throws Exception { String cmd = cli.getArgs()[0]; boolean updateIncludeFileOnly = Boolean.parseBoolean(cli.getOptionValue("updateIncludeFileOnly", "false")); String securityJson = "{" + - "\n \"authentication\":{" + - "\n \"class\":\"solr.KerberosPlugin\"" + - "\n }" + - "\n}"; + "\n \"authentication\":{" + + "\n \"class\":\"solr.KerberosPlugin\"" + + "\n }" + + "\n}"; switch (cmd) { @@ -4249,7 +4234,7 @@ private int handleKerberos(CommandLine cli) throws Exception { zkHost = getZkHost(cli); } catch (Exception ex) { CLIO.out("Unable to access ZooKeeper. Please add the following security.json to ZooKeeper (in case of SolrCloud):\n" - + securityJson + "\n"); + + securityJson + "\n"); zkInaccessible = true; } if (zkHost == null) { @@ -4611,40 +4596,39 @@ public String getName() { return "utils"; } - @SuppressWarnings("static-access") public Option[] getOptions() { return new Option[]{ - OptionBuilder - .withArgName("path") + Option.builder("s") + .argName("path") .hasArg() - .withDescription("Path to server dir. Required if logs path is relative") - .create("s"), - OptionBuilder - .withArgName("path") + .desc("Path to server dir. Required if logs path is relative") + .build(), + Option.builder("l") + .argName("path") .hasArg() - .withDescription("Path to logs dir. If relative, also provide server dir with -s") - .create("l"), - OptionBuilder - .withDescription("Be quiet, don't print to stdout, only return exit codes") - .create("q"), - OptionBuilder - .withArgName("daysToKeep") + .desc("Path to logs dir. 
If relative, also provide server dir with -s") + .build(), + Option.builder("q") + .desc("Be quiet, don't print to stdout, only return exit codes") + .build(), + Option.builder("remove_old_solr_logs") + .argName("daysToKeep") .hasArg() - .withType(Integer.class) - .withDescription("Path to logs directory") - .create("remove_old_solr_logs"), - OptionBuilder - .withArgName("generations") + .type(Integer.class) + .desc("Path to logs directory") + .build(), + Option.builder("rotate_solr_logs") + .argName("generations") .hasArg() - .withType(Integer.class) - .withDescription("Rotate solr.log to solr.log.1 etc") - .create("rotate_solr_logs"), - OptionBuilder - .withDescription("Archive old garbage collection logs into archive/") - .create("archive_gc_logs"), - OptionBuilder - .withDescription("Archive old console logs into archive/") - .create("archive_console_logs") + .type(Integer.class) + .desc("Rotate solr.log to solr.log.1 etc") + .build(), + Option.builder("archive_gc_logs") + .desc("Archive old garbage collection logs into archive/") + .build(), + Option.builder("archive_console_logs") + .desc("Archive old console logs into archive/") + .build() }; } @@ -4756,7 +4740,7 @@ public int rotateSolrLogs(int generations) throws Exception { try (Stream files = Files.find(logsPath, 1, (f, a) -> a.isRegularFile() && String.valueOf(f.getFileName()).startsWith("solr.log.")) .sorted((b,a) -> Integer.valueOf(a.getFileName().toString().substring(9)) - .compareTo(Integer.valueOf(b.getFileName().toString().substring(9))))) { + .compareTo(Integer.valueOf(b.getFileName().toString().substring(9))))) { files.forEach(p -> { try { int number = Integer.parseInt(p.getFileName().toString().substring(9)); diff --git a/solr/core/src/java/org/apache/solr/util/SolrFileCleaningTracker.java b/solr/core/src/java/org/apache/solr/util/SolrFileCleaningTracker.java deleted file mode 100644 index 9c66f0feadb9..000000000000 --- a/solr/core/src/java/org/apache/solr/util/SolrFileCleaningTracker.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.solr.util; - -import java.io.File; -import java.lang.ref.PhantomReference; -import java.lang.ref.ReferenceQueue; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; - -import org.apache.commons.io.FileCleaningTracker; -import org.apache.commons.io.FileDeleteStrategy; - -public class SolrFileCleaningTracker extends FileCleaningTracker { - - ReferenceQueue q = new ReferenceQueue<>(); - - final Collection trackers = Collections.synchronizedSet(new HashSet()); - - final List deleteFailures = Collections.synchronizedList(new ArrayList()); - - volatile boolean exitWhenFinished = false; - - Thread reaper; - - public void track(final File file, final Object marker) { - track(file, marker, null); - } - - public void track(final File file, final Object marker, final FileDeleteStrategy deleteStrategy) { - if (file == null) { - throw new NullPointerException("The file must not be null"); - } - addTracker(file.getPath(), marker, deleteStrategy); - } - - public void track(final String path, final Object marker) { - track(path, marker, null); - } - - public void track(final String path, final Object marker, final FileDeleteStrategy deleteStrategy) { - if (path == null) { - throw new NullPointerException("The path must not be null"); - } - addTracker(path, marker, deleteStrategy); - } - - private synchronized void addTracker(final String path, final Object marker, - final FileDeleteStrategy deleteStrategy) { - if (exitWhenFinished) { - throw new IllegalStateException("No new trackers can be added once exitWhenFinished() is called"); - } - if (reaper == null) { - reaper = new Reaper(); - reaper.start(); - } - trackers.add(new Tracker(path, deleteStrategy, marker, q)); - } - - public int getTrackCount() { - return trackers.size(); - } - - public List getDeleteFailures() { - return deleteFailures; - } - - public synchronized void exitWhenFinished() { - // synchronized block protects reaper - exitWhenFinished = true; - if (reaper != null) { - synchronized (reaper) { - reaper.interrupt(); - try { - reaper.join(); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } - } - } - } - - private final class Reaper extends Thread { - Reaper() { - super("MultiPart Upload Tmp File Reaper"); - setDaemon(true); - } - - @Override - public void run() { - while (exitWhenFinished == false || trackers.size() > 0) { - try { - // Wait for a tracker to remove. - final Tracker tracker = (Tracker) q.remove(); // cannot return null - trackers.remove(tracker); - if (!tracker.delete()) { - deleteFailures.add(tracker.getPath()); - } - tracker.clear(); - } catch (final InterruptedException e) { - Thread.currentThread().interrupt(); - break; - } - } - } - } - - private static final class Tracker extends PhantomReference { - - private final String path; - - private final FileDeleteStrategy deleteStrategy; - - Tracker(final String path, final FileDeleteStrategy deleteStrategy, final Object marker, - final ReferenceQueue queue) { - super(marker, queue); - this.path = path; - this.deleteStrategy = deleteStrategy == null ? 
FileDeleteStrategy.NORMAL : deleteStrategy; - } - - public String getPath() { - return path; - } - - public boolean delete() { - return deleteStrategy.deleteQuietly(new File(path)); - } - } - -} \ No newline at end of file diff --git a/solr/core/src/java/org/apache/solr/util/SolrLogLayout.java b/solr/core/src/java/org/apache/solr/util/SolrLogLayout.java index f7c02ce364ca..0280eee45337 100644 --- a/solr/core/src/java/org/apache/solr/util/SolrLogLayout.java +++ b/solr/core/src/java/org/apache/solr/util/SolrLogLayout.java @@ -237,6 +237,7 @@ public String _format(LogEvent event) { return sb.toString(); } + @SuppressWarnings({"unchecked"}) private Map getReplicaProps(ZkController zkController, SolrCore core) { final String collectionName = core.getCoreDescriptor().getCloudDescriptor().getCollectionName(); DocCollection collection = zkController.getClusterState().getCollectionOrNull(collectionName); diff --git a/solr/core/src/java/org/apache/solr/util/SolrLogPostTool.java b/solr/core/src/java/org/apache/solr/util/SolrLogPostTool.java index 249146d4b21f..7f3a56759048 100644 --- a/solr/core/src/java/org/apache/solr/util/SolrLogPostTool.java +++ b/solr/core/src/java/org/apache/solr/util/SolrLogPostTool.java @@ -18,17 +18,24 @@ import java.io.*; import java.nio.charset.Charset; +import java.util.Collections; import java.util.List; import java.util.ArrayList; import java.net.URLDecoder; +import java.util.Map; +import java.util.TreeMap; import java.util.UUID; import java.util.regex.Pattern; import java.util.regex.Matcher; + +import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputField; +import org.apache.solr.handler.component.ShardRequest; + /** @@ -62,7 +69,7 @@ public static void main(String[] args) throws Exception { try { client = builder.withBaseSolrUrl(baseUrl).build(); File rf = new File(root); - List files = new ArrayList(); + List files = new ArrayList<>(); gatherFiles(rf, files); int rec = 0; UpdateRequest request = new UpdateRequest(); @@ -91,13 +98,11 @@ public static void main(String[] args) throws Exception { rec++; UUID id = UUID.randomUUID(); - doc.addField("id", id.toString()); - doc.addField("file_s", fileName); + doc.setField("id", id.toString()); + doc.setField("file_s", fileName); request.add(doc); if (rec == 300) { - CLIO.out("Sending batch of 300 log records..."); - request.process(client); - CLIO.out("Batch sent"); + sendBatch(client, request, false /* normal batch */); request = new UpdateRequest(); rec = 0; } @@ -108,17 +113,35 @@ public static void main(String[] args) throws Exception { } if (rec > 0) { - //Process last batch - CLIO.out("Sending last batch ..."); - request.process(client); - client.commit(); - CLIO.out("Committed"); + sendBatch(client, request, true /* last batch */); } } finally { client.close(); } } + private static void sendBatch(SolrClient client, UpdateRequest request, boolean lastRequest) throws SolrServerException, IOException { + final String beginMessage = lastRequest ? "Sending last batch ..." 
: "Sending batch of 300 log records..."; + CLIO.out(beginMessage); + try { + request.process(client); + CLIO.out("Batch sent"); + } catch (Exception e) { + CLIO.err("Batch sending failed: " + e.getMessage()); + e.printStackTrace(CLIO.getErrStream()); + } + + if (lastRequest) { + try { + client.commit(); + CLIO.out("Committed"); + } catch (Exception e) { + CLIO.err("Unable to commit documents: " + e.getMessage()); + e.printStackTrace(CLIO.getErrStream()); + } + } + } + static void gatherFiles(File rootFile, List files) { if(rootFile.isFile()) { @@ -163,20 +186,25 @@ public SolrInputDocument readRecord() throws IOException { } if (line != null) { + SolrInputDocument lineDoc = new SolrInputDocument(); + lineDoc.setField("date_dt", parseDate(line)); + lineDoc.setField("line_t", line); + lineDoc.setField("type_s", "other"); // Overridden by known types below + if (line.contains("Registered new searcher")) { - return parseNewSearch(line); + parseNewSearch(lineDoc, line); } else if (line.contains("path=/update")) { - return parseUpdate(line); + parseUpdate(lineDoc, line); } else if (line.contains(" ERROR ")) { this.cause = null; - return parseError(line, readTrace()); + parseError(lineDoc, line, readTrace()); } else if (line.contains("start commit")) { - return parseCommit(line); + parseCommit(lineDoc, line); } else if(line.contains("QTime=")) { - return parseQueryRecord(line); - } else { - continue; + parseQueryRecord(lineDoc, line); } + + return lineDoc; } else { return null; } @@ -223,101 +251,79 @@ private String parseDate(String line) { return null; } - private SolrInputDocument parseError(String line, String trace) throws IOException { - SolrInputDocument doc = new SolrInputDocument(); - doc.addField("date_dt", parseDate(line)); - doc.addField("type_s", "error"); - doc.addField("line_t", line); + private void setFieldIfUnset(SolrInputDocument doc, String fieldName, String fieldValue) { + if (doc.containsKey(fieldName)) return; + + doc.setField(fieldName, fieldValue); + } + + private void parseError(SolrInputDocument lineRecord, String line, String trace) throws IOException { + lineRecord.setField("type_s", "error"); //Don't include traces that have only the %html header. 
if(trace != null && trace.length() > 6) { - doc.addField("stack_t", trace); + lineRecord.setField("stack_t", trace); } if(this.cause != null) { - doc.addField("root_cause_t", cause.replace("Caused by:", "").trim()); + lineRecord.setField("root_cause_t", cause.replace("Caused by:", "").trim()); } - doc.addField("collection_s", parseCollection(line)); - doc.addField("core_s", parseCore(line)); - doc.addField("shard_s", parseShard(line)); - doc.addField("replica_s", parseReplica(line)); - - return doc; + lineRecord.setField("collection_s", parseCollection(line)); + lineRecord.setField("core_s", parseCore(line)); + lineRecord.setField("shard_s", parseShard(line)); + lineRecord.setField("replica_s", parseReplica(line)); } - private SolrInputDocument parseCommit(String line) throws IOException { - SolrInputDocument doc = new SolrInputDocument(); - doc.addField("date_dt", parseDate(line)); - doc.addField("type_s", "commit"); - doc.addField("line_t", line); - if(line.contains("softCommit=true")) { - doc.addField("soft_commit_s", "true"); - } else { - doc.addField("soft_commit_s", "false"); - } - - if(line.contains("openSearcher=true")) { - doc.addField("open_searcher_s", "true"); - } else { - doc.addField("open_searcher_s", "false"); - } + private void parseCommit(SolrInputDocument lineRecord, String line) throws IOException { + lineRecord.setField("type_s", "commit"); + lineRecord.setField("soft_commit_s", Boolean.toString(line.contains("softCommit=true"))); - doc.addField("collection_s", parseCollection(line)); - doc.addField("core_s", parseCore(line)); - doc.addField("shard_s", parseShard(line)); - doc.addField("replica_s", parseReplica(line)); + lineRecord.setField("open_searcher_s", Boolean.toString(line.contains("openSearcher=true"))); - return doc; + lineRecord.setField("collection_s", parseCollection(line)); + lineRecord.setField("core_s", parseCore(line)); + lineRecord.setField("shard_s", parseShard(line)); + lineRecord.setField("replica_s", parseReplica(line)); } - private SolrInputDocument parseQueryRecord(String line) { - - SolrInputDocument doc = new SolrInputDocument(); - doc.addField("date_dt", parseDate(line)); - doc.addField("qtime_i", parseQTime(line)); - doc.addField("status_s", parseStatus(line)); + private void parseQueryRecord(SolrInputDocument lineRecord, String line) { + lineRecord.setField("qtime_i", parseQTime(line)); + lineRecord.setField("status_s", parseStatus(line)); String path = parsePath(line); - doc.addField("path_s", path); + lineRecord.setField("path_s", path); if(line.contains("hits=")) { - doc.addField("hits_l", parseHits(line)); + lineRecord.setField("hits_l", parseHits(line)); } String params = parseParams(line); - doc.addField("params_t", params); - addParams(doc, params); + lineRecord.setField("params_t", params); + addParams(lineRecord, params); - doc.addField("collection_s", parseCollection(line)); - doc.addField("core_s", parseCore(line)); - doc.addField("node_s", parseNode(line)); - doc.addField("shard_s", parseShard(line)); - doc.addField("replica_s", parseReplica(line)); + lineRecord.setField("collection_s", parseCollection(line)); + lineRecord.setField("core_s", parseCore(line)); + lineRecord.setField("node_s", parseNode(line)); + lineRecord.setField("shard_s", parseShard(line)); + lineRecord.setField("replica_s", parseReplica(line)); if(path != null && path.contains("/admin")) { - doc.addField("type_s", "admin"); + lineRecord.setField("type_s", "admin"); } else if(path != null && params.contains("/replication")) { - doc.addField("type_s", 
"replication"); + lineRecord.setField("type_s", "replication"); } else if (path != null && path.contains("/get")) { - doc.addField("type_s", "get"); + lineRecord.setField("type_s", "get"); } else { - doc.addField("type_s", "query"); + lineRecord.setField("type_s", "query"); } - - return doc; } - private SolrInputDocument parseNewSearch(String line) { - - SolrInputDocument doc = new SolrInputDocument(); - doc.addField("date_dt", parseDate(line)); - doc.addField("core_s", parseNewSearcherCore(line)); - doc.addField("type_s", "newSearcher"); - doc.addField("line_t", line); - return doc; + private void parseNewSearch(SolrInputDocument lineRecord, String line) { + lineRecord.setField("core_s", parseNewSearcherCore(line)); + lineRecord.setField("type_s", "newSearcher"); } private String parseCollection(String line) { @@ -330,25 +336,19 @@ private String parseCollection(String line) { } } - private SolrInputDocument parseUpdate(String line) { - SolrInputDocument doc = new SolrInputDocument(); - doc.addField("date_dt", parseDate(line)); - + private void parseUpdate(SolrInputDocument lineRecord, String line) { if(line.contains("deleteByQuery=")) { - doc.addField("type_s", "deleteByQuery"); + lineRecord.setField("type_s", "deleteByQuery"); } else if(line.contains("delete=")) { - doc.addField("type_s", "delete"); + lineRecord.setField("type_s", "delete"); } else { - doc.addField("type_s", "update"); + lineRecord.setField("type_s", "update"); } - doc.addField("collection_s", parseCollection(line)); - doc.addField("core_s", parseCore(line)); - doc.addField("shard_s", parseShard(line)); - doc.addField("replica_s", parseReplica(line)); - doc.addField("line_t", line); - - return doc; + lineRecord.setField("collection_s", parseCollection(line)); + lineRecord.setField("core_s", parseCore(line)); + lineRecord.setField("shard_s", parseShard(line)); + lineRecord.setField("replica_s", parseReplica(line)); } private String parseNewSearcherCore(String line) { @@ -468,66 +468,72 @@ private String readUntil(String s, char[] chars) { return builder.toString(); } + private void addOrReplaceFieldValue(SolrInputDocument doc, String fieldName, String fieldValue) { + doc.setField(fieldName, fieldValue); + } + private void addParams(SolrInputDocument doc, String params) { String[] pairs = params.split("&"); for(String pair : pairs) { String[] parts = pair.split("="); if(parts.length == 2 && parts[0].equals("q")) { String dq = URLDecoder.decode(parts[1], Charset.defaultCharset()); - doc.addField("q_s", dq); - doc.addField("q_t", dq); + setFieldIfUnset(doc, "q_s", dq); + setFieldIfUnset(doc, "q_t", dq); } if(parts[0].equals("rows")) { String dr = URLDecoder.decode(parts[1], Charset.defaultCharset()); - doc.addField("rows_i", dr); + setFieldIfUnset(doc, "rows_i", dr); } if(parts[0].equals("distrib")) { String dr = URLDecoder.decode(parts[1], Charset.defaultCharset()); - doc.addField("distrib_s", dr); + setFieldIfUnset(doc, "distrib_s", dr); } if(parts[0].equals("shards")) { - doc.addField("shards_s", "true"); + setFieldIfUnset(doc, "shards_s", "true"); } - if(parts[0].equals("ids") && ! 
isRTGRequest(doc)) { - doc.addField("ids_s", "true"); + if(parts[0].equals("ids") && !isRTGRequest(doc)) { + setFieldIfUnset(doc, "ids_s", "true"); } if(parts[0].equals("isShard")) { String dr = URLDecoder.decode(parts[1], Charset.defaultCharset()); - doc.addField("isShard_s", dr); + setFieldIfUnset(doc, "isShard_s", dr); } if(parts[0].equals("wt")) { String dr = URLDecoder.decode(parts[1], Charset.defaultCharset()); - doc.addField("wt_s", dr); + setFieldIfUnset(doc, "wt_s", dr); } if(parts[0].equals("facet")) { String dr = URLDecoder.decode(parts[1], Charset.defaultCharset()); - doc.addField("facet_s", dr); + setFieldIfUnset(doc, "facet_s", dr); } - } + if(parts[0].equals("shards.purpose")) { + try { + int purpose = Integer.parseInt(parts[1]); + String[] purposes = getRequestPurposeNames(purpose); + for (String p : purposes) { + doc.addField("purpose_ss", p); + } + } catch(Throwable e) { + //We'll just sit on this for now and not interrupt the load for this one field. + } + } + } //Special params used to determine what stage a query is. //So we populate with defaults. //The absence of the distrib params means its a distributed query. - - if(doc.getField("distrib_s") == null) { - doc.addField("distrib_s", "true"); - } - - if(doc.getField("shards_s") == null) { - doc.addField("shards_s", "false"); - } - - if(doc.getField("ids_s") == null) { - doc.addField("ids_s", "false"); - } + setFieldIfUnset(doc, "distrib_s", "true"); + setFieldIfUnset(doc, "shards_s", "false"); + setFieldIfUnset(doc, "ids_s", "false"); } private boolean isRTGRequest(SolrInputDocument doc) { @@ -538,4 +544,53 @@ private boolean isRTGRequest(SolrInputDocument doc) { return "/get".equals(path.getValue()); } } + + private static final Map<Integer, String> purposes; + protected static final String UNKNOWN_VALUE = "Unknown"; + private static final String[] purposeUnknown = new String[] { UNKNOWN_VALUE }; + + public static String[] getRequestPurposeNames(Integer reqPurpose) { + if (reqPurpose != null) { + int valid = 0; + for (Map.Entry<Integer, String> entry : purposes.entrySet()) { + if ((reqPurpose & entry.getKey()) != 0) { + valid++; + } + } + if (valid == 0) { + return purposeUnknown; + } else { + String[] result = new String[valid]; + int i = 0; + for (Map.Entry<Integer, String> entry : purposes.entrySet()) { + if ((reqPurpose & entry.getKey()) != 0) { + result[i] = entry.getValue(); + i++; + } + } + return result; + } + } + return purposeUnknown; + } + + static { + Map<Integer, String> map = new TreeMap<>(); + map.put(ShardRequest.PURPOSE_PRIVATE, "PRIVATE"); + map.put(ShardRequest.PURPOSE_GET_TOP_IDS, "GET_TOP_IDS"); + map.put(ShardRequest.PURPOSE_REFINE_TOP_IDS, "REFINE_TOP_IDS"); + map.put(ShardRequest.PURPOSE_GET_FACETS, "GET_FACETS"); + map.put(ShardRequest.PURPOSE_REFINE_FACETS, "REFINE_FACETS"); + map.put(ShardRequest.PURPOSE_GET_FIELDS, "GET_FIELDS"); + map.put(ShardRequest.PURPOSE_GET_HIGHLIGHTS, "GET_HIGHLIGHTS"); + map.put(ShardRequest.PURPOSE_GET_DEBUG, "GET_DEBUG"); + map.put(ShardRequest.PURPOSE_GET_STATS, "GET_STATS"); + map.put(ShardRequest.PURPOSE_GET_TERMS, "GET_TERMS"); + map.put(ShardRequest.PURPOSE_GET_TOP_GROUPS, "GET_TOP_GROUPS"); + map.put(ShardRequest.PURPOSE_GET_MLT_RESULTS, "GET_MLT_RESULTS"); + map.put(ShardRequest.PURPOSE_REFINE_PIVOT_FACETS, "REFINE_PIVOT_FACETS"); + map.put(ShardRequest.PURPOSE_SET_TERM_STATS, "SET_TERM_STATS"); + map.put(ShardRequest.PURPOSE_GET_TERM_STATS, "GET_TERM_STATS"); + purposes = Collections.unmodifiableMap(map); + } } \ No newline at end of file diff --git a/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java 
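
The shards.purpose handling added above treats the parameter as a bitmask: each ShardRequest.PURPOSE_* constant occupies its own bit, and getRequestPurposeNames collects the name of every bit set in the combined value. A small standalone sketch of the same decoding idea, with made-up flag values standing in for the real ShardRequest constants:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;
    import java.util.Map;
    import java.util.TreeMap;

    public class PurposeDecoder {
      // Illustrative bit flags; the real values live in ShardRequest.
      private static final Map<Integer, String> PURPOSES;
      static {
        Map<Integer, String> map = new TreeMap<>();
        map.put(0x1, "PRIVATE");
        map.put(0x2, "GET_TOP_IDS");
        map.put(0x4, "REFINE_TOP_IDS");
        map.put(0x8, "GET_FACETS");
        PURPOSES = Collections.unmodifiableMap(map);
      }

      // Collect the name of every flag whose bit is set in 'purpose'.
      static List<String> decode(int purpose) {
        List<String> names = new ArrayList<>();
        for (Map.Entry<Integer, String> e : PURPOSES.entrySet()) {
          if ((purpose & e.getKey()) != 0) {
            names.add(e.getValue());
          }
        }
        return names.isEmpty() ? Collections.singletonList("Unknown") : names;
      }

      public static void main(String[] args) {
        // 0x2 | 0x8 has two bits set, so two purpose names come back.
        System.out.println(decode(0x2 | 0x8)); // [GET_TOP_IDS, GET_FACETS]
      }
    }
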
b/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java index 9bc8d257a4a0..da80c859c54b 100644 --- a/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java +++ b/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java @@ -196,19 +196,15 @@ public static int numDocs(SolrIndexSearcher s, Query q, Query f) } + private static final Pattern SPLIT_PATTERN = Pattern.compile("[\\s,]+"); // space or comma - - - - - private final static Pattern splitList=Pattern.compile(",| "); - - /** Split a value that may contain a comma, space of bar separated list. */ - public static String[] split(String value){ - return splitList.split(value.trim(), 0); + /** Split a value between spaces and/or commas. No need to trim anything. */ + public static String[] split(String value) { + // TODO consider moving / adapting this into a new StrUtils.splitSmart variant? + // TODO deprecate; it's only used by two callers? + return SPLIT_PATTERN.split(value.trim()); } - /** * Pre-fetch documents into the index searcher's document cache. * @@ -327,6 +323,7 @@ public static Set getDebugInterests(String[] params, ResponseBuilder rb) * @return The debug info * @throws java.io.IOException if there was an IO error */ + @SuppressWarnings({"rawtypes"}) public static NamedList doStandardDebug( SolrQueryRequest req, String userQuery, @@ -343,12 +340,13 @@ public static NamedList doStandardDebug( } + @SuppressWarnings({"unchecked"}) public static void doStandardQueryDebug( SolrQueryRequest req, String userQuery, Query query, boolean dbgQuery, - NamedList dbg) + @SuppressWarnings({"rawtypes"})NamedList dbg) { if (dbgQuery) { /* userQuery may have been pre-processed .. expose that */ @@ -364,12 +362,13 @@ public static void doStandardQueryDebug( } } + @SuppressWarnings({"unchecked"}) public static void doStandardResultsDebug( SolrQueryRequest req, Query query, DocList results, boolean dbgResults, - NamedList dbg) throws IOException + @SuppressWarnings({"rawtypes"})NamedList dbg) throws IOException { if (dbgResults) { SolrIndexSearcher searcher = req.getSearcher(); @@ -848,7 +847,7 @@ public static NamedList removeNulls(Map.Entry[] entries, Named * {@code resultIds} is. {@code resultIds} comes from {@link ResponseBuilder#resultIds}. If the doc key * isn't in {@code resultIds} then it is ignored. * Note: most likely you will call {@link #removeNulls(Map.Entry[], NamedList)} sometime after calling this. 
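
The SPLIT_PATTERN change at the top of this SolrPluginUtils hunk is behavioral, not just cosmetic: the old alternation ",| " split on every single comma or space, so separators like ", " produced empty tokens, while "[\s,]+" treats any run of whitespace and commas as one delimiter. A quick standalone demonstration:

    import java.util.Arrays;
    import java.util.regex.Pattern;

    public class SplitComparison {
      public static void main(String[] args) {
        String value = "title, body,  author";

        // Old pattern: a single ',' OR a single ' ' is a delimiter, so
        // ", " and the double space yield empty tokens.
        Pattern oldPattern = Pattern.compile(",| ");
        System.out.println(Arrays.toString(oldPattern.split(value.trim(), 0)));
        // -> [title, , body, , , author]

        // New pattern: any run of whitespace and/or commas is one delimiter.
        Pattern newPattern = Pattern.compile("[\\s,]+");
        System.out.println(Arrays.toString(newPattern.split(value.trim())));
        // -> [title, body, author]
      }
    }
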
*/ - public static void copyNamedListIntoArrayByDocPosInResponse(NamedList namedList, Map resultIds, + public static void copyNamedListIntoArrayByDocPosInResponse(@SuppressWarnings({"rawtypes"})NamedList namedList, Map resultIds, Map.Entry[] destArr) { assert resultIds.size() == destArr.length; for (int i = 0; i < namedList.size(); i++) { @@ -973,7 +972,7 @@ public static Sort getSort(SolrQueryRequest req) { /* we definitely had some sort of sort string from the user, * but no SortSpec came out of it */ - log.warn("Invalid sort \""+sort+"\" was specified, ignoring", sortE); + log.warn("Invalid sort '{}' was specified, ignoring", sort, sortE); return null; } diff --git a/solr/core/src/java/org/apache/solr/util/TestInjection.java b/solr/core/src/java/org/apache/solr/util/TestInjection.java index 3298628ff940..651d26c1d808 100644 --- a/solr/core/src/java/org/apache/solr/util/TestInjection.java +++ b/solr/core/src/java/org/apache/solr/util/TestInjection.java @@ -71,9 +71,11 @@ public TestShutdownFailError(String msg) { * If non-null, then this class should be used for accessing random entropy * @see #random */ + @SuppressWarnings({"rawtypes"}) private static final Class LUCENE_TEST_CASE; static { + @SuppressWarnings({"rawtypes"}) Class nonFinalTemp = null; try { ClassLoader classLoader = MethodHandles.lookup().lookupClass().getClassLoader(); @@ -94,6 +96,7 @@ static Random random() { // non-private for testing return null; } else { try { + @SuppressWarnings({"unchecked"}) Method randomMethod = LUCENE_TEST_CASE.getMethod("random"); return (Random) randomMethod.invoke(null); } catch (Exception e) { diff --git a/solr/core/src/java/org/apache/solr/util/configuration/SSLCredentialProviderFactory.java b/solr/core/src/java/org/apache/solr/util/configuration/SSLCredentialProviderFactory.java index 4d38ea2cfdfd..ec1782e08818 100644 --- a/solr/core/src/java/org/apache/solr/util/configuration/SSLCredentialProviderFactory.java +++ b/solr/core/src/java/org/apache/solr/util/configuration/SSLCredentialProviderFactory.java @@ -38,6 +38,7 @@ public class SSLCredentialProviderFactory { public static final String DEFAULT_PROVIDER_CHAIN = "env;sysprop"; public static final String PROVIDER_CHAIN_KEY = "solr.ssl.credential.provider.chain"; + @SuppressWarnings({"rawtypes"}) private final static ImmutableMap defaultProviders = ImmutableMap.of( "env", EnvSSLCredentialProvider.class, "sysprop", SysPropSSLCredentialProvider.class, @@ -82,7 +83,8 @@ private SSLCredentialProvider getProviderByClassName(String clazzName) { } } - private SSLCredentialProvider getDefaultProvider(Class aClass) { + @SuppressWarnings({"unchecked"}) + private SSLCredentialProvider getDefaultProvider(@SuppressWarnings({"rawtypes"})Class aClass) { try { return (SSLCredentialProvider) aClass.getConstructor().newInstance(); } catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { diff --git a/solr/core/src/java/org/apache/solr/util/plugin/NamedListInitializedPlugin.java b/solr/core/src/java/org/apache/solr/util/plugin/NamedListInitializedPlugin.java index ce2a7c891256..e611b8b33ee5 100644 --- a/solr/core/src/java/org/apache/solr/util/plugin/NamedListInitializedPlugin.java +++ b/solr/core/src/java/org/apache/solr/util/plugin/NamedListInitializedPlugin.java @@ -25,5 +25,5 @@ * @since solr 1.3 */ public interface NamedListInitializedPlugin { - void init( NamedList args ); + void init( @SuppressWarnings({"rawtypes"})NamedList args ); } diff --git 
a/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java b/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java index 50e0e593214c..c4fdd515d562 100644 --- a/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java +++ b/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java @@ -174,6 +174,7 @@ static void toSolrInputDocument(String prefix, SolrInputDocument doc, Object o) doc.addField(key, o); return; } + @SuppressWarnings({"unchecked"}) Map map = (Map)o; for (Map.Entry entry : map.entrySet()) { if (entry.getValue() instanceof Map) { // flatten recursively @@ -269,6 +270,7 @@ public static void convertMetric(String n, Metric metric, PropertyFilter propert Counter counter = (Counter) metric; convertCounter(n, counter, propertyFilter, compact, consumer); } else if (metric instanceof Gauge) { + @SuppressWarnings({"rawtypes"}) Gauge gauge = (Gauge) metric; try { convertGauge(n, gauge, propertyFilter, simple, compact, separator, consumer); @@ -482,8 +484,10 @@ static void convertMeter(String name, Meter meter, PropertyFilter propertyFilter * then return a map with a "value" field. * @param consumer consumer that accepts produced objects */ - static void convertGauge(String name, Gauge gauge, PropertyFilter propertyFilter, boolean simple, boolean compact, - String separator, BiConsumer consumer) { + static void convertGauge(String name, + @SuppressWarnings({"rawtypes"})Gauge gauge, + PropertyFilter propertyFilter, boolean simple, boolean compact, + String separator, BiConsumer consumer) { if (compact || simple) { Object o = gauge.getValue(); if (o instanceof Map) { diff --git a/solr/core/src/java/org/apache/solr/util/tracing/SolrRequestCarrier.java b/solr/core/src/java/org/apache/solr/util/tracing/SolrRequestCarrier.java index 8dcb02543aab..f5c613b98b76 100644 --- a/solr/core/src/java/org/apache/solr/util/tracing/SolrRequestCarrier.java +++ b/solr/core/src/java/org/apache/solr/util/tracing/SolrRequestCarrier.java @@ -28,9 +28,10 @@ */ public class SolrRequestCarrier implements TextMap { + @SuppressWarnings({"rawtypes"}) private final SolrRequest solrRequest; - public SolrRequestCarrier(SolrRequest solrRequest) { + public SolrRequestCarrier(@SuppressWarnings({"rawtypes"})SolrRequest solrRequest) { this.solrRequest = solrRequest; } diff --git a/solr/core/src/java/org/apache/solr/util/xslt/TransformerProvider.java b/solr/core/src/java/org/apache/solr/util/xslt/TransformerProvider.java index 825a146efed1..baae45df49eb 100644 --- a/solr/core/src/java/org/apache/solr/util/xslt/TransformerProvider.java +++ b/solr/core/src/java/org/apache/solr/util/xslt/TransformerProvider.java @@ -59,10 +59,9 @@ public class TransformerProvider { private TransformerProvider() { // tell'em: currently, we only cache the last used XSLT transform, and blindly recompile it // once cacheLifetimeSeconds expires - log.warn("{}{}{}" - , "The TransformerProvider's simplistic XSLT caching mechanism is not appropriate " - , "for high load scenarios, unless a single XSLT transform is used" - , " and xsltCacheLifetimeSeconds is set to a sufficiently high value."); + log.warn("The TransformerProvider's simplistic XSLT caching mechanism is not appropriate " + + "for high load scenarios, unless a single XSLT transform is used" + + " and xsltCacheLifetimeSeconds is set to a sufficiently high value."); } /** Return a new Transformer, possibly created from our cached Templates object diff --git a/solr/core/src/test-files/solr/solr-50-all.xml b/solr/core/src/test-files/solr/solr-50-all.xml index 
f69d904a4b8f..736349f24078 100644 --- a/solr/core/src/test-files/solr/solr-50-all.xml +++ b/solr/core/src/test-files/solr/solr-50-all.xml @@ -24,6 +24,7 @@ testConfigSetsHandler testManagementPath testSharedLib + ${solr.allowPaths:} ${shareSchema:true} 66 100 diff --git a/solr/core/src/test-files/solr/solr-solrreporter.xml b/solr/core/src/test-files/solr/solr-solrreporter.xml index f3249622ecaa..2a7416539d8f 100644 --- a/solr/core/src/test-files/solr/solr-solrreporter.xml +++ b/solr/core/src/test-files/solr/solr-solrreporter.xml @@ -17,6 +17,8 @@ --> + ${solr.allowPaths:} + ${urlScheme:} ${socketTimeout:90000} diff --git a/solr/core/src/test-files/solr/solr.xml b/solr/core/src/test-files/solr/solr.xml index 5e30ffd25b31..fb2791d1a244 100644 --- a/solr/core/src/test-files/solr/solr.xml +++ b/solr/core/src/test-files/solr/solr.xml @@ -24,6 +24,7 @@ ${shareSchema:false} ${configSetBaseDir:configsets} ${coreRootDirectory:.} + ${solr.allowPaths:} ${urlScheme:} diff --git a/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java b/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java index f49604f683ac..c24c9b04ebfb 100644 --- a/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java +++ b/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java @@ -233,8 +233,7 @@ private static boolean deleteImpl(final File f, final boolean doLog) { } final boolean ex = f.exists(); if (doLog && ex) { - LOG.warn("Failed to delete file or dir [" - + f.getAbsolutePath() + "]: it still exists."); + LOG.warn("Failed to delete file or dir [{}]: it still exists.", f.getAbsolutePath()); } return !ex; } @@ -727,6 +726,7 @@ private static void runCommandOnStream( try { // Consume stdout and stderr, to avoid blocking the command executor = Executors.newFixedThreadPool(2); + @SuppressWarnings({"rawtypes"}) Future output = executor.submit(() -> { try { // Read until the output stream receives an EOF and closed. @@ -747,9 +747,12 @@ private static void runCommandOnStream( new IOUtils.NullOutputStream()); } } catch (IOException e) { - LOG.debug(e.getMessage()); + if (LOG.isDebugEnabled()) { + LOG.debug(e.getMessage()); + } } }); + @SuppressWarnings({"rawtypes"}) Future error = executor.submit(() -> { try { // Read until the error stream receives an EOF and closed. @@ -770,7 +773,9 @@ private static void runCommandOnStream( new IOUtils.NullOutputStream()); } } catch (IOException e) { - LOG.debug(e.getMessage()); + if (LOG.isDebugEnabled()) { + LOG.debug(e.getMessage()); + } } }); @@ -1042,8 +1047,7 @@ public static class HardLink extends org.apache.hadoop.fs.HardLink { public static int symLink(String target, String linkname) throws IOException{ if (target == null || linkname == null) { - LOG.warn("Can not create a symLink with a target = " + target - + " and link =" + linkname); + LOG.warn("Can not create a symLink with a target = {} and link = {}", target, linkname); return 1; } @@ -1080,14 +1084,13 @@ public static int symLink(String target, String linkname) throws IOException{ + "administrators and all non-administrators from creating symbolic links. " + "This behavior can be changed in the Local Security Policy management console"); } else if (returnVal != 0) { - LOG.warn("Command '" + StringUtils.join(" ", cmd) + "' failed " - + returnVal + " with: " + ec.getMessage()); + LOG.warn("Command '{}' failed {} with: {}",StringUtils.join(" ", cmd) + , returnVal, ec.getMessage()); } return returnVal; } catch (IOException e) { if (LOG.isDebugEnabled()) { - LOG.debug("Error while create symlink " + linkname + " to " + target - + "." 
+ " Exception: " + StringUtils.stringifyException(e)); + LOG.debug("Error while create symlink {} to {}. Exception: {}", linkname, target, StringUtils.stringifyException(e)); } throw e; } @@ -1126,8 +1129,7 @@ public static int chmod(String filename, String perm, boolean recursive) shExec.execute(); }catch(IOException e) { if(LOG.isDebugEnabled()) { - LOG.debug("Error while changing permission : " + filename - +" Exception: " + StringUtils.stringifyException(e)); + LOG.debug("Error while changing permission : {} Exception: {}", filename, StringUtils.stringifyException(e)); } } return shExec.getExitCode(); @@ -1501,7 +1503,7 @@ public static String[] createJarWithClassPath(String inputClassPath, Path pwd, // then this is acceptable. If it returns false due to some other I/O // error, then this method will fail later with an IOException while saving // the jar. - LOG.debug("mkdirs false for " + workingDir + ", execution will continue"); + LOG.debug("mkdirs false for {}, execution will continue", workingDir); } StringBuilder unexpandedWildcardClasspath = new StringBuilder(); diff --git a/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java b/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java index bfc18a3073d1..9cba3fb1ff33 100644 --- a/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java +++ b/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java @@ -418,7 +418,7 @@ private int moveLazyPersistReplicasToFinalized(File source) try { fileIoProvider.mkdirsWithExistsCheck(volume, targetDir); } catch(IOException ioe) { - LOG.warn("Failed to mkdirs " + targetDir); + LOG.warn("Failed to mkdirs {}", targetDir); continue; } @@ -426,8 +426,7 @@ private int moveLazyPersistReplicasToFinalized(File source) try { fileIoProvider.rename(volume, metaFile, targetMetaFile); } catch (IOException e) { - LOG.warn("Failed to move meta file from " - + metaFile + " to " + targetMetaFile, e); + LOG.warn("Failed to move meta file from {} to {}", metaFile, targetMetaFile, e); continue; } @@ -435,8 +434,7 @@ private int moveLazyPersistReplicasToFinalized(File source) try { fileIoProvider.rename(volume, blockFile, targetBlockFile); } catch (IOException e) { - LOG.warn("Failed to move block file from " - + blockFile + " to " + targetBlockFile, e); + LOG.warn("Failed to move block file from {} to {}", blockFile, targetBlockFile, e); continue; } @@ -444,7 +442,7 @@ private int moveLazyPersistReplicasToFinalized(File source) ++numRecovered; } else { // Failure should be rare. - LOG.warn("Failed to move " + blockFile + " to " + targetDir); + LOG.warn("Failed to move {} to {}", blockFile, targetDir); } } } @@ -655,8 +653,7 @@ static ReplicaInfo selectReplicaToDelete(final ReplicaInfo replica1, replicaToDelete = (replicaToKeep == replica1) ? replica2 : replica1; if (LOG.isDebugEnabled()) { - LOG.debug("resolveDuplicateReplicas decide to keep " + replicaToKeep - + ". Will try to delete " + replicaToDelete); + LOG.debug("resolveDuplicateReplicas decide to keep {}. Will try to delete {}", replicaToKeep, replicaToDelete); } return replicaToDelete; } @@ -664,10 +661,10 @@ static ReplicaInfo selectReplicaToDelete(final ReplicaInfo replica1, private void deleteReplica(final ReplicaInfo replicaToDelete) { // Delete the files on disk. Failure here is okay. 
if (!replicaToDelete.deleteBlockData()) { - LOG.warn("Failed to delete block file for replica " + replicaToDelete); + LOG.warn("Failed to delete block file for replica {}", replicaToDelete); } if (!replicaToDelete.deleteMetadata()) { - LOG.warn("Failed to delete meta file for replica " + replicaToDelete); + LOG.warn("Failed to delete meta file for replica {}", replicaToDelete); } } @@ -765,18 +762,21 @@ private boolean readReplicasFromCache(ReplicaMap volumeMap, File replicaFile = new File(currentDir, REPLICA_CACHE_FILE); // Check whether the file exists or not. if (!replicaFile.exists()) { - LOG.info("Replica Cache file: "+ replicaFile.getPath() + - " doesn't exist "); + if (LOG.isInfoEnabled()) { + LOG.info("Replica Cache file: {} doesn't exist", replicaFile.getPath()); + } return false; } long fileLastModifiedTime = replicaFile.lastModified(); if (System.currentTimeMillis() > fileLastModifiedTime + replicaCacheExpiry) { - LOG.info("Replica Cache file: " + replicaFile.getPath() + - " has gone stale"); + if (LOG.isInfoEnabled()) { + LOG.info("Replica Cache file: {} has gone stale", replicaFile.getPath()); + } // Just to make findbugs happy if (!replicaFile.delete()) { - LOG.info("Replica Cache file: " + replicaFile.getPath() + - " cannot be deleted"); + if (LOG.isInfoEnabled()) { + LOG.info("Replica Cache file: {} cannot be deleted", replicaFile.getPath()); + } } return false; } @@ -814,14 +814,16 @@ private boolean readReplicasFromCache(ReplicaMap volumeMap, iter.remove(); volumeMap.add(bpid, info); } - LOG.info("Successfully read replica from cache file : " - + replicaFile.getPath()); + if (LOG.isInfoEnabled()) { + LOG.info("Successfully read replica from cache file : {}", replicaFile.getPath()); + } return true; } catch (Exception e) { // Any exception we need to revert back to read from disk // Log the error and return false - LOG.info("Exception occurred while reading the replicas cache file: " - + replicaFile.getPath(), e ); + if (LOG.isInfoEnabled()) { + LOG.info("Exception occurred while reading the replicas cache file: {}", replicaFile.getPath(), e); + } return false; } finally { @@ -829,8 +831,9 @@ private boolean readReplicasFromCache(ReplicaMap volumeMap, IOUtils.closeStream(inputStream); if (!fileIoProvider.delete(volume, replicaFile)) { - LOG.info("Failed to delete replica cache file: " + - replicaFile.getPath()); + if (LOG.isInfoEnabled()) { + LOG.info("Failed to delete replica cache file: {}", replicaFile.getPath()); + } } } } @@ -922,8 +925,7 @@ protected void compute() { addToReplicasMap(volumeMap, dir, lazyWriteReplicaMap, isFinalized, exceptions, subTaskQueue); } catch (IOException e) { - LOG.warn("Caught exception while adding replicas from " + volume - + " in subtask. Will throw later.", e); + LOG.warn("Caught exception while adding replicas from {} in subtask. 
Will throw later.", volume, e); exceptions.add(e); } } diff --git a/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java b/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java index cd3c4a324eb5..0767d4f8f85d 100644 --- a/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java +++ b/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java @@ -523,7 +523,7 @@ private ServerConnector createHttpsChannelConnector( if(null != excludeCiphers && !excludeCiphers.isEmpty()) { sslContextFactory.setExcludeCipherSuites( StringUtils.getTrimmedStrings(excludeCiphers)); - LOG.info("Excluded Cipher List:" + excludeCiphers); + LOG.info("Excluded Cipher List:{}", excludeCiphers); } conn.addFirstConnectionFactory(new SslConnectionFactory(sslContextFactory, @@ -610,7 +610,7 @@ private void initializeWebServer(String name, String hostName, if (pathSpecs != null) { for (String path : pathSpecs) { - LOG.info("adding path spec: " + path); + LOG.info("adding path spec: {}", path); addFilterPathMapping(path, webAppContext); } } @@ -782,8 +782,8 @@ public void setAttribute(String name, Object value) { */ public void addJerseyResourcePackage(final String packageName, final String pathSpec) { - LOG.info("addJerseyResourcePackage: packageName=" + packageName - + ", pathSpec=" + pathSpec); + LOG.info("addJerseyResourcePackage: packageName={}, pathcpec={}" + , packageName, pathSpec); final ServletHolder sh = new ServletHolder(ServletContainer.class); sh.setInitParameter("com.sun.jersey.config.property.resourceConfigClass", "com.sun.jersey.api.core.PackagesResourceConfig"); @@ -845,9 +845,10 @@ public void addInternalServlet(String name, String pathSpec, for (int i = 0; i < servletMappings.length; i++) { if (servletMappings[i].containsPathSpec(pathSpec)) { if (LOG.isDebugEnabled()) { - LOG.debug("Found existing " + servletMappings[i].getServletName() + - " servlet at path " + pathSpec + "; will replace mapping" + - " with " + holder.getName() + " servlet"); + LOG.debug("Found existing {} servlet at path {}; will replace mapping with {} servlet" + , servletMappings[i].getServletName() + , pathSpec + , holder.getName()); } ServletMapping[] newServletMappings = ArrayUtil.removeFromArray(servletMappings, servletMappings[i]); @@ -859,7 +860,7 @@ public void addInternalServlet(String name, String pathSpec, webAppContext.addServlet(holder, pathSpec); if(requireAuth && UserGroupInformation.isSecurityEnabled()) { - LOG.info("Adding Kerberos (SPNEGO) filter to " + name); + LOG.info("Adding Kerberos (SPNEGO) filter to {}", name); ServletHandler handler = webAppContext.getServletHandler(); FilterMapping fmap = new FilterMapping(); fmap.setPathSpec(pathSpec); @@ -894,9 +895,8 @@ public void addInternalServlet(String name, String pathSpec, for (int i = 0; i < servletMappings.length; i++) { if (servletMappings[i].containsPathSpec(pathSpec)) { if (LOG.isDebugEnabled()) { - LOG.debug("Found existing " + servletMappings[i].getServletName() + - " servlet at path " + pathSpec + "; will replace mapping" + - " with " + sh.getName() + " servlet"); + LOG.debug("Found existing {} servlet at path {}; will replace mapping with {} servlet" + , servletMappings[i].getServletName(), pathSpec, sh.getName()); } ServletMapping[] newServletMappings = ArrayUtil.removeFromArray(servletMappings, servletMappings[i]); @@ -936,9 +936,10 @@ public void addFilter(String name, String classname, final String[] USER_FACING_URLS = { "*.html", "*.jsp" }; FilterMapping fmap = getFilterMapping(name, USER_FACING_URLS); defineFilter(webAppContext, 
filterHolder, fmap); - LOG.info( - "Added filter " + name + " (class=" + classname + ") to context " - + webAppContext.getDisplayName()); + if (LOG.isInfoEnabled()) { + LOG.info("Added filter {} (class={}) to context {}", name, classname + , webAppContext.getDisplayName()); + } final String[] ALL_URLS = { "/*" }; fmap = getFilterMapping(name, ALL_URLS); for (Map.Entry e @@ -946,8 +947,10 @@ public void addFilter(String name, String classname, if (e.getValue()) { ServletContextHandler ctx = e.getKey(); defineFilter(ctx, filterHolder, fmap); - LOG.info("Added filter " + name + " (class=" + classname + ") to context " + ctx.getDisplayName()); + if (LOG.isInfoEnabled()) { + LOG.info("Added filter {} (class={}) to context {}" + , name, classname, ctx.getDisplayName()); + } } } filterNames.add(name); @@ -963,7 +966,7 @@ public void addGlobalFilter(String name, String classname, for (ServletContextHandler ctx : defaultContexts.keySet()) { defineFilter(ctx, filterHolder, fmap); } - LOG.info("Added global filter '" + name + "' (class=" + classname + ")"); + LOG.info("Added global filter '{}' (class={})", name, classname); } /** @@ -1179,7 +1182,9 @@ private static void bindListener(ServerConnector listener) throws Exception { // failed to open w/o issuing a close first, even if the port is changed listener.close(); listener.open(); - LOG.info("Jetty bound to port " + listener.getLocalPort()); + if (LOG.isInfoEnabled()) { + LOG.info("Jetty bound to port {}", listener.getLocalPort()); + } } /** @@ -1286,9 +1291,7 @@ public void stop() throws Exception { try { c.close(); } catch (Exception e) { - LOG.error( - "Error while stopping listener for webapp" - + webAppContext.getDisplayName(), e); + LOG.error("Error while stopping listener for webapp {}", webAppContext.getDisplayName(), e); exception = addMultiException(exception, e); } } @@ -1300,16 +1303,15 @@ public void stop() throws Exception { try { webAppContext.clearAttributes(); webAppContext.stop(); } catch (Exception e) { - LOG.error("Error while stopping web app context for webapp " - + webAppContext.getDisplayName(), e); + LOG.error("Error while stopping web app context for webapp {}", webAppContext.getDisplayName(), e); exception = addMultiException(exception, e); } try { webServer.stop(); } catch (Exception e) { - LOG.error("Error while stopping web server for webapp " - + webAppContext.getDisplayName(), e); + LOG.error("Error while stopping web server for webapp {}" , webAppContext.getDisplayName(), e); exception = addMultiException(exception, e); } @@ -1415,8 +1417,7 @@ public static boolean hasAdministratorAccess( response.sendError(HttpServletResponse.SC_FORBIDDEN, "Unauthenticated users are not " + "authorized to access this page."); - LOG.warn("User " + remoteUser + " is unauthorized to access the page " - + request.getRequestURI() + "."); + LOG.warn("User {} is unauthorized to access the page {}.", remoteUser, request.getRequestURI()); return false; } diff --git a/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java b/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java index 51abc0ad8c28..0630dec559d2 100644 --- a/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java +++ b/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java @@ -53,6 +53,7 @@ import org.apache.solr.schema.SchemaField; import org.apache.solr.search.DocIterator; import org.apache.solr.search.DocList; +import org.apache.solr.util.BaseTestHarness; import org.junit.BeforeClass; import org.junit.Test; @@ -131,6 +132,7 @@ public void
testSomeStuff() throws Exception { Map metrics = manager.registry(registry).getMetrics(); assertTrue(metrics.containsKey("CORE.coreName")); assertTrue(metrics.containsKey("CORE.refCount")); + @SuppressWarnings({"unchecked"}) Gauge g = (Gauge)metrics.get("CORE.refCount"); assertTrue(g.getValue().intValue() > 0); @@ -231,7 +233,7 @@ public void testSomeStuff() throws Exception { ,"//*[@numFound='0']" ); - assertU(h.simpleTag("rollback")); + assertU(BaseTestHarness.simpleTag("rollback")); assertU(commit()); } @@ -450,6 +452,7 @@ public void testClientErrorOnMalformedNumbers() throws Exception { } @Test + @SuppressWarnings({"rawtypes"}) public void testRequestHandlerBaseException() { final String tmp = "BOO! ignore_exception"; SolrRequestHandler handler = new RequestHandlerBase() { @@ -539,7 +542,9 @@ public void testXMLWriter() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testLocalSolrQueryRequestParams() { + @SuppressWarnings({"rawtypes"}) HashMap args = new HashMap(); args.put("string", "string value"); args.put("array", new String[] {"array", "value"}); @@ -614,7 +619,9 @@ public void testTermVectorFields() { } @Test + @SuppressWarnings({"unchecked"}) public void testSolrParams() throws Exception { + @SuppressWarnings({"rawtypes"}) NamedList nl = new NamedList(); nl.add("i",555); nl.add("s","bbb"); @@ -652,6 +659,7 @@ public void testSolrParams() throws Exception { assertEquals(p.getBool("foo",false), false); assertEquals(!!p.getBool("bt"), !p.getBool("bf")); + @SuppressWarnings({"rawtypes"}) NamedList more = new NamedList(); more.add("s", "aaa"); more.add("s", "ccc"); @@ -1017,7 +1025,7 @@ public void testAbuseOfSort() { e.getMessage().contains(f)); } } - + // /** this doesn't work, but if it did, this is how we'd test it. */ // public void testOverwriteFalse() { diff --git a/solr/core/src/test/org/apache/solr/CursorPagingTest.java b/solr/core/src/test/org/apache/solr/CursorPagingTest.java index a1331473d89e..2210e18ec3f8 100644 --- a/solr/core/src/test/org/apache/solr/CursorPagingTest.java +++ b/solr/core/src/test/org/apache/solr/CursorPagingTest.java @@ -668,11 +668,13 @@ public SentinelIntSet assertFullWalkNoDups(int maxSize, SolrParams params) while (0 < docsOnThisPage) { String json = assertJQ(req(params, CURSOR_MARK_PARAM, cursorMark)); + @SuppressWarnings({"rawtypes"}) Map rsp = (Map) fromJSONString(json); assertTrue("response doesn't contain " + CURSOR_MARK_NEXT + ": " + json, rsp.containsKey(CURSOR_MARK_NEXT)); String nextCursorMark = (String)rsp.get(CURSOR_MARK_NEXT); assertNotNull(CURSOR_MARK_NEXT + " is null", nextCursorMark); + @SuppressWarnings({"unchecked"}) List> docs = (List) (((Map)rsp.get("response")).get("docs")); docsOnThisPage = docs.size(); if (null != params.getInt(CommonParams.ROWS)) { @@ -743,6 +745,7 @@ public void testFacetingWithRandomSorts() throws Exception { * * Also checks that facets are the same with each page, and that they are correct. 
*/ + @SuppressWarnings({"unchecked"}) public SentinelIntSet assertFullWalkNoDupsWithFacets(int maxSize, SolrParams params) throws Exception { @@ -754,9 +757,11 @@ public SentinelIntSet assertFullWalkNoDupsWithFacets(int maxSize, SolrParams par SentinelIntSet ids = new SentinelIntSet(maxSize, -1); String cursorMark = CURSOR_MARK_START; int docsOnThisPage = Integer.MAX_VALUE; + @SuppressWarnings({"rawtypes"}) List previousFacets = null; while (0 < docsOnThisPage) { String json = assertJQ(req(params, CURSOR_MARK_PARAM, cursorMark)); + @SuppressWarnings({"rawtypes"}) Map rsp = (Map) fromJSONString(json); assertTrue("response doesn't contain " + CURSOR_MARK_NEXT + ": " + json, rsp.containsKey(CURSOR_MARK_NEXT)); @@ -790,7 +795,9 @@ public SentinelIntSet assertFullWalkNoDupsWithFacets(int maxSize, SolrParams par } cursorMark = nextCursorMark; + @SuppressWarnings({"rawtypes"}) Map facetFields = (Map)((Map)rsp.get("facet_counts")).get("facet_fields"); + @SuppressWarnings({"rawtypes"}) List facets = (List)facetFields.get(facetField); if (null != previousFacets) { assertEquals("Facets not the same as on previous page:\nprevious page facets: " @@ -825,6 +832,7 @@ public SentinelIntSet assertFullWalkNoDupsWithFacets(int maxSize, SolrParams par */ public String assertCursor(SolrQueryRequest req, String... tests) throws Exception { String json = assertJQ(req, tests); + @SuppressWarnings({"rawtypes"}) Map rsp = (Map) fromJSONString(json); assertTrue("response doesn't contain "+CURSOR_MARK_NEXT + ": " + json, rsp.containsKey(CURSOR_MARK_NEXT)); diff --git a/solr/core/src/test/org/apache/solr/OutputWriterTest.java b/solr/core/src/test/org/apache/solr/OutputWriterTest.java index 3df9459accc6..eb144e83fa95 100644 --- a/solr/core/src/test/org/apache/solr/OutputWriterTest.java +++ b/solr/core/src/test/org/apache/solr/OutputWriterTest.java @@ -107,7 +107,7 @@ public static class UselessOutputWriter implements QueryResponseWriter { public UselessOutputWriter() {} @Override - public void init(NamedList n) {} + public void init(@SuppressWarnings({"rawtypes"})NamedList n) {} @Override public void write(Writer writer, SolrQueryRequest request, SolrQueryResponse response) diff --git a/solr/core/src/test/org/apache/solr/SolrInfoBeanTest.java b/solr/core/src/test/org/apache/solr/SolrInfoBeanTest.java index cc4a10254abd..3c56d5ac97e0 100644 --- a/solr/core/src/test/org/apache/solr/SolrInfoBeanTest.java +++ b/solr/core/src/test/org/apache/solr/SolrInfoBeanTest.java @@ -48,7 +48,9 @@ public static void beforeClass() throws Exception { * Gets a list of everything we can find in the classpath and makes sure it has * a name, description, etc... 
*/ + @SuppressWarnings({"unchecked"}) public void testCallMBeanInfo() throws Exception { + @SuppressWarnings({"rawtypes"}) List classes = new ArrayList<>(); classes.addAll(getClassesForPackage(SearchHandler.class.getPackage().getName())); classes.addAll(getClassesForPackage(SearchComponent.class.getPackage().getName())); @@ -62,7 +64,7 @@ public void testCallMBeanInfo() throws Exception { String registry = h.getCore().getCoreMetricManager().getRegistryName(); SolrMetricsContext solrMetricsContext = new SolrMetricsContext(metricManager, registry, "foo"); String scope = TestUtil.randomSimpleString(random(), 2, 10); - for( Class clazz : classes ) { + for(@SuppressWarnings({"rawtypes"})Class clazz : classes ) { if( SolrInfoBean.class.isAssignableFrom( clazz ) ) { try { SolrInfoBean info = (SolrInfoBean)clazz.getConstructor().newInstance(); @@ -91,6 +93,7 @@ public void testCallMBeanInfo() throws Exception { assertTrue( "there are at least 10 SolrInfoBean that should be found in the classpath, found " + checked, checked > 10 ); } + @SuppressWarnings({"rawtypes"}) private static List getClassesForPackage(String pckgname) throws Exception { ArrayList directories = new ArrayList<>(); ClassLoader cld = h.getCore().getResourceLoader().getClassLoader(); @@ -104,6 +107,7 @@ private static List getClassesForPackage(String pckgname) throws Exceptio directories.add(f); } + @SuppressWarnings({"rawtypes"}) ArrayList classes = new ArrayList<>(); for (File directory : directories) { if (directory.exists()) { diff --git a/solr/core/src/test/org/apache/solr/TestCrossCoreJoin.java b/solr/core/src/test/org/apache/solr/TestCrossCoreJoin.java index 492e2c3c3f91..074e7c4f3ccc 100644 --- a/solr/core/src/test/org/apache/solr/TestCrossCoreJoin.java +++ b/solr/core/src/test/org/apache/solr/TestCrossCoreJoin.java @@ -87,14 +87,14 @@ public void testScoreJoin() throws Exception { void doTestJoin(String joinPrefix) throws Exception { assertJQ(req("q", joinPrefix + " from=dept_id_s to=dept_s fromIndex=fromCore}cat:dev", "fl", "id", "debugQuery", random().nextBoolean() ? "true":"false") - , "/response=={'numFound':3,'start':0,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}" + , "/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}" ); // find people that develop stuff - but limit via filter query to a name of "john" // this tests filters being pushed down to queries (SOLR-3062) assertJQ(req("q", joinPrefix + " from=dept_id_s to=dept_s fromIndex=fromCore}cat:dev", "fl", "id", "fq", "name:john", "debugQuery", random().nextBoolean() ? "true":"false") - , "/response=={'numFound':1,'start':0,'docs':[{'id':'1'}]}" + , "/response=={'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'1'}]}" ); } diff --git a/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java b/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java index 7b759d01913a..11220e43333d 100644 --- a/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java +++ b/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java @@ -45,6 +45,7 @@ @SuppressPointFields(bugUrl="https://issues.apache.org/jira/browse/SOLR-10844") public class TestDistributedGrouping extends BaseDistributedSearchTestCase { + @SuppressWarnings({"unchecked"}) public TestDistributedGrouping() { // SOLR-10844: Even with points suppressed, this test breaks if we (randomize) docvalues="true" on trie fields?!?!?!!? 
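// ---- Editor's aside (illustrative sketch, not part of the patch): the
// @SuppressWarnings additions throughout these files pin each suppression to
// the narrowest legal target -- a single local variable, parameter, or method
// -- instead of annotating the whole class, so new warnings elsewhere still
// surface. A self-contained sketch under that convention (names illustrative):
import java.util.ArrayList;
import java.util.List;

class SuppressionScopeSketch {
  @SuppressWarnings({"rawtypes"})      // the raw type is deliberate here
  static final List RAW = new ArrayList();

  static List<String> narrow(Object o) {
    @SuppressWarnings({"unchecked"})   // unchecked cast confined to one local
    List<String> strings = (List<String>) o;
    return strings;
  }
}
// ---- end editor's aside ----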
System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"false"); @@ -62,6 +63,7 @@ public TestDistributedGrouping() { String oddField="oddField_s1"; @Test + @SuppressWarnings({"unchecked"}) public void test() throws Exception { del("*:*"); commit(); @@ -309,6 +311,7 @@ public void test() throws Exception { int which = r.nextInt(clients.size()); SolrClient client = clients.get(which); QueryResponse rsp = client.query(params); + @SuppressWarnings({"rawtypes"}) NamedList nl = (NamedList) rsp.getResponse().get("grouped"); nl = (NamedList) nl.getVal(0); int matches = (Integer) nl.getVal(0); diff --git a/solr/core/src/test/org/apache/solr/TestDistributedSearch.java b/solr/core/src/test/org/apache/solr/TestDistributedSearch.java index f634f3ec8951..aebc772f02fd 100644 --- a/solr/core/src/test/org/apache/solr/TestDistributedSearch.java +++ b/solr/core/src/test/org/apache/solr/TestDistributedSearch.java @@ -117,6 +117,7 @@ public TestDistributedSearch() { } @Test + @SuppressWarnings({"unchecked"}) public void test() throws Exception { assertEquals(clients.size(), jettys.size()); @@ -209,7 +210,10 @@ public void test() throws Exception { query("q","*:*", "sort",i1+" desc", "fl","*,score"); query("q","*:*", "sort","n_tl1 asc", "fl","*,score"); query("q","*:*", "sort","n_tl1 desc"); + handle.put("maxScore", SKIPVAL); + testMinExactCount(); + query("q","{!func}"+i1);// does not expect maxScore. So if it comes ,ignore it. JavaBinCodec.writeSolrDocumentList() //is agnostic of request params. handle.remove("maxScore"); @@ -596,7 +600,7 @@ public void test() throws Exception { if (shardReq.params.getBool(StatsParams.STATS, false)) { numStatsShardRequests++; for (ShardResponse shardRsp : shardReq.sreq.responses) { - NamedList shardStats = + NamedList shardStats = ((NamedList>>) shardRsp.getSolrResponse().getResponse().get("stats")).get("stats_fields").get(i1); @@ -626,7 +630,7 @@ public void test() throws Exception { // assertEquals("wrong min", -987.0D, (Double)s.getMin(), 0.0001D ); assertEquals("wrong mean", 377.153846D, (Double)s.getMean(), 0.0001D ); - assertEquals("wrong stddev", 1271.76215D, (Double)s.getStddev(), 0.0001D ); + assertEquals("wrong stddev", 1271.76215D, s.getStddev(), 0.0001D ); // assertNull("expected null for count", s.getCount()); assertNull("expected null for calcDistinct", s.getCountDistinct()); @@ -680,7 +684,7 @@ public void test() throws Exception { // ignore the FieldStatsInfo convinience class, and look directly at the NamedList // so we don't need any sort of crazy reflection - NamedList svals = + NamedList svals = ((NamedList>>) rsp.getResponse().get("stats")).get("stats_fields").get(i1); @@ -803,7 +807,7 @@ public void test() throws Exception { // NOTE: min is expected to be null even though requested because of no values assertEquals("wrong min", null, s.getMin()); assertTrue("mean should be NaN", ((Double)s.getMean()).isNaN()); - assertEquals("wrong stddev", 0.0D, (Double)s.getStddev(), 0.0D ); + assertEquals("wrong stddev", 0.0D, s.getStddev(), 0.0D ); // things that we didn't ask for, so they better be null assertNull("expected null for count", s.getCount()); @@ -1082,11 +1086,38 @@ public void test() throws Exception { assertEquals(new EnumFieldValue(11, "Critical"), rsp.getFieldStatsInfo().get(fieldName).getMax()); - handle.put("severity", UNORDERED); // this is stupid, but stats.facet doesn't garuntee order + handle.put("severity", UNORDERED); // this is stupid, but stats.facet doesn't guarantee order query("q", "*:*", "stats", "true", "stats.field", fieldName, "stats.facet", 
fieldName); } + private void testMinExactCount() throws Exception { + assertIsExactHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", CommonParams.MIN_EXACT_COUNT, "200", CommonParams.ROWS, "2", CommonParams.SORT, "score desc, id asc"); + assertIsExactHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", CommonParams.MIN_EXACT_COUNT, "-1", CommonParams.ROWS, "2", CommonParams.SORT, "score desc, id asc"); + assertIsExactHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", CommonParams.MIN_EXACT_COUNT, "1", CommonParams.ROWS, "200", CommonParams.SORT, "score desc, id asc"); + assertIsExactHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", "facet", "true", "facet.field", s1, CommonParams.MIN_EXACT_COUNT,"1", CommonParams.ROWS, "200", CommonParams.SORT, "score desc, id asc"); + assertIsExactHitCount("q","{!cache=false}id:1", CommonParams.MIN_EXACT_COUNT,"1", CommonParams.ROWS, "1"); + assertApproximatedHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", CommonParams.MIN_EXACT_COUNT,"2", CommonParams.ROWS, "2", CommonParams.SORT, "score desc, id asc"); + } + + private void assertIsExactHitCount(Object... requestParams) throws Exception { + QueryResponse response = query(requestParams); + assertNotNull("Expecting exact hit count in response: " + response.getResults().toString(), + response.getResults().getNumFoundExact()); + assertTrue("Expecting exact hit count in response: " + response.getResults().toString(), + response.getResults().getNumFoundExact()); + } + + private void assertApproximatedHitCount(Object...requestParams) throws Exception { + handle.put("numFound", SKIPVAL); + QueryResponse response = query(requestParams); + assertNotNull("Expecting numFoundExact in response: " + response.getResults().toString(), + response.getResults().getNumFoundExact()); + assertFalse("Expecting approximated results in response: " + response.getResults().toString(), + response.getResults().getNumFoundExact()); + handle.remove("numFound", SKIPVAL); + } + /** comparing results with facet.method=uif */ private void queryAndCompareUIF(Object ... params) throws Exception { final QueryResponse expect = query(params); @@ -1124,6 +1155,7 @@ protected void checkMinCountsRange(List counts, Object[] pairs } } + @SuppressWarnings({"unchecked", "rawtypes"}) protected void queryPartialResults(final List upShards, final List upClients, Object...
q) throws Exception { diff --git a/solr/core/src/test/org/apache/solr/TestGroupingSearch.java b/solr/core/src/test/org/apache/solr/TestGroupingSearch.java index 35ef3c602452..54e983bb9b73 100644 --- a/solr/core/src/test/org/apache/solr/TestGroupingSearch.java +++ b/solr/core/src/test/org/apache/solr/TestGroupingSearch.java @@ -16,20 +16,6 @@ */ package org.apache.solr; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.lang.invoke.MethodHandles; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; - import org.apache.solr.client.solrj.impl.BinaryResponseParser; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.CommonParams; @@ -50,6 +36,20 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.lang.invoke.MethodHandles; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; + public class TestGroupingSearch extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -245,11 +245,11 @@ public void testGroupingSimpleFormatArrayIndexOutOfBoundsException() throws Exce assertJQ( req("q", "*:*", "start", "1", "group", "true", "group.field", "id", "group.main", "true"), - "/response=={'numFound':3,'start':1,'docs':[{'id':'2'},{'id':'3'}]}" + "/response=={'numFound':3,'start':1,'numFoundExact':true,'docs':[{'id':'2'},{'id':'3'}]}" ); assertJQ( req("q", "*:*", "start", "1", "rows", "1", "group", "true", "group.field", "id", "group.main", "true"), - "/response=={'numFound':3,'start':1,'docs':[{'id':'2'}]}" + "/response=={'numFound':3,'start':1,'numFoundExact':true,'docs':[{'id':'2'}]}" ); } @@ -264,7 +264,7 @@ public void testGroupingSimpleFormatStartBiggerThanRows() throws Exception { assertJQ( req("q", "*:*", "start", "2", "rows", "1", "group", "true", "group.field", "id", "group.main", "true"), - "/response=={'numFound':5,'start':2,'docs':[{'id':'3'}]}" + "/response=={'numFound':5,'start':2,'numFoundExact':true,'docs':[{'id':'3'}]}" ); } @@ -323,12 +323,12 @@ public void testGroupingSortByFunction() throws Exception { assertJQ( req("q", "*:*", "sort", "sum(value1_i, value2_i) desc", "rows", "1", "group", "true", "group.field", "id", "fl", "id"), - "/grouped=={'id':{'matches':5,'groups':[{'groupValue':'5','doclist':{'numFound':1,'start':0,'docs':[{'id':'5'}]}}]}}" + "/grouped=={'id':{'matches':5,'groups':[{'groupValue':'5','doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'5'}]}}]}}" ); assertJQ( req("q", "*:*", "sort", "geodist(45.18014,-93.87742,store) asc", "rows", "1", "group", "true", "group.field", "id", "fl", "id"), - "/grouped=={'id':{'matches':5,'groups':[{'groupValue':'1','doclist':{'numFound':1,'start':0,'docs':[{'id':'1'}]}}]}}" + "/grouped=={'id':{'matches':5,'groups':[{'groupValue':'1','doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'1'}]}}]}}" ); } @@ -347,7 +347,7 @@ public void testGroupingGroupedBasedFaceting() throws Exception { "value1_s1", "fl", "id", "facet", "true", 
"facet.field", "value3_s1", "group.truncate", "false"); assertJQ( req, - "/grouped=={'value1_s1':{'matches':5,'groups':[{'groupValue':'1','doclist':{'numFound':3,'start':0,'docs':[{'id':'1'}]}}]}}", + "/grouped=={'value1_s1':{'matches':5,'groups':[{'groupValue':'1','doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'1'}]}}]}}", "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',3,'b',2]}," + EMPTY_FACETS + "}" ); @@ -356,7 +356,7 @@ public void testGroupingGroupedBasedFaceting() throws Exception { "value1_s1", "fl", "id", "facet", "true", "facet.field", "value3_s1", "group.truncate", "true"); assertJQ( req, - "/grouped=={'value1_s1':{'matches':5,'groups':[{'groupValue':'1','doclist':{'numFound':3,'start':0,'docs':[{'id':'1'}]}}]}}", + "/grouped=={'value1_s1':{'matches':5,'groups':[{'groupValue':'1','doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'1'}]}}]}}", "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]}," + EMPTY_FACETS + "}" ); @@ -365,7 +365,7 @@ public void testGroupingGroupedBasedFaceting() throws Exception { "strdist(1,value1_s1,edit)", "fl", "id", "facet", "true", "facet.field", "value3_s1", "group.truncate", "true"); assertJQ( req, - "/grouped=={'strdist(1,value1_s1,edit)':{'matches':5,'groups':[{'groupValue':1.0,'doclist':{'numFound':3,'start':0,'docs':[{'id':'1'}]}}]}}", + "/grouped=={'strdist(1,value1_s1,edit)':{'matches':5,'groups':[{'groupValue':1.0,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'1'}]}}]}}", "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]}," + EMPTY_FACETS + "}" ); @@ -374,7 +374,7 @@ public void testGroupingGroupedBasedFaceting() throws Exception { "facet.field", "value3_s1", "group.truncate", "true"); assertJQ( req, - "/grouped=={'value4_i':{'matches':5,'groups':[{'groupValue':1,'doclist':{'numFound':3,'start':0,'docs':[{'id':'1'}]}}]}}", + "/grouped=={'value4_i':{'matches':5,'groups':[{'groupValue':1,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'1'}]}}]}}", "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]}," + EMPTY_FACETS + "}" ); @@ -383,7 +383,7 @@ public void testGroupingGroupedBasedFaceting() throws Exception { "facet.field", "{!ex=v}value3_s1", "group.truncate", "true", "fq", "{!tag=v}value3_s1:b"); assertJQ( req, - "/grouped=={'value4_i':{'matches':2,'groups':[{'groupValue':2,'doclist':{'numFound':2,'start':0,'docs':[{'id':'3'}]}}]}}", + "/grouped=={'value4_i':{'matches':2,'groups':[{'groupValue':2,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'}]}}]}}", "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]}," + EMPTY_FACETS + "}" ); @@ -392,7 +392,7 @@ public void testGroupingGroupedBasedFaceting() throws Exception { "facet.field", "{!ex=v}value3_s1", "group.truncate", "false", "fq", "{!tag=v}value3_s1:b"); assertJQ( req, - "/grouped=={'value4_i':{'matches':2,'groups':[{'groupValue':2,'doclist':{'numFound':2,'start':0,'docs':[{'id':'3'}]}}]}}", + "/grouped=={'value4_i':{'matches':2,'groups':[{'groupValue':2,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'}]}}]}}", "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',3,'b',2]}," + EMPTY_FACETS + "}" ); @@ -401,7 +401,7 @@ public void testGroupingGroupedBasedFaceting() throws Exception { "facet.field", "{!ex=v}value3_s1", "group.truncate", "true", "fq", "{!tag=v}value3_s1:b"); assertJQ( req, - 
"/grouped=={'sub(value4_i,1)':{'matches':2,'groups':[{'groupValue':1.0,'doclist':{'numFound':2,'start':0,'docs':[{'id':'3'}]}}]}}", + "/grouped=={'sub(value4_i,1)':{'matches':2,'groups':[{'groupValue':1.0,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'}]}}]}}", "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]}," + EMPTY_FACETS + "}" ); } @@ -424,7 +424,7 @@ public void testGroupingGroupedBasedFacetingWithTaggedFilter() throws Exception "facet.query", "{!ex=chk key=LM3}bday:[2012-10-18T00:00:00Z TO 2013-01-17T23:59:59Z]"); assertJQ( req, - "/grouped=={'cat_sI':{'matches':2,'groups':[{'groupValue':'a','doclist':{'numFound':1,'start':0,'docs':[{'id':'5'}]}}]}}", + "/grouped=={'cat_sI':{'matches':2,'groups':[{'groupValue':'a','doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'5'}]}}]}}", "/facet_counts=={'facet_queries':{'LW1':2,'LM1':2,'LM3':2},'facet_fields':{}," + EMPTY_FACETS + "}" ); } @@ -463,33 +463,33 @@ public void testGroupAPI() throws Exception { ,"/responseHeader=={'_SKIP_':'QTime', 'status':0}" // partial match by skipping some elements ,"/responseHeader=={'_MATCH_':'status', 'status':0}" // partial match by only including some elements ,"/grouped=={'"+f+"':{'matches':10,'groups':[\n" + - "{'groupValue':1,'doclist':{'numFound':3,'start':0,'docs':[{'id':'8'}]}}," + - "{'groupValue':3,'doclist':{'numFound':2,'start':0,'docs':[{'id':'3'}]}}," + - "{'groupValue':2,'doclist':{'numFound':3,'start':0,'docs':[{'id':'4'}]}}," + - "{'groupValue':5,'doclist':{'numFound':1,'start':0,'docs':[{'id':'1'}]}}," + - "{'groupValue':4,'doclist':{'numFound':1,'start':0,'docs':[{'id':'2'}]}}" + + "{'groupValue':1,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'8'}]}}," + + "{'groupValue':3,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'}]}}," + + "{'groupValue':2,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'4'}]}}," + + "{'groupValue':5,'doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'1'}]}}," + + "{'groupValue':4,'doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'2'}]}}" + "]}}" ); // test that filtering cuts down the result set assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "fq",f+":2") ,"/grouped=={'"+f+"':{'matches':3,'groups':[" + - "{'groupValue':2,'doclist':{'numFound':3,'start':0,'docs':[{'id':'4'}]}}" + + "{'groupValue':2,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'4'}]}}" + "]}}" ); // test limiting the number of groups returned assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","2") ,"/grouped=={'"+f+"':{'matches':10,'groups':[" + - "{'groupValue':1,'doclist':{'numFound':3,'start':0,'docs':[{'id':'8'}]}}," + - "{'groupValue':3,'doclist':{'numFound':2,'start':0,'docs':[{'id':'3'}]}}" + + "{'groupValue':1,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'8'}]}}," + + "{'groupValue':3,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'}]}}" + "]}}" ); // test offset into group list assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","1", "start","1") ,"/grouped=={'"+f+"':{'matches':10,'groups':[" + - "{'groupValue':3,'doclist':{'numFound':2,'start':0,'docs':[{'id':'3'}]}}" + + "{'groupValue':3,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'}]}}" + "]}}" ); @@ -502,24 +502,24 @@ public void testGroupAPI() throws Exception { 
// test increasing the docs per group returned assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","2", "group.limit","3") ,"/grouped=={'"+f+"':{'matches':10,'groups':[" + - "{'groupValue':1,'doclist':{'numFound':3,'start':0,'docs':[{'id':'8'},{'id':'10'},{'id':'5'}]}}," + - "{'groupValue':3,'doclist':{'numFound':2,'start':0,'docs':[{'id':'3'},{'id':'6'}]}}" + + "{'groupValue':1,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'8'},{'id':'10'},{'id':'5'}]}}," + + "{'groupValue':3,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'},{'id':'6'}]}}" + "]}}" ); // test offset into each group assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","2", "group.limit","3", "group.offset","1") ,"/grouped=={'"+f+"':{'matches':10,'groups':[" + - "{'groupValue':1,'doclist':{'numFound':3,'start':1,'docs':[{'id':'10'},{'id':'5'}]}}," + - "{'groupValue':3,'doclist':{'numFound':2,'start':1,'docs':[{'id':'6'}]}}" + + "{'groupValue':1,'doclist':{'numFound':3,'start':1,'numFoundExact':true,'docs':[{'id':'10'},{'id':'5'}]}}," + + "{'groupValue':3,'doclist':{'numFound':2,'start':1,'numFoundExact':true,'docs':[{'id':'6'}]}}" + "]}}" ); // test big offset into each group assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","2", "group.limit","3", "group.offset","10") ,"/grouped=={'"+f+"':{'matches':10,'groups':[" + - "{'groupValue':1,'doclist':{'numFound':3,'start':10,'docs':[]}}," + - "{'groupValue':3,'doclist':{'numFound':2,'start':10,'docs':[]}}" + + "{'groupValue':1,'doclist':{'numFound':3,'start':10,'numFoundExact':true,'docs':[]}}," + + "{'groupValue':3,'doclist':{'numFound':2,'start':10,'numFoundExact':true,'docs':[]}}" + "]}}" ); @@ -527,8 +527,8 @@ public void testGroupAPI() throws Exception { assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id,score", "rows","2", "group.limit","2", "indent","off") ,"/grouped/"+f+"/groups==" + "[" + - "{'groupValue':1,'doclist':{'numFound':3,'start':0,'maxScore':10.0,'docs':[{'id':'8','score':10.0},{'id':'10','score':3.0}]}}," + - "{'groupValue':3,'doclist':{'numFound':2,'start':0,'maxScore':7.0,'docs':[{'id':'3','score':7.0},{'id':'6','score':2.0}]}}" + + "{'groupValue':1,'doclist':{'numFound':3,'start':0,numFoundExact:true,'maxScore':10.0,'docs':[{'id':'8','score':10.0},{'id':'10','score':3.0}]}}," + + "{'groupValue':3,'doclist':{'numFound':2,'start':0,numFoundExact:true,'maxScore':7.0,'docs':[{'id':'3','score':7.0},{'id':'6','score':2.0}]}}" + "]" ); @@ -537,8 +537,8 @@ public void testGroupAPI() throws Exception { String func = "add("+f+","+f+")"; assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.func", func , "fl","id", "rows","2") ,"/grouped=={'"+func+"':{'matches':10,'groups':[" + - "{'groupValue':2.0,'doclist':{'numFound':3,'start':0,'docs':[{'id':'8'}]}}," + - "{'groupValue':6.0,'doclist':{'numFound':2,'start':0,'docs':[{'id':'3'}]}}" + + "{'groupValue':2.0,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'8'}]}}," + + "{'groupValue':6.0,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'}]}}" + "]}}" ); @@ -560,7 +560,7 @@ public void testGroupAPI() throws Exception { ///////////////////////// group.query assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.query","id:[2 TO 5]", "fl","id", "group.limit","3") ,"/grouped=={'id:[2 TO 5]':{'matches':10," + - 
"'doclist':{'numFound':4,'start':0,'docs':[{'id':'3'},{'id':'4'},{'id':'2'}]}}}" + "'doclist':{'numFound':4,'start':0,numFoundExact:true,'docs':[{'id':'3'},{'id':'4'},{'id':'2'}]}}}" ); // group.query that matches nothing @@ -571,50 +571,50 @@ public void testGroupAPI() throws Exception { "group.query","id:1000", "fl","id", "group.limit","3") - ,"/grouped/id:[2 TO 5]=={'matches':10,'doclist':{'numFound':4,'start':0,'docs':[{'id':'3'},{'id':'4'},{'id':'2'}]}}" - ,"/grouped/id:1000=={'matches':10,'doclist':{'numFound':0,'start':0,'docs':[]}}" + ,"/grouped/id:[2 TO 5]=={'matches':10,'doclist':{'numFound':4,'start':0,numFoundExact:true,'docs':[{'id':'3'},{'id':'4'},{'id':'2'}]}}" + ,"/grouped/id:1000=={'matches':10,'doclist':{'numFound':0,'start':0,numFoundExact:true,'docs':[]}}" ); // group.query and sort assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.query",f+":1", "fl","id,score", "rows","2", "group.limit","2", "sort",f+" desc, score desc", "indent","off") ,"/grouped/"+f+":1==" + - "{'matches':10,'doclist':{'numFound':3,'start':0,'maxScore':10.0,'docs':[{'id':'8','score':10.0},{'id':'10','score':3.0}]}}," + "{'matches':10,'doclist':{'numFound':3,'start':0,numFoundExact:true,'maxScore':10.0,'docs':[{'id':'8','score':10.0},{'id':'10','score':3.0}]}}," ); // group.query with fl=score and default sort assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.query",f+":1", "fl","id,score", "rows","2", "group.limit","2", "sort", "score desc", "indent","off") ,"/grouped/"+f+":1==" + - "{'matches':10,'doclist':{'numFound':3,'start':0,'maxScore':10.0,'docs':[{'id':'8','score':10.0},{'id':'10','score':3.0}]}}," + "{'matches':10,'doclist':{'numFound':3,'start':0,numFoundExact:true,'maxScore':10.0,'docs':[{'id':'8','score':10.0},{'id':'10','score':3.0}]}}," ); assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.query",f+":1", "fl","id", "rows","2", "group.limit","2", "indent","off") ,"/grouped/"+f+":1==" + - "{'matches':10,'doclist':{'numFound':3,'start':0,'docs':[{'id':'8'},{'id':'10'}]}}," + "{'matches':10,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'8'},{'id':'10'}]}}," ); // group.query and offset assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.query","id:[2 TO 5]", "fl","id", "group.limit","3", "group.offset","2") ,"/grouped=={'id:[2 TO 5]':{'matches':10," + - "'doclist':{'numFound':4,'start':2,'docs':[{'id':'2'},{'id':'5'}]}}}" + "'doclist':{'numFound':4,'start':2,'numFoundExact':true,'docs':[{'id':'2'},{'id':'5'}]}}}" ); // group.query and big offset assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.query","id:[2 TO 5]", "fl","id", "group.limit","3", "group.offset","10") ,"/grouped=={'id:[2 TO 5]':{'matches':10," + - "'doclist':{'numFound':4,'start':10,'docs':[]}}}" + "'doclist':{'numFound':4,'start':10,'numFoundExact':true,'docs':[]}}}" ); ///////////////////////// group.query as main result assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.query","id:[2 TO 5]", "fl","id", "rows","3", "group.main","true") - ,"/response=={'numFound':4,'start':0,'docs':[{'id':'3'},{'id':'4'},{'id':'2'}]}" + ,"/response=={'numFound':4,'start':0,numFoundExact:true,'docs':[{'id':'3'},{'id':'4'},{'id':'2'}]}" ); // group.query and offset assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.query","id:[2 TO 5]", "fl","id", "rows","3", "start","2", "group.main","true") - ,"/response=={'numFound':4,'start':2,'docs':[{'id':'2'},{'id':'5'}]}" + 
,"/response=={'numFound':4,'start':2,'numFoundExact':true,'docs':[{'id':'2'},{'id':'5'}]}" ); // group.query and big offset assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.query","id:[2 TO 5]", "fl","id", "rows","3", "start","10", "group.main","true") - ,"/response=={'numFound':4,'start':10,'docs':[]}" + ,"/response=={'numFound':4,'start':10,'numFoundExact':true,'docs':[]}" ); @@ -625,46 +625,46 @@ public void testGroupAPI() throws Exception { "group.field",f, "rows","1", "fl","id", "group.limit","2") - ,"/grouped/id:[2 TO 5]=={'matches':10,'doclist':{'numFound':4,'start':0,'docs':[{'id':'3'},{'id':'4'}]}}" - ,"/grouped/id:[5 TO 5]=={'matches':10,'doclist':{'numFound':1,'start':0,'docs':[{'id':'5'}]}}" - ,"/grouped/"+f+"=={'matches':10,'groups':[{'groupValue':1,'doclist':{'numFound':3,'start':0,'docs':[{'id':'8'},{'id':'10'}]}}]}" + ,"/grouped/id:[2 TO 5]=={'matches':10,'doclist':{'numFound':4,'start':0,numFoundExact:true,'docs':[{'id':'3'},{'id':'4'}]}}" + ,"/grouped/id:[5 TO 5]=={'matches':10,'doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'5'}]}}" + ,"/grouped/"+f+"=={'matches':10,'groups':[{'groupValue':1,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'8'},{'id':'10'}]}}]}" ); ///////////////////////// group.field as main result assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "group.main","true") - ,"/response=={'numFound':10,'start':0,'docs':[{'id':'8'},{'id':'3'},{'id':'4'},{'id':'1'},{'id':'2'}]}" + ,"/response=={'numFound':10,'start':0,numFoundExact:true,'docs':[{'id':'8'},{'id':'3'},{'id':'4'},{'id':'1'},{'id':'2'}]}" ); // test that rows limits #docs assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","3", "group.main","true") - ,"/response=={'numFound':10,'start':0,'docs':[{'id':'8'},{'id':'3'},{'id':'4'}]}" + ,"/response=={'numFound':10,'start':0,numFoundExact:true,'docs':[{'id':'8'},{'id':'3'},{'id':'4'}]}" ); // small offset assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","2", "start","1", "group.main","true") - ,"/response=={'numFound':10,'start':1,'docs':[{'id':'3'},{'id':'4'}]}" + ,"/response=={'numFound':10,'start':1,'numFoundExact':true,'docs':[{'id':'3'},{'id':'4'}]}" ); // large offset assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","2", "start","20", "group.main","true") - ,"/response=={'numFound':10,'start':20,'docs':[]}" + ,"/response=={'numFound':10,'start':20,'numFoundExact':true,'docs':[]}" ); // group.limit>1 assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","3", "group.limit","2", "group.main","true") - ,"/response=={'numFound':10,'start':0,'docs':[{'id':'8'},{'id':'10'},{'id':'3'}]}" + ,"/response=={'numFound':10,'start':0,numFoundExact:true,'docs':[{'id':'8'},{'id':'10'},{'id':'3'}]}" ); // group.limit>1 with start>0 assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","3", "start","1", "group.limit","2", "group.main","true") - ,"/response=={'numFound':10,'start':1,'docs':[{'id':'10'},{'id':'3'},{'id':'6'}]}" + ,"/response=={'numFound':10,'start':1,'numFoundExact':true,'docs':[{'id':'10'},{'id':'3'},{'id':'6'}]}" ); ///////////////////////// group.format == simple assertJQ(req("fq", filt, "q", "{!func}" + f2, "group", "true", "group.field", f, "fl", "id", "rows", "3", "start", "1", "group.limit", "2", "group.format", "simple") , 
"/grouped/foo_i=={'matches':10,'doclist':" - + "{'numFound':10,'start':1,'docs':[{'id':'10'},{'id':'3'},{'id':'6'}]}}" + + "{'numFound':10,'start':1,'numFoundExact':true,'docs':[{'id':'10'},{'id':'3'},{'id':'6'}]}}" ); //////////////////////// grouping where main query matches nothing assertJQ(req("fq", filt, "q", "bogus_s:nothing", "group", "true", "group.field", f, "fl", "id", "group.limit", "2", "group.format", "simple") - , "/grouped/foo_i=={'matches':0,'doclist':{'numFound':0,'start':0,'docs':[]}}" + , "/grouped/foo_i=={'matches':0,'doclist':{'numFound':0,'start':0,numFoundExact:true,'docs':[]}}" ); assertJQ(req("fq",filt, "q","bogus_s:nothing", "group","true", "group.query","id:[2 TO 5]", @@ -672,8 +672,8 @@ public void testGroupAPI() throws Exception { "group.field",f, "rows","1", "fl","id", "group.limit","2") - ,"/grouped/id:[2 TO 5]=={'matches':0,'doclist':{'numFound':0,'start':0,'docs':[]}}" - ,"/grouped/id:[5 TO 5]=={'matches':0,'doclist':{'numFound':0,'start':0,'docs':[]}}" + ,"/grouped/id:[2 TO 5]=={'matches':0,'doclist':{'numFound':0,'start':0,numFoundExact:true,'docs':[]}}" + ,"/grouped/id:[5 TO 5]=={'matches':0,'doclist':{'numFound':0,'start':0,numFoundExact:true,'docs':[]}}" ,"/grouped/"+f+"=={'matches':0,'groups':[]}" ); assertJQ(req("fq",filt, @@ -683,8 +683,8 @@ public void testGroupAPI() throws Exception { "group.query","id:1000", "fl","id", "group.limit","3") - ,"/grouped/id:[2 TO 5]=={'matches':0,'doclist':{'numFound':0,'start':0,'docs':[]}}" - ,"/grouped/id:1000=={'matches':0,'doclist':{'numFound':0,'start':0,'docs':[]}}" + ,"/grouped/id:[2 TO 5]=={'matches':0,'doclist':{'numFound':0,'start':0,numFoundExact:true,'docs':[]}}" + ,"/grouped/id:1000=={'matches':0,'doclist':{'numFound':0,'start':0,numFoundExact:true,'docs':[]}}" ); } @@ -709,7 +709,7 @@ public void testGroupingNonIndexedOrStoredDocValues() throws Exception { "facet.query", "{!ex=chk key=LM3}bday:[2012-10-18T00:00:00Z TO 2013-01-17T23:59:59Z]"); assertJQ( req, - "/grouped=={'"+FOO_STRING_DOCVAL_FIELD+"':{'matches':2,'groups':[{'groupValue':'a','doclist':{'numFound':1,'start':0,'docs':[{'id':'5'}]}}]}}", + "/grouped=={'"+FOO_STRING_DOCVAL_FIELD+"':{'matches':2,'groups':[{'groupValue':'a','doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'5'}]}}]}}", "/facet_counts=={'facet_queries':{'LW1':2,'LM1':2,'LM3':2},'facet_fields':{}," + EMPTY_FACETS + "}" ); } @@ -730,10 +730,10 @@ public void testGroupingOnDateField() throws Exception { assertJQ(req(params, "group.field", "date_dt", "sort", "id asc"), "/grouped=={'date_dt':{'matches':5,'ngroups':4, 'groups':" + - "[{'groupValue':'2012-11-20T00:00:00Z','doclist':{'numFound':2,'start':0,'docs':[{'id':'1'},{'id':'3'}]}}," + - "{'groupValue':'2012-11-21T00:00:00Z','doclist':{'numFound':1,'start':0,'docs':[{'id':'2'}]}}," + - "{'groupValue':'2013-01-15T00:00:00Z','doclist':{'numFound':1,'start':0,'docs':[{'id':'4'}]}}," + - "{'groupValue':null,'doclist':{'numFound':1,'start':0,'docs':[{'id':'5'}]}}" + + "[{'groupValue':'2012-11-20T00:00:00Z','doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'1'},{'id':'3'}]}}," + + "{'groupValue':'2012-11-21T00:00:00Z','doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'2'}]}}," + + "{'groupValue':'2013-01-15T00:00:00Z','doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'4'}]}}," + + "{'groupValue':null,'doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'5'}]}}" + "]}}" ); } @@ -772,6 +772,7 @@ public void testRandomGrouping() throws Exception { 
types.add(new FldType("foo_bdv", ZERO_ONE, new BVal())); clearIndex(); + @SuppressWarnings({"rawtypes"}) Map model = indexDocs(types, null, indexSize); //System.out.println("############### model=" + model); @@ -848,6 +849,7 @@ public void testRandomGrouping() throws Exception { rows=1; start=0; group_offset=1; group_limit=1; } + @SuppressWarnings({"rawtypes"}) Map groups = groupBy(model.values(), groupField); // first sort the docs in each group @@ -888,7 +890,7 @@ public void testRandomGrouping() throws Exception { continue; } - for (Comparable field : doc.getValues(FOO_STRING_FIELD)) { + for (@SuppressWarnings({"rawtypes"})Comparable field : doc.getValues(FOO_STRING_FIELD)) { String key = field.toString(); boolean exists = facetCounts.containsKey(key); int count = exists ? facetCounts.get(key) : 0; @@ -896,6 +898,7 @@ public void testRandomGrouping() throws Exception { } } } + @SuppressWarnings({"rawtypes"}) List expectedFacetResponse = new ArrayList<>(); for (Map.Entry stringIntegerEntry : facetCounts.entrySet()) { expectedFacetResponse.add(stringIntegerEntry.getKey()); @@ -918,12 +921,8 @@ public void testRandomGrouping() throws Exception { Object realResponse = Utils.fromJSONString(strResponse); String err = JSONTestUtil.matchObj("/grouped/" + groupField, realResponse, modelResponse); if (err != null) { - log.error("GROUPING MISMATCH (" + queryIter + "): " + err - + "\n\trequest="+req - + "\n\tresult="+strResponse - + "\n\texpected="+ Utils.toJSONString(modelResponse) - + "\n\tsorted_model="+ sortedGroups - ); + log.error("GROUPING MISMATCH ({}}): {}\n\trequest={}\n\tresult={}\n\texpected={}\n\tsorted_model={}" + , queryIter, err, req, strResponse, Utils.toJSONString(modelResponse), sortedGroups); // re-execute the request... good for putting a breakpoint here for debugging String rsp = h.query(req); @@ -934,12 +933,8 @@ public void testRandomGrouping() throws Exception { // assert post / pre grouping facets err = JSONTestUtil.matchObj("/facet_counts/facet_fields/"+FOO_STRING_FIELD, realResponse, expectedFacetResponse); if (err != null) { - log.error("GROUPING MISMATCH (" + queryIter + "): " + err - + "\n\trequest="+req - + "\n\tresult="+strResponse - + "\n\texpected="+ Utils.toJSONString(expectedFacetResponse) - ); - + log.error("GROUPING MISMATCH ({}): {}\n\trequest={}\n\tresult={}\n\texpected={}" + , queryIter, err, req, strResponse, Utils.toJSONString(expectedFacetResponse)); // re-execute the request... 
good for putting a breakpoint here for debugging h.query(req); fail(err); @@ -947,6 +942,31 @@ public void testRandomGrouping() throws Exception { } // end query iter } // end index iter + } + + @Test + public void testGroupWithMinExactHitCount() throws Exception { + final int NUM_DOCS = 20; + for (int i = 0; i < NUM_DOCS ; i++) { + assertU(adoc("id", String.valueOf(i), FOO_STRING_FIELD, "Book1")); + assertU(commit()); + } + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set("q", FOO_STRING_FIELD + ":Book1"); + assertQ(req(params, CommonParams.MIN_EXACT_COUNT, "2", CommonParams.ROWS, "2") + ,"/response/result[@numFoundExact='false']" + ); + params.set("group", true); + params.set("group.field", FOO_STRING_FIELD); + assertQ(req(params) + ,"/response/lst[@name='grouped']/lst[@name='"+FOO_STRING_FIELD+"']/arr[@name='groups']/lst[1]/result[@numFoundExact='true']" + ); + + assertQ(req(params, CommonParams.MIN_EXACT_COUNT, "2", CommonParams.ROWS, "2") + ,"/response/lst[@name='grouped']/lst[@name='"+FOO_STRING_FIELD+"']/arr[@name='groups']/lst[1]/result[@numFoundExact='true']" + ); + + } public static Object buildGroupedResult(IndexSchema schema, List sortedGroups, int start, int rows, int group_offset, int group_limit, boolean includeNGroups) { @@ -960,7 +980,7 @@ public static Object buildGroupedResult(IndexSchema schema, List sortedGrou if (includeNGroups) { result.put("ngroups", sortedGroups.size()); } - List groupList = new ArrayList(); + List> groupList = new ArrayList<>(); result.put("groups", groupList); for (int i=start; i sortedGrou group.put("doclist", resultSet); resultSet.put("numFound", grp.docs.size()); resultSet.put("start", group_offset); - List docs = new ArrayList(); + resultSet.put("numFoundExact", true); + List> docs = new ArrayList<>(); resultSet.put("docs", docs); for (int j=group_offset; j= group_limit) break; @@ -1005,6 +1026,7 @@ public static Comparator createFirstDocComparator(final Comparator doc }; } + @SuppressWarnings({"rawtypes"}) public static Map groupBy(Collection docs, String field) { Map groups = new HashMap<>(); for (Doc doc : docs) { @@ -1037,6 +1059,7 @@ public static Map groupBy(Collection docs, String field) { public static class Grp { + @SuppressWarnings({"rawtypes"}) public Comparable groupValue; public List docs; public Doc maxDoc; // the document highest according to the "sort" param diff --git a/solr/core/src/test/org/apache/solr/TestJoin.java b/solr/core/src/test/org/apache/solr/TestJoin.java index 1e0a676dafc1..3cd8fc2404e8 100644 --- a/solr/core/src/test/org/apache/solr/TestJoin.java +++ b/solr/core/src/test/org/apache/solr/TestJoin.java @@ -80,37 +80,37 @@ public void testJoinAllMethods() throws Exception { ModifiableSolrParams p = params("sort","id asc"); assertJQ(req(p, "q", buildJoinRequest(DEPT_FIELD, DEPT_ID_FIELD, "title:MTS"), "fl","id") - ,"/response=={'numFound':3,'start':0,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}" + ,"/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}" ); // empty from assertJQ(req(p, "q", buildJoinRequest("noexist_ss_dv", DEPT_ID_FIELD, "*:*", "fl","id")) - ,"/response=={'numFound':0,'start':0,'docs':[]}" + ,"/response=={'numFound':0,'start':0,'numFoundExact':true,'docs':[]}" ); // empty to assertJQ(req(p, "q", buildJoinRequest(DEPT_FIELD, "noexist_ss_dv", "*:*"), "fl","id") - ,"/response=={'numFound':0,'start':0,'docs':[]}" + ,"/response=={'numFound':0,'start':0,'numFoundExact':true,'docs':[]}" ); // self join... 
return everyone in same dept(s) as Dave assertJQ(req(p, "q", buildJoinRequest(DEPT_FIELD, DEPT_FIELD, "name:dave"), "fl","id") - ,"/response=={'numFound':3,'start':0,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}" + ,"/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}" ); // from single-value to multi-value assertJQ(req(p, "q", buildJoinRequest(DEPT_ID_FIELD, DEPT_FIELD, "text:develop"), "fl","id") - ,"/response=={'numFound':3,'start':0,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}" + ,"/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}" ); // from multi-value to single-value assertJQ(req(p, "q",buildJoinRequest(DEPT_FIELD, DEPT_ID_FIELD, "title:MTS"), "fl","id", "debugQuery","true") - ,"/response=={'numFound':3,'start':0,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}" + ,"/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}" ); // expected outcome for a sub query matching dave joined against departments final String davesDepartments = - "/response=={'numFound':2,'start':0,'docs':[{'id':'10'},{'id':'13'}]}"; + "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'13'}]}"; // straightforward query assertJQ(req(p, "q", buildJoinRequest(DEPT_FIELD, DEPT_ID_FIELD, "name:dave"), "fl","id"), @@ -134,7 +134,7 @@ public void testJoinAllMethods() throws Exception { // find people that develop stuff - but limit via filter query to a name of "john" // this tests filters being pushed down to queries (SOLR-3062) assertJQ(req(p, "q", buildJoinRequest(DEPT_ID_FIELD, DEPT_FIELD, "text:develop"), "fl","id", "fq", "name:john") - ,"/response=={'numFound':1,'start':0,'docs':[{'id':'1'}]}" + ,"/response=={'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'1'}]}" ); } @@ -171,12 +171,13 @@ public void testIndexJoin() throws Exception { // non-DV/text field.
assertJQ(req(p, "q","{!join from=title to=title}name:dave", "fl","id") - ,"/response=={'numFound':2,'start':0,'docs':[{'id':'3'},{'id':'4'}]}" + ,"/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'3'},{'id':'4'}]}" ); } @Test + @SuppressWarnings({"unchecked"}) public void testRandomJoin() throws Exception { int indexIter=50 * RANDOM_MULTIPLIER; int queryIter=50 * RANDOM_MULTIPLIER; @@ -207,7 +208,9 @@ public void testRandomJoin() throws Exception { types.add(new FldType("small_is_dv",ZERO_ONE, new IRange(0,5+indexSize/3))); clearIndex(); + @SuppressWarnings({"rawtypes"}) Map model = indexDocs(types, null, indexSize); + @SuppressWarnings({"rawtypes"}) Map>> pivots = new HashMap<>(); for (int qiter=0; qiter> pivot = pivots.get(fromField+"/"+toField); if (pivot == null) { pivot = createJoinMap(model, fromField, toField); @@ -235,10 +239,12 @@ public void testRandomJoin() throws Exception { } Collection fromDocs = model.values(); + @SuppressWarnings({"rawtypes"}) Set docs = join(fromDocs, pivot); List docList = new ArrayList<>(docs.size()); - for (Comparable id : docs) docList.add(model.get(id)); + for (@SuppressWarnings({"rawtypes"})Comparable id : docs) docList.add(model.get(id)); Collections.sort(docList, createComparator("_docid_",true,false,false,false)); + @SuppressWarnings({"rawtypes"}) List sortedDocs = new ArrayList(); for (Doc doc : docList) { if (sortedDocs.size() >= 10) break; @@ -248,6 +254,7 @@ public void testRandomJoin() throws Exception { Map resultSet = new LinkedHashMap<>(); resultSet.put("numFound", docList.size()); resultSet.put("start", 0); + resultSet.put("numFoundExact", true); resultSet.put("docs", sortedDocs); // todo: use different join queries for better coverage @@ -263,11 +270,8 @@ public void testRandomJoin() throws Exception { Object realResponse = Utils.fromJSONString(strResponse); String err = JSONTestUtil.matchObj("/response", realResponse, resultSet); if (err != null) { - log.error("JOIN MISMATCH: " + err - + "\n\trequest="+req - + "\n\tresult="+strResponse - + "\n\texpected="+ Utils.toJSONString(resultSet) - + "\n\tmodel="+ model + log.error("JOIN MISMATCH: {}\n\trequest={}\n\tresult={}\n\texpected={}\n\tmodel={}" + , err, req, strResponse, Utils.toJSONString(resultSet), model ); // re-execute the request... 
good for putting a breakpoint here for debugging @@ -281,6 +285,7 @@ public void testRandomJoin() throws Exception { } + @SuppressWarnings({"rawtypes"}) Map> createJoinMap(Map model, String fromField, String toField) { Map> id_to_id = new HashMap<>(); @@ -307,9 +312,12 @@ Map> createJoinMap(Map model, Strin } + @SuppressWarnings({"rawtypes"}) Set join(Collection input, Map> joinMap) { + @SuppressWarnings({"rawtypes"}) Set ids = new HashSet<>(); for (Doc doc : input) { + @SuppressWarnings({"rawtypes"}) Collection output = joinMap.get(doc.id); if (output == null) continue; ids.addAll(output); diff --git a/solr/core/src/test/org/apache/solr/TestRandomDVFaceting.java b/solr/core/src/test/org/apache/solr/TestRandomDVFaceting.java index aaeab54d5fc6..f8bb93e690f1 100644 --- a/solr/core/src/test/org/apache/solr/TestRandomDVFaceting.java +++ b/solr/core/src/test/org/apache/solr/TestRandomDVFaceting.java @@ -75,6 +75,7 @@ public static void beforeTests() throws Exception { int indexSize; List types; + @SuppressWarnings({"rawtypes"}) Map model = null; boolean validateResponses = true; @@ -123,7 +124,7 @@ void deleteSomeDocs() { int percent = rand.nextInt(100); if (model == null) return; ArrayList ids = new ArrayList<>(model.size()); - for (Comparable id : model.keySet()) { + for (@SuppressWarnings({"rawtypes"})Comparable id : model.keySet()) { if (rand.nextInt(100) < percent) { ids.add(id.toString()); } @@ -286,11 +287,8 @@ void doFacetTests(FldType ftype) throws Exception { for (int i=1; i types; + @SuppressWarnings({"rawtypes"}) Map model = null; boolean validateResponses = true; @@ -109,6 +110,7 @@ void init() { types.add(new FldType("bool_b",ZERO_ONE, new Vals(){ @Override + @SuppressWarnings({"rawtypes"}) public Comparable get() { return random().nextBoolean(); } @@ -125,7 +127,7 @@ void deleteSomeDocs() { int percent = rand.nextInt(100); if (model == null) return; ArrayList ids = new ArrayList<>(model.size()); - for (Comparable id : model.keySet()) { + for (@SuppressWarnings({"rawtypes"})Comparable id : model.keySet()) { if (rand.nextInt(100) < percent) { ids.add(id.toString()); } @@ -316,11 +318,8 @@ private void validateResponse(String expected, String actual, ModifiableSolrPara String err = JSONTestUtil.match("/", actual, expected, 0.0); if (err != null) { - log.error("ERROR: mismatch facet response: " + err + - "\n expected =" + expected + - "\n response = " + actual + - "\n request = " + params - ); + log.error("ERROR: mismatch facet response: {}\n expected ={}\n response = {}\n request = {}" + , err, expected, actual, params); fail(err); } } @@ -330,6 +329,7 @@ private void validateResponse(String expected, String actual, ModifiableSolrPara * then all vals with 0 , and then missing count with null label, * in the implementation below they are called three stratas * */ + @SuppressWarnings({"unchecked"}) private String getExpectationForSortByCount( ModifiableSolrParams params, List methods) throws Exception { String indexSortedResponse = getIndexSortedAllFacetValues(params, methods); @@ -345,8 +345,8 @@ public List get(Object key) { } }; - for (Iterator iterator = facetSortedByIndex.iterator(); iterator.hasNext();) { - Object label = (Object) iterator.next(); + for (@SuppressWarnings({"rawtypes"})Iterator iterator = facetSortedByIndex.iterator(); iterator.hasNext();) { + Object label = iterator.next(); Long count = (Long) iterator.next(); final Integer strata; if (label==null) { // missing (here "stratas" seems like overengineering ) @@ -363,6 +363,7 @@ public List get(Object key) { 
facet.add(label); facet.add(count); } + @SuppressWarnings({"rawtypes"}) List stratified =new ArrayList<>(); for(Integer s : new Integer[]{1, 0}) { // non-zero capped to one goes first, zeroes go then stratified.addAll(stratas.get(s)); @@ -439,17 +440,22 @@ private String capFacetCountsTo1(String expected) throws IOException { }); } + @SuppressWarnings({"unchecked"}) private String transformFacetFields(String expected, Consumer> consumer) throws IOException { Object json = Utils.fromJSONString(expected); + @SuppressWarnings({"rawtypes"}) Map facet_fields = getFacetFieldMap(json); + @SuppressWarnings({"rawtypes"}) Set entries = facet_fields.entrySet(); for (Object facetTuples : entries) { //despite there should be only one field + @SuppressWarnings({"rawtypes"}) Entry entry = (Entry)facetTuples; consumer.accept(entry); } return Utils.toJSONString(json); } + @SuppressWarnings({"rawtypes"}) private Map getFacetFieldMap(Object json) { Object facet_counts = ((Map)json).get("facet_counts"); Map facet_fields = (Map) ((Map)facet_counts).get("facet_fields"); diff --git a/solr/core/src/test/org/apache/solr/TestSolrCoreProperties.java b/solr/core/src/test/org/apache/solr/TestSolrCoreProperties.java index d3ad78b74801..513626ae5cbe 100644 --- a/solr/core/src/test/org/apache/solr/TestSolrCoreProperties.java +++ b/solr/core/src/test/org/apache/solr/TestSolrCoreProperties.java @@ -93,6 +93,7 @@ public void testSimple() throws Exception { QueryResponse res = getSolrClient().query(params); assertEquals(0, res.getResults().getNumFound()); + @SuppressWarnings({"rawtypes"}) NamedList echoedParams = (NamedList) res.getHeader().get("params"); assertEquals("f1", echoedParams.get("p1")); assertEquals("f2", echoedParams.get("p2")); diff --git a/solr/core/src/test/org/apache/solr/analysis/TokenizerChainTest.java b/solr/core/src/test/org/apache/solr/analysis/TokenizerChainTest.java index d40911975b2d..d41e64108ae8 100644 --- a/solr/core/src/test/org/apache/solr/analysis/TokenizerChainTest.java +++ b/solr/core/src/test/org/apache/solr/analysis/TokenizerChainTest.java @@ -29,6 +29,7 @@ public class TokenizerChainTest extends SolrTestCaseJ4 { @Test + @SuppressWarnings({"unchecked"}) public void testNormalization() throws Exception { String fieldName = "f"; TokenFilterFactory[] tff = new TokenFilterFactory[2]; diff --git a/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerAdminHandler.java b/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerAdminHandler.java index 6df6a6bf6b24..b5faba240976 100644 --- a/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerAdminHandler.java +++ b/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerAdminHandler.java @@ -32,6 +32,7 @@ public class TestEmbeddedSolrServerAdminHandler extends SolrTestCaseJ4 { @Test + @SuppressWarnings({"rawtypes"}) public void testPathIsAddedToContext() throws IOException, SolrServerException { final NodeConfig config = new NodeConfig.NodeConfigBuilder("testnode", TEST_PATH()) diff --git a/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerConstructors.java b/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerConstructors.java index 951853248dd2..912a35c802e7 100644 --- a/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerConstructors.java +++ b/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerConstructors.java @@ -30,6 +30,7 @@ public 
class TestEmbeddedSolrServerConstructors extends SolrTestCaseJ4 { @Test + @SuppressWarnings({"try"}) public void testPathConstructor() throws IOException { Path path = Paths.get(TEST_HOME()); try (EmbeddedSolrServer server = new EmbeddedSolrServer(path, "collection1")) { diff --git a/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java index f93600df71e8..e9b905d72bfe 100644 --- a/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java @@ -94,6 +94,7 @@ public void tearDown() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testProperties() throws Exception { CollectionAdminRequest.createCollection("collection1meta", "conf", 2, 1).process(cluster.getSolrClient()); CollectionAdminRequest.createCollection("collection2meta", "conf", 1, 1).process(cluster.getSolrClient()); diff --git a/solr/core/src/test/org/apache/solr/cloud/AssignBackwardCompatibilityTest.java b/solr/core/src/test/org/apache/solr/cloud/AssignBackwardCompatibilityTest.java index c1ba9721449f..54f535bdc071 100644 --- a/solr/core/src/test/org/apache/solr/cloud/AssignBackwardCompatibilityTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/AssignBackwardCompatibilityTest.java @@ -66,7 +66,9 @@ public void test() throws IOException, SolrServerException, KeeperException, Int boolean clearedCounter = false; for (int i = 0; i < numOperations; i++) { - log.info("Collection counter={} i={}", getCounter(), i); + if (log.isInfoEnabled()) { + log.info("Collection counter={} i={}", getCounter(), i); + } boolean deleteReplica = random().nextBoolean() && numLiveReplicas > 1; // No need to clear counter more than one time if (random().nextBoolean() && i > 5 && !clearedCounter) { diff --git a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java index 413e55af302e..d5db36acf745 100644 --- a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java @@ -795,6 +795,7 @@ protected CollectionAdminResponse createCollection(Map> co } params.set("name", collectionName); params.set("collection.configName", configSetName); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); @@ -902,6 +903,7 @@ private void doOptimisticLockingAndUpdating() throws Exception { QueryRequest qr = new QueryRequest(params("qt", "/get", "id","1000")); for (SolrClient client : clients) { val += 10; + @SuppressWarnings({"rawtypes"}) NamedList rsp = client.request(qr); String match = JSONTestUtil.matchObj("/val_i", rsp.get("doc"), expected); if (match != null) throw new RuntimeException(match); @@ -943,8 +945,11 @@ private Long getNumCommits(HttpSolrClient sourceClient) throws // use generic request to avoid extra processing of queries QueryRequest req = new QueryRequest(params); NamedList resp = client.request(req); + @SuppressWarnings({"rawtypes"}) NamedList metrics = (NamedList) resp.get("metrics"); + @SuppressWarnings({"rawtypes"}) NamedList uhandlerCat = (NamedList) metrics.getVal(0); + @SuppressWarnings({"unchecked"}) Map commits = (Map) uhandlerCat.get("UPDATE.updateHandler.commits"); return (Long) commits.get("count"); } @@ -1150,9 +1155,11 @@ private void createCollection(String collection, createSolrCore(collection, collectionClients, 
baseUrl, num, null); } + @SuppressWarnings({"unchecked"}) private void createSolrCore(final String collection, List collectionClients, final String baseUrl, final int num, final String shardId) { + @SuppressWarnings({"rawtypes"}) Callable call = () -> { try (HttpSolrClient client = getHttpSolrClient(baseUrl)) { // client.setConnectionTimeout(15000); @@ -1263,6 +1270,7 @@ protected void indexDoc(String collection, SolrInputDocument doc) throws IOExcep client.add(doc); } + @SuppressWarnings({"unchecked"}) private void createNewCollection(final String collection) throws InterruptedException { try { assertEquals(0, CollectionAdminRequest @@ -1279,6 +1287,7 @@ private void createNewCollection(final String collection) throws InterruptedExce for (final JettySolrRunner runner : jettys) { unique++; final int frozeUnique = unique; + @SuppressWarnings({"rawtypes"}) Callable call = () -> { try { diff --git a/solr/core/src/test/org/apache/solr/cloud/BasicZkTest.java b/solr/core/src/test/org/apache/solr/cloud/BasicZkTest.java deleted file mode 100644 index d3fec26e6cf7..000000000000 --- a/solr/core/src/test/org/apache/solr/cloud/BasicZkTest.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.solr.cloud; - -import java.util.Map; - -import com.codahale.metrics.Gauge; -import com.codahale.metrics.Metric; -import org.apache.lucene.util.LuceneTestCase.Slow; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.params.CommonParams; -import org.apache.solr.common.params.ModifiableSolrParams; -import org.apache.solr.core.SolrCore; -import org.apache.solr.request.LocalSolrQueryRequest; -import org.apache.solr.request.SolrQueryRequest; -import org.junit.BeforeClass; -import org.junit.Test; - -/** - * This test is not fully functional - the port registered is illegal - - * so you cannot hit this with http - a nice side benifit is that it will - * detect if a node is trying to do an update to itself with http - it shouldn't - * do that. 
- */ -@Slow -public class BasicZkTest extends AbstractZkTestCase { - - @BeforeClass - public static void beforeClass() { - - } - - @Test - public void testBasic() throws Exception { - - // test using ZooKeeper - assertTrue("Not using ZooKeeper", h.getCoreContainer().isZooKeeperAware()); - - // for the really slow/busy computer, we wait to make sure we have a leader before starting - h.getCoreContainer().getZkController().getZkStateReader().getLeaderUrl("collection1", "shard1", 30000); - - ZkController zkController = h.getCoreContainer().getZkController(); - - SolrCore core = h.getCore(); - - // test that we got the expected config, not just hardcoded defaults - assertNotNull(core.getRequestHandler("/mock")); - - lrf.args.put(CommonParams.VERSION, "2.2"); - assertQ("test query on empty index", request("qlkciyopsbgzyvkylsjhchghjrdf"), - "//result[@numFound='0']"); - - // test escaping of ";" - assertU("deleting 42 for no reason at all", delI("42")); - assertU("adding doc#42", adoc("id", "42", "val_s", "aa;bb")); - assertU("does commit work?", commit()); - - assertQ("backslash escaping semicolon", request("id:42 AND val_s:aa\\;bb"), - "//*[@numFound='1']", "//str[@name='id'][.='42']"); - - assertQ("quote escaping semicolon", request("id:42 AND val_s:\"aa;bb\""), - "//*[@numFound='1']", "//str[@name='id'][.='42']"); - - assertQ("no escaping semicolon", request("id:42 AND val_s:aa"), - "//*[@numFound='0']"); - - assertU(delI("42")); - assertU(commit()); - assertQ(request("id:42"), "//*[@numFound='0']"); - - // test overwrite default of true - - assertU(adoc("id", "42", "val_s", "AAA")); - assertU(adoc("id", "42", "val_s", "BBB")); - assertU(commit()); - assertQ(request("id:42"), "//*[@numFound='1']", "//str[.='BBB']"); - assertU(adoc("id", "42", "val_s", "CCC")); - assertU(adoc("id", "42", "val_s", "DDD")); - assertU(commit()); - assertQ(request("id:42"), "//*[@numFound='1']", "//str[.='DDD']"); - - // test deletes - String[] adds = new String[] { add(doc("id", "101"), "overwrite", "true"), - add(doc("id", "101"), "overwrite", "true"), - add(doc("id", "105"), "overwrite", "false"), - add(doc("id", "102"), "overwrite", "true"), - add(doc("id", "103"), "overwrite", "false"), - add(doc("id", "101"), "overwrite", "true"), }; - for (String a : adds) { - assertU(a, a); - } - assertU(commit()); - int zkPort = zkServer.getPort(); - - zkServer.shutdown(); - - // document indexing shouldn't stop immediately after a ZK disconnect - assertU(adoc("id", "201")); - - Thread.sleep(300); - - // try a reconnect from disconnect - zkServer = new ZkTestServer(zkDir, zkPort); - zkServer.run(false); - - Thread.sleep(300); - - // ensure zk still thinks node is up - assertTrue( - zkController.getClusterState().getLiveNodes().toString(), - zkController.getClusterState().liveNodesContain( - zkController.getNodeName())); - - // test maxint - assertQ(request("q", "id:[100 TO 110]", "rows", "2147483647"), - "//*[@numFound='4']"); - - // test big limit - assertQ(request("q", "id:[100 TO 111]", "rows", "1147483647"), - "//*[@numFound='4']"); - - assertQ(request("id:[100 TO 110]"), "//*[@numFound='4']"); - assertU(delI("102")); - assertU(commit()); - assertQ(request("id:[100 TO 110]"), "//*[@numFound='3']"); - assertU(delI("105")); - assertU(commit()); - assertQ(request("id:[100 TO 110]"), "//*[@numFound='2']"); - assertU(delQ("id:[100 TO 110]")); - assertU(commit()); - assertQ(request("id:[100 TO 110]"), "//*[@numFound='0']"); - - - - // SOLR-2651: test that reload still gets config files from zookeeper - 
zkController.getZkClient().setData("/configs/conf1/solrconfig.xml", new byte[0], true); - - // we set the solrconfig to nothing, so this reload should fail - SolrException e = expectThrows(SolrException.class, - "The reloaded SolrCore did not pick up configs from zookeeper", - () -> { - ignoreException("solrconfig.xml"); - h.getCoreContainer().reload(h.getCore().getName()); - }); - resetExceptionIgnores(); - assertTrue(e.getMessage().contains("Unable to reload core [collection1]")); - assertTrue(e.getCause().getMessage().contains("Error loading solr config from solrconfig.xml")); - - // test stats call - Map metrics = h.getCore().getCoreMetricManager().getRegistry().getMetrics(); - assertEquals("collection1", ((Gauge)metrics.get("CORE.coreName")).getValue()); - assertEquals("collection1", ((Gauge)metrics.get("CORE.collection")).getValue()); - assertEquals("shard1", ((Gauge)metrics.get("CORE.shard")).getValue()); - assertTrue(metrics.get("CORE.refCount") != null); - - //zkController.getZkClient().printLayoutToStdOut(); - } - - public SolrQueryRequest request(String... q) { - LocalSolrQueryRequest req = lrf.makeRequest(q); - ModifiableSolrParams params = new ModifiableSolrParams(); - params.add(req.getParams()); - params.set("distrib", false); - req.setParams(params); - return req; - } -} diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java index 3b7a67df2375..fbeaad1f49dc 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java @@ -141,6 +141,7 @@ protected CloudSolrClient createCloudClient(String defaultCollection, int socket @Test //05-Jul-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 09-Apr-2018 // commented out on: 24-Dec-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018 + @SuppressWarnings({"try"}) public void test() throws Exception { // None of the operations used here are particularly costly, so this should work. // Using this low timeout will also help us catch index stalling. 
diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java index 6fc66429b7e5..26b0c36a1d88 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java @@ -291,8 +291,10 @@ public void test() throws Exception { .getResults().getNumFound(); assertTrue("Found " + ctrlDocs + " control docs", cloudClientDocs > 0); - - log.info("collection state: " + printClusterStateInfo(DEFAULT_COLLECTION)); + + if (log.isInfoEnabled()) { + log.info("collection state: {}", printClusterStateInfo(DEFAULT_COLLECTION)); + } if (VERBOSE) System.out.println("control docs:" + controlClient.query(new SolrQuery("*:*")).getResults() diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java index 8f9abffc1f8c..e1e9a8705ee7 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java @@ -203,10 +203,11 @@ public void test() throws Exception { Thread.sleep(3000); waitForThingsToLevelOut(3, TimeUnit.MINUTES); - - log.info("control docs:" + controlClient.query(new SolrQuery("*:*")).getResults().getNumFound() + "\n\n"); - - log.info("collection state: " + printClusterStateInfo(DEFAULT_COLLECTION)); + + if (log.isInfoEnabled()) { + log.info("control docs:{}\n\n", controlClient.query(new SolrQuery("*:*")).getResults().getNumFound()); + log.info("collection state: {}", printClusterStateInfo(DEFAULT_COLLECTION)); // logOk + } waitForReplicationFromReplicas(DEFAULT_COLLECTION, cloudClient.getZkStateReader(), new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME)); // waitForAllWarmingSearchers(); diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java index c96d0d87aac6..5be91da63f14 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java @@ -260,14 +260,15 @@ private SolrZkClient electNewOverseer(String address) throws KeeperException, ZkStateReader reader = new ZkStateReader(zkClient); LeaderElector overseerElector = new LeaderElector(zkClient); UpdateShardHandler updateShardHandler = new UpdateShardHandler(UpdateShardHandlerConfig.DEFAULT); - // TODO: close Overseer - Overseer overseer = new Overseer((HttpShardHandler) new HttpShardHandlerFactory().getShardHandler(), updateShardHandler, "/admin/cores", - reader, null, new CloudConfig.CloudConfigBuilder("127.0.0.1", 8983, "solr").build()); - overseer.close(); - ElectionContext ec = new OverseerElectionContext(zkClient, overseer, - address.replaceAll("/", "_")); - overseerElector.setup(ec); - overseerElector.joinElection(ec, false); + try (HttpShardHandlerFactory hshf = new HttpShardHandlerFactory()) { + Overseer overseer = new Overseer((HttpShardHandler) hshf.getShardHandler(), updateShardHandler, "/admin/cores", + reader, null, new CloudConfig.CloudConfigBuilder("127.0.0.1", 8983, "solr").build()); + overseer.close(); + ElectionContext ec = new OverseerElectionContext(zkClient, overseer, + address.replaceAll("/", "_")); + 
overseerElector.setup(ec); + overseerElector.joinElection(ec, false); + } reader.close(); return zkClient; } diff --git a/solr/core/src/test/org/apache/solr/cloud/CloudTestUtils.java b/solr/core/src/test/org/apache/solr/cloud/CloudTestUtils.java index f86a6bae86bd..07e279b7992f 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CloudTestUtils.java +++ b/solr/core/src/test/org/apache/solr/cloud/CloudTestUtils.java @@ -64,6 +64,7 @@ public static long waitForTriggerToBeScheduled(final SolrCloudManager cloudManag TimeOut timeout = new TimeOut(DEFAULT_TIMEOUT, TimeUnit.SECONDS, cloudManager.getTimeSource()); while (!timeout.hasTimedOut()) { final SolrResponse response = cloudManager.request(AutoScalingRequest.create(SolrRequest.METHOD.GET, null)); + @SuppressWarnings({"unchecked"}) final Map triggers = (Map) response.getResponse().get("triggers"); Assert.assertNotNull("null triggers in response from autoscaling request", triggers); @@ -106,6 +107,7 @@ public static void assertAutoScalingRequest(final SolrCloudManager cloudManager, final String json) throws IOException { // TODO: a lot of code that directly uses AutoScalingRequest.create should use this method + @SuppressWarnings({"rawtypes"}) final SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, json); final SolrResponse rsp = cloudManager.request(req); final String result = rsp.getResponse().get("result").toString(); @@ -117,6 +119,7 @@ public static void assertAutoScalingRequest(final SolrCloudManager cloudManager, /** * Helper class for sending (JSON) autoscaling requests that can randomize between V1 and V2 requests */ + @SuppressWarnings({"rawtypes"}) public static class AutoScalingRequest extends SolrRequest { private SolrParams params = null; /** @@ -125,6 +128,7 @@ public static class AutoScalingRequest extends SolrRequest { * @param m HTTP Method to use * @param message JSON payload, may be null */ + @SuppressWarnings({"rawtypes"}) public static SolrRequest create(SolrRequest.METHOD m, String message) { return create(m, null, message); } @@ -136,10 +140,12 @@ public static SolrRequest create(SolrRequest.METHOD m, String message) { * otherwise must start with "/" * @param message JSON payload, may be null */ + @SuppressWarnings({"rawtypes"}) public static SolrRequest create(SolrRequest.METHOD m, String subPath, String message) { return create(m,subPath,message,null); } + @SuppressWarnings({"rawtypes"}) public static SolrRequest create(SolrRequest.METHOD m, String subPath, String message, SolrParams params) { final boolean useV1 = LuceneTestCase.random().nextBoolean(); String path = useV1 ?
"/admin/autoscaling" : "/cluster/autoscaling"; diff --git a/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java b/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java index f41d80a59f9b..87629d2fc0cd 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java +++ b/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java @@ -184,7 +184,7 @@ public static ZkStateReader buildClusterState(String clusterDescription, int rep } } - ClusterState clusterState = new ClusterState(1, new HashSet<>(Arrays.asList(liveNodes)), collectionStates); + ClusterState clusterState = new ClusterState(new HashSet<>(Arrays.asList(liveNodes)), collectionStates); MockZkStateReader reader = new MockZkStateReader(clusterState, collectionStates.keySet()); String json; diff --git a/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java b/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java index 5606c5b4d7f4..f6e74da40624 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java @@ -56,10 +56,10 @@ public void testStoreAndRead() throws Exception { collectionStates.put("collection1", new DocCollection("collection1", slices, null, DocRouter.DEFAULT)); collectionStates.put("collection2", new DocCollection("collection2", slices, null, DocRouter.DEFAULT)); - ClusterState clusterState = new ClusterState(-1,liveNodes, collectionStates); + ClusterState clusterState = new ClusterState(liveNodes, collectionStates); byte[] bytes = Utils.toJSON(clusterState); // System.out.println("#################### " + new String(bytes)); - ClusterState loadedClusterState = ClusterState.load(-1, bytes, liveNodes); + ClusterState loadedClusterState = ClusterState.createFromJson(-1, bytes, liveNodes); assertEquals("Provided liveNodes not used properly", 2, loadedClusterState .getLiveNodes().size()); @@ -67,13 +67,13 @@ public void testStoreAndRead() throws Exception { assertEquals("Properties not copied properly", replica.getStr("prop1"), loadedClusterState.getCollection("collection1").getSlice("shard1").getReplicasMap().get("node1").getStr("prop1")); assertEquals("Properties not copied properly", replica.getStr("prop2"), loadedClusterState.getCollection("collection1").getSlice("shard1").getReplicasMap().get("node1").getStr("prop2")); - loadedClusterState = ClusterState.load(-1, new byte[0], liveNodes); + loadedClusterState = ClusterState.createFromJson(-1, new byte[0], liveNodes); assertEquals("Provided liveNodes not used properly", 2, loadedClusterState .getLiveNodes().size()); assertEquals("Should not have collections", 0, loadedClusterState.getCollectionsMap().size()); - loadedClusterState = ClusterState.load(-1, (byte[])null, liveNodes); + loadedClusterState = ClusterState.createFromJson(-1, (byte[])null, liveNodes); assertEquals("Provided liveNodes not used properly", 2, loadedClusterState .getLiveNodes().size()); diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionPropsTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionPropsTest.java index 3892bc6f0a75..1a6d54e3bc92 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CollectionPropsTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/CollectionPropsTest.java @@ -50,11 +50,7 @@ public class CollectionPropsTest extends SolrCloudTestCase { @BeforeClass public static void setupClass() throws Exception { - Boolean useLegacyCloud = rarely(); - log.info("Using legacyCloud?: {}", useLegacyCloud); - 
configureCluster(4) - .withProperty(ZkStateReader.LEGACY_CLOUD, String.valueOf(useLegacyCloud)) .addConfig("conf", configset("cloud-minimal")) .configure(); } @@ -205,7 +201,9 @@ public void testWatcher() throws KeeperException, InterruptedException, IOExcept // Trigger a value change event log.info("setting value2"); collectionProps.setCollectionProperty(collectionName, "property", "value2"); - log.info("(value2) waitForTrigger=={}", watcher.waitForTrigger()); + if (log.isInfoEnabled()) { + log.info("(value2) waitForTrigger=={}", watcher.waitForTrigger()); + } assertEquals("value2", watcher.getProps().get("property")); // Delete the properties znode @@ -286,6 +284,7 @@ public Watcher(final String name, final boolean forceReadPropsFromZk) { } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public boolean onStateChanged(Map collectionProperties) { log.info("{}: state changed...", name); if (forceReadPropsFromZk) { diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionStateFormat2Test.java b/solr/core/src/test/org/apache/solr/cloud/CollectionStateZnodeTest.java similarity index 89% rename from solr/core/src/test/org/apache/solr/cloud/CollectionStateFormat2Test.java rename to solr/core/src/test/org/apache/solr/cloud/CollectionStateZnodeTest.java index 04da1f53a344..6033e1ee0fcf 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CollectionStateFormat2Test.java +++ b/solr/core/src/test/org/apache/solr/cloud/CollectionStateZnodeTest.java @@ -24,7 +24,7 @@ import org.junit.BeforeClass; import org.junit.Test; -public class CollectionStateFormat2Test extends SolrCloudTestCase { +public class CollectionStateZnodeTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { @@ -48,7 +48,7 @@ public void testZkNodeLocation() throws Exception { cluster.waitForActiveCollection(collectionName, 2, 4); waitForState("Collection not created", collectionName, (n, c) -> DocCollection.isFullyActive(n, c, 2, 2)); - assertTrue("State Format 2 collection path does not exist", + assertTrue("Collection path does not exist", zkClient().exists(ZkStateReader.getCollectionPath(collectionName), true)); Stat stat = new Stat(); @@ -57,13 +57,12 @@ public void testZkNodeLocation() throws Exception { DocCollection c = getCollectionState(collectionName); assertEquals("DocCollection version should equal the znode version", stat.getVersion(), c.getZNodeVersion() ); - assertTrue("DocCollection#getStateFormat() must be > 1", c.getStateFormat() > 1); // remove collection CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient()); waitForState("Collection not deleted", collectionName, (n, coll) -> coll == null); - assertFalse("collection state should not exist externally", + assertFalse("collection state should not exist", zkClient().exists(ZkStateReader.getCollectionPath(collectionName), true)); } diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java index 4db1152a901e..f7435736cb16 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java @@ -38,6 +38,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; +import com.google.common.collect.ImmutableList; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.SolrClient; @@ -75,8 +76,6 @@ 
import org.junit.After; import org.junit.Before; import org.junit.Test; - -import com.google.common.collect.ImmutableList; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -93,10 +92,6 @@ public void beforeTest() throws Exception { // clear any persisted auto scaling configuration zkClient().setData(SOLR_AUTOSCALING_CONF_PATH, Utils.toJSON(new ZkNodeProps()), true); - - final ClusterProperties props = new ClusterProperties(zkClient()); - CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, null).process(cluster.getSolrClient()); - assertEquals("Cluster property was not unset", props.getClusterProperty(ZkStateReader.LEGACY_CLOUD, null), null); } @After @@ -148,6 +143,7 @@ public void testCreateCollWithDefaultClusterPropertiesOldFormat() throws Excepti .process(cluster.getSolrClient()); for (int i = 0; i < 300; i++) { + @SuppressWarnings({"rawtypes"}) Map m = cluster.getSolrClient().getZkStateReader().getClusterProperty(COLLECTION_DEF, null); if (m != null) break; Thread.sleep(10); @@ -232,6 +228,7 @@ public void testCreateCollWithDefaultClusterPropertiesNewFormat() throws Excepti .process(cluster.getSolrClient()); for (int i = 0; i < 300; i++) { + @SuppressWarnings({"rawtypes"}) Map m = cluster.getSolrClient().getZkStateReader().getClusterProperty(COLLECTION_DEF, null); if (m != null) break; Thread.sleep(10); @@ -306,7 +303,6 @@ public void testCreateCollWithDefaultClusterPropertiesNewFormat() throws Excepti public void testCreateAndDeleteCollection() throws Exception { String collectionName = "solrj_test"; CollectionAdminResponse response = CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2) - .setStateFormat(1) .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); @@ -328,24 +324,21 @@ public void testCreateAndDeleteCollection() throws Exception { waitForState("Expected " + collectionName + " to disappear from cluster state", collectionName, (n, c) -> c == null); - // Test Creating a collection with new stateformat. - collectionName = "solrj_newstateformat"; + // Test Creating a new collection. 
+ collectionName = "solrj_test2"; response = CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2) - .setStateFormat(2) .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); assertTrue(response.isSuccess()); waitForState("Expected " + collectionName + " to appear in cluster state", collectionName, (n, c) -> c != null); - } @Test public void testCloudInfoInCoreStatus() throws IOException, SolrServerException { String collectionName = "corestatus_test"; CollectionAdminResponse response = CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2) - .setStateFormat(1) .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); @@ -483,6 +476,7 @@ public void testCreateCollectionWithPropertyParam() throws Exception { Path tmpDir = createTempDir("testPropertyParamsForCreate"); Path dataDir = tmpDir.resolve("dataDir-" + TestUtil.randomSimpleString(random(), 1, 5)); Path ulogDir = tmpDir.resolve("ulogDir-" + TestUtil.randomSimpleString(random(), 1, 5)); + cluster.getJettySolrRunners().forEach(j -> j.getCoreContainer().getAllowPaths().add(tmpDir)); CollectionAdminResponse response = CollectionAdminRequest.createCollection(collectionName, "conf", 1, 1) .withProperty(CoreAdminParams.DATA_DIR, dataDir.toString()) @@ -558,21 +552,21 @@ public void testClusterProp() throws InterruptedException, IOException, SolrServ // sanity check our expected default final ClusterProperties props = new ClusterProperties(zkClient()); assertEquals("Expecting prop to default to unset, test needs upated", - props.getClusterProperty(ZkStateReader.LEGACY_CLOUD, null), null); + props.getClusterProperty(ZkStateReader.AUTO_ADD_REPLICAS, null), null); - CollectionAdminResponse response = CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "true") + CollectionAdminResponse response = CollectionAdminRequest.setClusterProperty(ZkStateReader.AUTO_ADD_REPLICAS, "true") .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); - assertEquals("Cluster property was not set", props.getClusterProperty(ZkStateReader.LEGACY_CLOUD, null), "true"); + assertEquals("Cluster property was not set", props.getClusterProperty(ZkStateReader.AUTO_ADD_REPLICAS, null), "true"); // Unset ClusterProp that we set. 
- CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, null).process(cluster.getSolrClient()); - assertEquals("Cluster property was not unset", props.getClusterProperty(ZkStateReader.LEGACY_CLOUD, null), null); + CollectionAdminRequest.setClusterProperty(ZkStateReader.AUTO_ADD_REPLICAS, null).process(cluster.getSolrClient()); + assertEquals("Cluster property was not unset", props.getClusterProperty(ZkStateReader.AUTO_ADD_REPLICAS, null), null); - response = CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "false") + response = CollectionAdminRequest.setClusterProperty(ZkStateReader.AUTO_ADD_REPLICAS, "false") .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); - assertEquals("Cluster property was not set", props.getClusterProperty(ZkStateReader.LEGACY_CLOUD, null), "false"); + assertEquals("Cluster property was not set", props.getClusterProperty(ZkStateReader.AUTO_ADD_REPLICAS, null), "false"); } @Test @@ -651,9 +645,11 @@ public void testColStatus() throws Exception { req.setWithSizeInfo(true); CollectionAdminResponse rsp = req.process(cluster.getSolrClient()); assertEquals(0, rsp.getStatus()); + @SuppressWarnings({"unchecked"}) List nonCompliant = (List)rsp.getResponse().findRecursive(collectionName, "schemaNonCompliant"); assertEquals(nonCompliant.toString(), 1, nonCompliant.size()); assertTrue(nonCompliant.toString(), nonCompliant.contains("(NONE)")); + @SuppressWarnings({"unchecked"}) NamedList segInfos = (NamedList) rsp.getResponse().findRecursive(collectionName, "shards", "shard1", "leader", "segInfos"); assertNotNull(Utils.toJSONString(rsp), segInfos.findRecursive("info", "core", "startTime")); assertNotNull(Utils.toJSONString(rsp), segInfos.get("fieldInfoLegend")); diff --git a/solr/core/src/test/org/apache/solr/cloud/ConfigSetsAPITest.java b/solr/core/src/test/org/apache/solr/cloud/ConfigSetsAPITest.java index 26d77b7bae7b..0053fd9307d0 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ConfigSetsAPITest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ConfigSetsAPITest.java @@ -61,6 +61,7 @@ public void testConfigSetDeleteWhenInUse() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testSharedSchema() throws Exception { CollectionAdminRequest.createCollection("col1", "cShare", 1, 1) .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT); diff --git a/solr/core/src/test/org/apache/solr/cloud/CreateRoutedAliasTest.java b/solr/core/src/test/org/apache/solr/cloud/CreateRoutedAliasTest.java index 9833e908913e..afb13b24e06a 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CreateRoutedAliasTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/CreateRoutedAliasTest.java @@ -153,7 +153,6 @@ public void testV2() throws Exception { assertEquals(1, coll.getNumTlogReplicas().intValue()); // per-shard assertEquals(1, coll.getNumPullReplicas().intValue()); // per-shard assertEquals(4, coll.getMaxShardsPerNode()); - //TODO SOLR-11877 assertEquals(2, coll.getStateFormat()); assertTrue("nodeSet didn't work?", coll.getSlices().stream().flatMap(s -> s.getReplicas().stream()) .map(Replica::getNodeName).allMatch(createNode::equals)); @@ -200,7 +199,6 @@ public void testV1() throws Exception { assertEquals("foo_s", ((Map)coll.get("router")).get("field")); assertEquals(1, coll.getSlices().size()); // numShards assertEquals(2, coll.getReplicationFactor().intValue()); // num replicas - //TODO SOLR-11877 assertEquals(2, coll.getStateFormat()); // Test Alias metadata Aliases aliases = 
cluster.getSolrClient().getZkStateReader().getAliases(); diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java index 33a1a55955da..0edc7bef322c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java @@ -27,7 +27,6 @@ import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; -import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.util.TimeSource; import org.apache.solr.core.CoreDescriptor; import org.apache.solr.core.SolrCore; @@ -46,7 +45,6 @@ public class DeleteInactiveReplicaTest extends SolrCloudTestCase { public static void setupCluster() throws Exception { configureCluster(4) .addConfig("conf", configset("cloud-minimal")) - .withProperty(ZkStateReader.LEGACY_CLOUD, "false") .configure(); } @@ -81,7 +79,9 @@ public void deleteInactiveReplicaTest() throws Exception { return r == null || r.getState() != Replica.State.ACTIVE; }); - log.info("Removing replica {}/{} ", shard.getName(), replica.getName()); + if (log.isInfoEnabled()) { + log.info("Removing replica {}/{} ", shard.getName(), replica.getName()); + } CollectionAdminRequest.deleteReplica(collectionName, shard.getName(), replica.getName()) .process(cluster.getSolrClient()); waitForState("Expected deleted replica " + replica.getName() + " to be removed from cluster state", collectionName, (n, c) -> { diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteNodeTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteNodeTest.java index a6ff54bd8997..5fd339e59150 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DeleteNodeTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DeleteNodeTest.java @@ -75,7 +75,7 @@ public void test() throws Exception { // check what replicas are on the node, and whether the call should fail boolean shouldFail = false; DocCollection docColl = state.getCollection(coll); - log.info("#### DocCollection: " + docColl); + log.info("#### DocCollection: {}", docColl); List replicas = docColl.getReplicas(node2bdecommissioned); if (replicas != null) { for (Replica replica : replicas) { @@ -106,7 +106,9 @@ public void test() throws Exception { } Thread.sleep(50); } - log.info("####### DocCollection after: " + cloudClient.getZkStateReader().getClusterState().getCollection(coll)); + if (log.isInfoEnabled()) { + log.info("####### DocCollection after: {}", cloudClient.getZkStateReader().getClusterState().getCollection(coll)); + } if (shouldFail) { assertTrue(String.valueOf(rsp), rsp.getRequestStatus() == RequestStatusState.FAILED); } else { diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java index 253f2ba4dcbd..8340458cf895 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java @@ -211,19 +211,7 @@ public void deleteReplicaByCountForAllShards() throws Exception { @Test public void deleteReplicaFromClusterState() throws Exception { - deleteReplicaFromClusterState("false"); - CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, null).process(cluster.getSolrClient()); - } - - @Test - public void deleteReplicaFromClusterStateLegacy() throws Exception { - deleteReplicaFromClusterState("true"); - 
CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, null).process(cluster.getSolrClient()); - } - - private void deleteReplicaFromClusterState(String legacyCloud) throws Exception { - CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, legacyCloud).process(cluster.getSolrClient()); - final String collectionName = "deleteFromClusterState_"+legacyCloud; + final String collectionName = "deleteFromClusterStateCollection"; CollectionAdminRequest.createCollection(collectionName, "conf", 1, 3) .process(cluster.getSolrClient()); @@ -237,7 +225,7 @@ private void deleteReplicaFromClusterState(String legacyCloud) throws Exception Slice shard = getCollectionState(collectionName).getSlice("shard1"); - // don't choose the leader to shutdown, it just complicates things unneccessarily + // don't choose the leader to shutdown, it just complicates things unnecessarily Replica replica = getRandomReplica(shard, (r) -> ( r.getState() == Replica.State.ACTIVE && ! r.equals(shard.getLeader()))); @@ -283,23 +271,7 @@ private void deleteReplicaFromClusterState(String legacyCloud) throws Exception @Slow // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // annotated on: 24-Dec-2018 public void raceConditionOnDeleteAndRegisterReplica() throws Exception { - raceConditionOnDeleteAndRegisterReplica("false"); - CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, null).process(cluster.getSolrClient()); - } - - @Test - @Slow - // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // annotated on: 24-Dec-2018 - public void raceConditionOnDeleteAndRegisterReplicaLegacy() throws Exception { - raceConditionOnDeleteAndRegisterReplica("true"); - CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, null).process(cluster.getSolrClient()); - } - - // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // annotated on: 24-Dec-2018 - public void raceConditionOnDeleteAndRegisterReplica(String legacyCloud) throws Exception { - - CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, legacyCloud).process(cluster.getSolrClient()); - final String collectionName = "raceDeleteReplica_"+legacyCloud; + final String collectionName = "raceDeleteReplicaCollection"; CollectionAdminRequest.createCollection(collectionName, "conf", 1, 2) .process(cluster.getSolrClient()); @@ -467,7 +439,9 @@ public void deleteReplicaOnIndexing() throws Exception { try { cluster.getSolrClient().waitForState(collectionName, 20, TimeUnit.SECONDS, (liveNodes, collectionState) -> collectionState.getReplicas().size() == 1); } catch (TimeoutException e) { - log.info("Timeout wait for state {}", getCollectionState(collectionName)); + if (log.isInfoEnabled()) { + log.info("Timeout wait for state {}", getCollectionState(collectionName)); + } throw e; } } diff --git a/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java b/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java index 9e0289ec9569..0b488f02e2b8 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java @@ -615,6 +615,7 @@ private List getAllSortFieldNames() throws SolrServerException, IOExcept LukeRequest req = new LukeRequest("/admin/luke"); req.setShowSchema(true); NamedList rsp = controlClient.request(req); + @SuppressWarnings({"unchecked"}) NamedList 
fields = (NamedList) ((NamedList)rsp.get("schema")).get("fields"); ArrayList names = new ArrayList<>(fields.size()); for (Map.Entry item : fields) { diff --git a/solr/core/src/test/org/apache/solr/cloud/DistribDocExpirationUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/cloud/DistribDocExpirationUpdateProcessorTest.java index d99a4069e210..d9a1a09e9e62 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DistribDocExpirationUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DistribDocExpirationUpdateProcessorTest.java @@ -78,6 +78,7 @@ public void cleanup() throws Exception { /** * Modifies the request to inlcude authentication params if needed, returns the request */ + @SuppressWarnings({"rawtypes"}) private T setAuthIfNeeded(T req) { if (null != USER) { assert null != PASS; diff --git a/solr/core/src/test/org/apache/solr/cloud/DistribJoinFromCollectionTest.java b/solr/core/src/test/org/apache/solr/cloud/DistribJoinFromCollectionTest.java index bbd6eb063ee7..805e013b8e2e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DistribJoinFromCollectionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DistribJoinFromCollectionTest.java @@ -123,7 +123,7 @@ public void testNoScore() throws Exception { @AfterClass public static void shutdown() { - log.info("DistribJoinFromCollectionTest logic complete ... deleting the " + toColl + " and " + fromColl + " collections"); + log.info("DistribJoinFromCollectionTest logic complete ... deleting the {} and {} collections", toColl, fromColl); // try to clean up for (String c : new String[]{ toColl, fromColl }) { @@ -132,7 +132,7 @@ public static void shutdown() { req.process(cluster.getSolrClient()); } catch (Exception e) { // don't fail the test - log.warn("Could not delete collection {} after test completed due to: " + e, c); + log.warn("Could not delete collection {} after test completed due to:", c, e); } } diff --git a/solr/core/src/test/org/apache/solr/cloud/DistributedVersionInfoTest.java b/solr/core/src/test/org/apache/solr/cloud/DistributedVersionInfoTest.java index 0394152ce254..a4b1b1281d15 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DistributedVersionInfoTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DistributedVersionInfoTest.java @@ -123,7 +123,7 @@ public void testReplicaVersionHandling() throws Exception { req.setParams(params); req.add(doc); - log.info("Sending doc with out-of-date version ("+(maxOnReplica -1)+") document directly to replica"); + log.info("Sending doc with out-of-date version ({}) document directly to replica", maxOnReplica -1); client.request(req); client.commit(); @@ -244,7 +244,9 @@ public void run() { cluster.getSolrClient().commit(COLLECTION); - log.info("Total of "+deletedDocs.size()+" docs deleted"); + if (log.isInfoEnabled()) { + log.info("Total of {} docs deleted", deletedDocs.size()); + } maxOnLeader = getMaxVersionFromIndex(leader); maxOnReplica = getMaxVersionFromIndex(replica); @@ -363,7 +365,7 @@ protected boolean reloadCollection(Replica replica, String testCollectionName) t Thread.sleep(1000); // send reload command for the collection - log.info("Sending RELOAD command for " + testCollectionName); + log.info("Sending RELOAD command for {}", testCollectionName); CollectionAdminRequest.reloadCollection(testCollectionName) .process(client); Thread.sleep(2000); // reload can take a short while diff --git a/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java b/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java index 
7509e626e7df..84b3622d435b 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java @@ -89,7 +89,9 @@ public void testReplicasInLowerTerms() throws Exception { JettySolrRunner notLeader0 = getJettyOnPort(getReplicaPort(notLeaders.get(0))); ZkController zkController = notLeader0.getCoreContainer().getZkController(); - log.info("Before put non leaders into lower term: " + printClusterStateInfo()); + if (log.isInfoEnabled()) { + log.info("Before put non leaders into lower term: {}", printClusterStateInfo()); + } putNonLeadersIntoLowerTerm(testCollectionName, SHARD1, zkController, leader, notLeaders, cloudClient); for (Replica replica : notLeaders) { @@ -109,7 +111,9 @@ public void testReplicasInLowerTerms() throws Exception { } } assertEquals(2, numReplicasOnLiveNodes); - log.info("Before forcing leader: " + printClusterStateInfo()); + if (log.isInfoEnabled()) { + log.info("Before forcing leader: {}", printClusterStateInfo()); + } // Assert there is no leader yet assertNull("Expected no leader right now. State: " + clusterState.getCollection(testCollectionName).getSlice(SHARD1), clusterState.getCollection(testCollectionName).getSlice(SHARD1).getLeader()); @@ -124,7 +128,9 @@ public void testReplicasInLowerTerms() throws Exception { cloudClient.getZkStateReader().forceUpdateCollection(testCollectionName); clusterState = cloudClient.getZkStateReader().getClusterState(); - log.info("After forcing leader: " + clusterState.getCollection(testCollectionName).getSlice(SHARD1)); + if (log.isInfoEnabled()) { + log.info("After forcing leader: {}", clusterState.getCollection(testCollectionName).getSlice(SHARD1)); + } // we have a leader Replica newLeader = clusterState.getCollectionOrNull(testCollectionName).getSlice(SHARD1).getLeader(); assertNotNull(newLeader); @@ -195,7 +201,9 @@ private void putNonLeadersIntoLowerTerm(String collectionName, String shard, ZkC } // Kill the leader - log.info("Killing leader for shard1 of " + collectionName + " on node " + leader.getNodeName() + ""); + if (log.isInfoEnabled()) { + log.info("Killing leader for shard1 of {} on node {}", collectionName, leader.getNodeName()); + } leaderJetty.stop(); // Wait for a steady state, till the shard is leaderless @@ -243,14 +251,16 @@ private void bringBackOldLeaderAndSendDoc(String collection, Replica leader, Lis waitForRecoveriesToFinish(collection, cloudClient.getZkStateReader(), true); cloudClient.getZkStateReader().forceUpdateCollection(collection); ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); - log.info("After bringing back leader: " + clusterState.getCollection(collection).getSlice(SHARD1)); + if (log.isInfoEnabled()) { + log.info("After bringing back leader: {}", clusterState.getCollection(collection).getSlice(SHARD1)); + } int numActiveReplicas = getNumberOfActiveReplicas(clusterState, collection, SHARD1); assertEquals(1+notLeaders.size(), numActiveReplicas); - log.info("Sending doc "+docid+"..."); + log.info("Sending doc {}...", docid); sendDoc(docid); log.info("Committing..."); cloudClient.commit(); - log.info("Doc "+docid+" sent and commit issued"); + log.info("Doc {} sent and commit issued", docid); assertDocsExistInAllReplicas(notLeaders, collection, docid, docid); assertDocsExistInAllReplicas(Collections.singletonList(leader), collection, docid, docid); } diff --git a/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java 
b/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java index 4bd4b52fa17a..0b15fe018eb2 100644 --- a/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java @@ -418,7 +418,7 @@ public void run() { final int totalDocsExpected = numThreads * numBatchesPerThread * numDocsPerBatch; ExecutorUtil.shutdownAndAwaitTermination(executor); - for (Future result : futures) { + for (@SuppressWarnings({"rawtypes"})Future result : futures) { assertFalse(result.isCancelled()); assertTrue(result.isDone()); // all we care about is propogating any possibile execution exception... @@ -480,7 +480,9 @@ private void checkShardConsistency(final SolrParams params) throws Exception { for (Replica replica : slice) { try (HttpSolrClient replicaClient = getHttpSolrClient(replica.getCoreUrl())) { final SolrDocumentList replicaResults = replicaClient.query(perReplicaParams).getResults(); - log.debug("Shard {}: Replica ({}) results: {}", shardName, replica.getCoreName(), replicaResults); + if (log.isDebugEnabled()) { + log.debug("Shard {}: Replica ({}) results: {}", shardName, replica.getCoreName(), replicaResults); + } assertEquals("inconsistency w/leader: shard=" + shardName + "core=" + replica.getCoreName(), Collections.emptySet(), CloudInspectUtil.showDiff(leaderResults, replicaResults, diff --git a/solr/core/src/test/org/apache/solr/cloud/FullThrottleStoppableIndexingThread.java b/solr/core/src/test/org/apache/solr/cloud/FullThrottleStoppableIndexingThread.java index 1c5d470beb2a..a3a81c53cb6d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/FullThrottleStoppableIndexingThread.java +++ b/solr/core/src/test/org/apache/solr/cloud/FullThrottleStoppableIndexingThread.java @@ -107,7 +107,7 @@ public void run() { } - log.info("FT added docs:" + numAdds + " with " + fails + " fails" + " deletes:" + numDeletes); + log.info("FT added docs:{} with {} fails deletes:{}", numAdds, fails, numDeletes); } private void changeUrlOnError(Exception e) { diff --git a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionOnCommitTest.java b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionOnCommitTest.java index 8df61759e842..b5d3638ae5b2 100644 --- a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionOnCommitTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionOnCommitTest.java @@ -83,11 +83,15 @@ private void multiShardTest() throws Exception { + printClusterStateInfo(), notLeaders.size() == 1); - log.info("All replicas active for "+testCollectionName); + if (log.isInfoEnabled()) { + log.info("All replicas active for {}", testCollectionName); + } // let's put the leader in its own partition, no replicas can contact it now Replica leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1"); - log.info("Creating partition to leader at "+leader.getCoreUrl()); + if (log.isInfoEnabled()) { + log.info("Creating partition to leader at {}", leader.getCoreUrl()); + } SocketProxy leaderProxy = getProxyForReplica(leader); leaderProxy.close(); @@ -101,7 +105,9 @@ private void multiShardTest() throws Exception { leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1"); assertSame("Leader was not active", Replica.State.ACTIVE, leader.getState()); - log.info("Healing partitioned replica at "+leader.getCoreUrl()); + if (log.isInfoEnabled()) { + log.info("Healing partitioned replica at {}", leader.getCoreUrl()); + } leaderProxy.reopen(); 
Thread.sleep(sleepMsBeforeHealPartition); @@ -126,11 +132,13 @@ private void oneShardTest() throws Exception { + printClusterStateInfo(), notLeaders.size() == 2); - log.info("All replicas active for "+testCollectionName); + log.info("All replicas active for {}", testCollectionName); // let's put the leader in its own partition, no replicas can contact it now Replica leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1"); - log.info("Creating partition to leader at "+leader.getCoreUrl()); + if (log.isInfoEnabled()) { + log.info("Creating partition to leader at {}", leader.getCoreUrl()); + } SocketProxy leaderProxy = getProxyForReplica(leader); leaderProxy.close(); @@ -143,7 +151,9 @@ private void oneShardTest() throws Exception { leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1"); assertSame("Leader was not active", Replica.State.ACTIVE, leader.getState()); - log.info("Healing partitioned replica at "+leader.getCoreUrl()); + if (log.isInfoEnabled()) { + log.info("Healing partitioned replica at {}", leader.getCoreUrl()); + } leaderProxy.reopen(); Thread.sleep(sleepMsBeforeHealPartition); @@ -165,21 +175,22 @@ public JettySolrRunner createJetty(File solrHome, String dataDir, protected void sendCommitWithRetry(Replica replica) throws Exception { String replicaCoreUrl = replica.getCoreUrl(); - log.info("Sending commit request to: "+replicaCoreUrl); + log.info("Sending commit request to: {}", replicaCoreUrl); final RTimer timer = new RTimer(); try (HttpSolrClient client = getHttpSolrClient(replicaCoreUrl)) { try { client.commit(); - log.info("Sent commit request to {} OK, took {}ms", replicaCoreUrl, timer.getTime()); + if (log.isInfoEnabled()) { + log.info("Sent commit request to {} OK, took {}ms", replicaCoreUrl, timer.getTime()); + } } catch (Exception exc) { Throwable rootCause = SolrException.getRootCause(exc); if (rootCause instanceof NoHttpResponseException) { - log.warn("No HTTP response from sending commit request to "+replicaCoreUrl+ - "; will re-try after waiting 3 seconds"); + log.warn("No HTTP response from sending commit request to {}; will re-try after waiting 3 seconds", replicaCoreUrl); Thread.sleep(3000); client.commit(); - log.info("Second attempt at sending commit to "+replicaCoreUrl+" succeeded."); + log.info("Second attempt at sending commit to {} succeeded", replicaCoreUrl); } else { throw exc; } diff --git a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java index 96c10e4da5f6..fea8a285e106 100644 --- a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java @@ -260,7 +260,7 @@ protected void testRf2() throws Exception { maxVersionBefore = ulog.getCurrentMaxVersion(); } assertNotNull("max version bucket seed not set for core " + coreName, maxVersionBefore); - log.info("Looked up max version bucket seed "+maxVersionBefore+" for core "+coreName); + log.info("Looked up max version bucket seed {} for core {}", maxVersionBefore, coreName); // now up the stakes and do more docs int numDocs = TEST_NIGHTLY ? 
1000 : 105; @@ -296,15 +296,15 @@ protected void testRf2() throws Exception { try (SolrCore core = coreContainer.getCore(coreName)) { assertNotNull("Core '" + coreName + "' not found for replica: " + notLeader.getName(), core); Long currentMaxVersion = core.getUpdateHandler().getUpdateLog().getCurrentMaxVersion(); - log.info("After recovery, looked up NEW max version bucket seed " + currentMaxVersion + - " for core " + coreName + ", was: " + maxVersionBefore); + log.info("After recovery, looked up NEW max version bucket seed {} for core {}, was: {}" + , currentMaxVersion, coreName, maxVersionBefore); assertTrue("max version bucket seed not updated after recovery!", currentMaxVersion > maxVersionBefore); } // verify all docs received assertDocsExistInAllReplicas(notLeaders, testCollectionName, 1, numDocs + 3); - log.info("testRf2 succeeded ... deleting the "+testCollectionName+" collection"); + log.info("testRf2 succeeded ... deleting the {} collection", testCollectionName); // try to clean up attemptCollectionDelete(cloudClient, testCollectionName); @@ -374,13 +374,14 @@ protected void testRf3() throws Exception { assertDocsExistInAllReplicas(notLeaders, testCollectionName, 1, 4); - log.info("testRf3 succeeded ... deleting the "+testCollectionName+" collection"); + log.info("testRf3 succeeded ... deleting the {} collection", testCollectionName); // try to clean up attemptCollectionDelete(cloudClient, testCollectionName); } // test inspired by SOLR-6511 + @SuppressWarnings({"try"}) protected void testLeaderZkSessionLoss() throws Exception { String testCollectionName = "c8n_1x2_leader_session_loss"; @@ -433,7 +434,9 @@ protected void testLeaderZkSessionLoss() throws Exception { // TODO: This test logic seems to be timing dependent and fails on Jenkins // need to come up with a better approach - log.info("Sending doc 2 to old leader "+leader.getName()); + if (log.isInfoEnabled()) { + log.info("Sending doc 2 to old leader {}", leader.getName()); + } try ( HttpSolrClient leaderSolr = getHttpSolrClient(leader, testCollectionName)) { leaderSolr.add(doc); @@ -458,7 +461,7 @@ protected void testLeaderZkSessionLoss() throws Exception { waitToSeeReplicasActive(testCollectionName, "shard1", replicasToCheck, 30); assertDocsExistInAllReplicas(participatingReplicas, testCollectionName, 1, 2); - log.info("testLeaderZkSessionLoss succeeded ... deleting the "+testCollectionName+" collection"); + log.info("testLeaderZkSessionLoss succeeded ... 
deleting the {} collection", testCollectionName); // try to clean up attemptCollectionDelete(cloudClient, testCollectionName); @@ -565,12 +568,14 @@ protected void assertDocExists(HttpSolrClient solr, String coll, String docId) t } protected void assertDocNotExists(HttpSolrClient solr, String coll, String docId) throws Exception { + @SuppressWarnings({"rawtypes"}) NamedList rsp = realTimeGetDocId(solr, docId); String match = JSONTestUtil.matchObj("/id", rsp.get("doc"), Integer.valueOf(docId)); assertTrue("Doc with id=" + docId + " is found in " + solr.getBaseURL() + " due to: " + match + "; rsp="+rsp, match != null); } + @SuppressWarnings({"rawtypes"}) private NamedList realTimeGetDocId(HttpSolrClient solr, String docId) throws SolrServerException, IOException { QueryRequest qr = new QueryRequest(params("qt", "/get", "id", docId, "distrib", "false")); return solr.request(qr); @@ -609,7 +614,9 @@ protected void waitToSeeReplicasActive(String testCollectionName, String shardId final Replica.State state = replica.getState(); if (state != Replica.State.ACTIVE) { - log.info("Replica " + replica.getName() + " is currently " + state); + if (log.isInfoEnabled()) { + log.info("Replica {} is currently {}", replica.getName(), state); + } allReplicasUp = false; } } @@ -626,7 +633,9 @@ protected void waitToSeeReplicasActive(String testCollectionName, String shardId fail("Didn't see replicas "+ replicasToCheck + " come up within " + maxWaitMs + " ms! ClusterState: " + printClusterStateInfo(testCollectionName)); - log.info("Took {} ms to see replicas [{}] become active.", timer.getTime(), replicasToCheck); + if (log.isInfoEnabled()) { + log.info("Took {} ms to see replicas [{}] become active.", timer.getTime(), replicasToCheck); + } } } diff --git a/solr/core/src/test/org/apache/solr/cloud/KerberosTestServices.java b/solr/core/src/test/org/apache/solr/cloud/KerberosTestServices.java index cccf590c78cf..0e9587bfa2cb 100644 --- a/solr/core/src/test/org/apache/solr/cloud/KerberosTestServices.java +++ b/solr/core/src/test/org/apache/solr/cloud/KerberosTestServices.java @@ -78,7 +78,7 @@ public void start() throws Exception { FileUtils.deleteDirectory(workDir); // clean directory numTries++; if (numTries == 3) { - log.error("Failed setting up MiniKDC. Tried " + numTries + " times."); + log.error("Failed setting up MiniKDC. Tried {} times.", numTries); throw e; } log.error("BindException encountered when setting up MiniKdc. 
Trying again."); @@ -132,7 +132,7 @@ private static class JaasConfiguration extends Configuration { */ public JaasConfiguration(String clientPrincipal, File clientKeytab, String serverPrincipal, File serverKeytab) { - Map clientOptions = new HashMap(); + Map clientOptions = new HashMap<>(); clientOptions.put("principal", clientPrincipal); clientOptions.put("keyTab", clientKeytab.getAbsolutePath()); clientOptions.put("useKeyTab", "true"); @@ -148,7 +148,7 @@ public JaasConfiguration(String clientPrincipal, File clientKeytab, AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, clientOptions)}; if(serverPrincipal!=null && serverKeytab!=null) { - Map serverOptions = new HashMap(clientOptions); + Map serverOptions = new HashMap<>(clientOptions); serverOptions.put("principal", serverPrincipal); serverOptions.put("keytab", serverKeytab.getAbsolutePath()); serverEntry = new AppConfigurationEntry[]{ diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java index 9c3e83fc2ee5..881b68a0a353 100644 --- a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java @@ -99,7 +99,7 @@ void runLeaderProcess(boolean weAreReplacement, int pauseBeforeStartMs) throws KeeperException, InterruptedException, IOException { super.runLeaderProcess(weAreReplacement, pauseBeforeStartMs); if (runLeaderDelay > 0) { - log.info("Sleeping for " + runLeaderDelay + "ms to simulate leadership takeover delay"); + log.info("Sleeping for {}ms to simulate leadership takeover delay", runLeaderDelay); Thread.sleep(runLeaderDelay); } } @@ -352,7 +352,7 @@ public void testElection() throws Exception { @Test public void testParallelElection() throws Exception { final int numShards = 2 + random().nextInt(18); - log.info("Testing parallel election across " + numShards + " shards"); + log.info("Testing parallel election across {} shards", numShards); List threads = new ArrayList<>(); diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java index bde632f5ede8..e94783e6a1cd 100644 --- a/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java @@ -142,7 +142,7 @@ protected void testRf3WithLeaderFailover() throws Exception { if (oldLeaderProxy != null) { oldLeaderProxy.close(); } else { - log.warn("No SocketProxy found for old leader node "+leaderNode); + log.warn("No SocketProxy found for old leader node {}",leaderNode); } Thread.sleep(10000); // give chance for new leader to be elected. 
diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java index b053743dabef..9c5ff4d5ef43 100644 --- a/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java @@ -102,7 +102,9 @@ public void test() throws Exception { for (String id : addedIds) { assertNotNull(cluster.getSolrClient().getById(collection,id)); } - log.info("The test success oldLeader:{} currentState:{}", oldLeader, getCollectionState(collection)); + if (log.isInfoEnabled()) { + log.info("The test success oldLeader:{} currentState:{}", oldLeader, getCollectionState(collection)); + } } finally { CollectionAdminRequest.deleteCollection(collection).process(cluster.getSolrClient()); @@ -178,7 +180,9 @@ public void testOtherReplicasAreNotActive() throws Exception { if (numReplicas == 2) { Slice shard = getCollectionState(collection).getSlice("shard1"); otherReplicaJetty = cluster.getReplicaJetty(getNonLeader(shard)); - log.info("Stop jetty node : {} state:{}", otherReplicaJetty.getBaseUrl(), getCollectionState(collection)); + if (log.isInfoEnabled()) { + log.info("Stop jetty node : {} state:{}", otherReplicaJetty.getBaseUrl(), getCollectionState(collection)); + } otherReplicaJetty.stop(); cluster.waitForJettyToStop(otherReplicaJetty); waitForState("Timeout waiting for replica get down", collection, (liveNodes, collectionState) -> getNonLeader(collectionState.getSlice("shard1")).getState() != Replica.State.ACTIVE); diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java index 3dfb521d629f..fb0cbbdb5874 100644 --- a/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java @@ -92,7 +92,9 @@ public void setupTest() throws Exception { cluster.stopJettySolrRunner(jetty);// TODO: Can we avoid this restart cluster.startJettySolrRunner(jetty); proxy.open(jetty.getBaseUrl().toURI()); - log.info("Adding proxy for URL: " + jetty.getBaseUrl() + ". Proxy: " + proxy.getUrl()); + if (log.isInfoEnabled()) { + log.info("Adding proxy for URL: {}. 
Proxy {}", jetty.getBaseUrl(), proxy.getUrl()); + } proxies.put(jetty, proxy); jettys.put(proxy.getUrl(), jetty); } @@ -291,12 +293,14 @@ private void assertDocsExistInAllReplicas(List notLeaders, } private void assertDocExists(HttpSolrClient solr, String coll, String docId) throws Exception { + @SuppressWarnings({"rawtypes"}) NamedList rsp = realTimeGetDocId(solr, docId); String match = JSONTestUtil.matchObj("/id", rsp.get("doc"), docId); assertTrue("Doc with id=" + docId + " not found in " + solr.getBaseURL() + " due to: " + match + "; rsp="+rsp, match == null); } + @SuppressWarnings({"rawtypes"}) private NamedList realTimeGetDocId(HttpSolrClient solr, String docId) throws SolrServerException, IOException { QueryRequest qr = new QueryRequest(params("qt", "/get", "id", docId, "distrib", "false")); return solr.request(qr); diff --git a/solr/core/src/test/org/apache/solr/cloud/LegacyCloudClusterPropTest.java b/solr/core/src/test/org/apache/solr/cloud/LegacyCloudClusterPropTest.java deleted file mode 100644 index f697204c34d4..000000000000 --- a/solr/core/src/test/org/apache/solr/cloud/LegacyCloudClusterPropTest.java +++ /dev/null @@ -1,180 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.solr.cloud; - -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStreamWriter; -import java.io.Writer; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Properties; - -import org.apache.solr.client.solrj.embedded.JettySolrRunner; -import org.apache.solr.client.solrj.request.CollectionAdminRequest; -import org.apache.solr.common.cloud.ClusterProperties; -import org.apache.solr.common.cloud.ClusterStateUtil; -import org.apache.solr.common.cloud.DocCollection; -import org.apache.solr.common.cloud.Replica; -import org.apache.solr.common.cloud.ZkStateReader; -import org.apache.solr.core.CorePropertiesLocator; -import org.junit.After; -import org.junit.BeforeClass; -import org.junit.Test; - -public class LegacyCloudClusterPropTest extends SolrCloudTestCase { - - @BeforeClass - public static void setupCluster() throws Exception { - - // currently this test is fine with a single shard with a single replica and it's simpler. Could easily be - // extended to multiple shards/replicas, but there's no particular need. - configureCluster(1) - .addConfig("conf", configset("cloud-minimal")) - .configure(); - } - - @After - public void afterTest() throws Exception { - cluster.deleteAllCollections(); - } - - - // Are all these required? 
- private static String[] requiredProps = { - "numShards", - "collection.configName", - "name", - "replicaType", - "shard", - "collection", - "coreNodeName" - }; - - @Test - //2018-06-18 (commented) @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") - //Commented 14-Oct-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 17-Aug-2018 - // commented out on: 01-Apr-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // annotated on: 24-Dec-2018 - public void testCreateCollectionSwitchLegacyCloud() throws Exception { - createAndTest("legacyTrue", true); - createAndTest("legacyFalse", false); - } - - private void createAndTest(final String coll, final boolean legacy) throws Exception { - - // First, just insure that core.properties file gets created with coreNodeName and all other mandatory parameters. - final String legacyString = Boolean.toString(legacy); - final String legacyAnti = Boolean.toString(!legacy); - CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, legacyString).process(cluster.getSolrClient()); - ClusterProperties props = new ClusterProperties(zkClient()); - - assertEquals("Value of legacyCloud cluster prop unexpected", legacyString, - props.getClusterProperty(ZkStateReader.LEGACY_CLOUD, legacyAnti)); - - CollectionAdminRequest.createCollection(coll, "conf", 1, 1) - .setMaxShardsPerNode(1) - .process(cluster.getSolrClient()); - - cluster.waitForActiveCollection(coll, 1, 1); - - assertTrue(ClusterStateUtil.waitForAllActiveAndLiveReplicas(cluster.getSolrClient().getZkStateReader(), 120000)); - - // Insure all mandatory properties are there. - checkMandatoryProps(coll); - - checkCollectionActive(coll); - // The fixes for SOLR-11503 insure that creating a collection has coreNodeName whether legacyCloud is true or false, - // we still need to test repairing a properties file that does _not_ have coreNodeName set, the second part of - // the fix. - - // First, remove the coreNodeName from cluster.properties and write it out it. 
- removePropertyFromAllReplicas(coll, "coreNodeName"); - - // Now restart Solr, this should repair the removal on core load no matter the value of legacyCloud - JettySolrRunner jetty = cluster.getJettySolrRunner(0); - jetty.stop(); - - cluster.waitForJettyToStop(jetty); - - jetty.start(); - - cluster.waitForAllNodes(30); - - checkMandatoryProps(coll); - checkCollectionActive(coll); - } - - private void checkCollectionActive(String coll) { - assertTrue(ClusterStateUtil.waitForAllActiveAndLiveReplicas(cluster.getSolrClient().getZkStateReader(), 120000)); - DocCollection docColl = getCollectionState(coll); - for (Replica rep : docColl.getReplicas()) { - if (rep.getState() == Replica.State.ACTIVE) return; - } - fail("Replica was not active for collection " + coll); - } - private void removePropertyFromAllReplicas(String coll, String propDel) throws IOException { - DocCollection docColl = getCollectionState(coll); - - // First remove the property from all core.properties files - for (Replica rep : docColl.getReplicas()) { - final String coreName = rep.getCoreName(); - Properties prop = loadPropFileForReplica(coreName); - prop.remove(propDel); - JettySolrRunner jetty = cluster.getJettySolrRunner(0); - Path expected = Paths.get(jetty.getSolrHome()).toAbsolutePath().resolve(coreName); - Path corePropFile = Paths.get(expected.toString(), CorePropertiesLocator.PROPERTIES_FILENAME); - - try (Writer os = new OutputStreamWriter(Files.newOutputStream(corePropFile), StandardCharsets.UTF_8)) { - prop.store(os, ""); - } - } - - // Now insure it's really gone - for (Replica rep : docColl.getReplicas()) { - Properties prop = loadPropFileForReplica(rep.getCoreName()); - assertEquals("Property " + propDel + " should have been deleted", - "bogus", prop.getProperty(propDel, "bogus")); - } - } - - private Properties loadPropFileForReplica(String coreName) throws IOException { - JettySolrRunner jetty = cluster.getJettySolrRunner(0); - Path expected = Paths.get(jetty.getSolrHome()).toAbsolutePath().resolve(coreName); - Path corePropFile = Paths.get(expected.toString(), CorePropertiesLocator.PROPERTIES_FILENAME); - Properties props = new Properties(); - try (InputStream fis = Files.newInputStream(corePropFile)) { - props.load(new InputStreamReader(fis, StandardCharsets.UTF_8)); - } - return props; - } - - private void checkMandatoryProps(String coll) throws IOException { - DocCollection docColl = getCollectionState(coll); - for (Replica rep : docColl.getReplicas()) { - Properties prop = loadPropFileForReplica(rep.getCoreName()); for (String testProp : requiredProps) { - String propVal = prop.getProperty(testProp, "bogus"); - if ("bogus".equals(propVal)) { - fail("Should have found property " + testProp + " in properties file"); - } - } - } - } -} diff --git a/solr/core/src/test/org/apache/solr/cloud/MetricsHistoryIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/MetricsHistoryIntegrationTest.java index 5332e7ad8311..a720677a5e42 100644 --- a/solr/core/src/test/org/apache/solr/cloud/MetricsHistoryIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/MetricsHistoryIntegrationTest.java @@ -100,6 +100,7 @@ public void testList() throws Exception { NamedList rsp = solrClient.request(createHistoryRequest(params(CommonParams.ACTION, "list"))); assertNotNull(rsp); // expected solr.jvm, solr.node and solr.collection..system + @SuppressWarnings({"unchecked"}) SimpleOrderedMap lst = (SimpleOrderedMap) rsp.get("metrics"); assertNotNull(lst); assertEquals(lst.toString(), 3, lst.size()); @@ -109,6 
+110,7 @@ public void testList() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testStatus() throws Exception { NamedList rsp = solrClient.request(createHistoryRequest( params(CommonParams.ACTION, "status", CommonParams.NAME, "solr.jvm"))); @@ -129,6 +131,7 @@ public void testStatus() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testGet() throws Exception { NamedList rsp = solrClient.request(createHistoryRequest(params( CommonParams.ACTION, "get", CommonParams.NAME, "solr.jvm"))); @@ -192,6 +195,7 @@ public void testGet() throws Exception { }); } + @SuppressWarnings({"rawtypes"}) public static SolrRequest createHistoryRequest(SolrParams params) { return new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/metrics/history", params); } diff --git a/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java b/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java index 85e13d15fd87..edd23b50e34a 100644 --- a/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java @@ -50,11 +50,6 @@ public static void setupCluster() throws Exception { configureCluster(2) .addConfig("conf", configset("cloud-minimal")) .configure(); - - if (usually()) { - CollectionAdminRequest.setClusterProperty("legacyCloud", "false").process(cluster.getSolrClient()); - log.info("Using legacyCloud=false for cluster"); - } } private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -153,14 +148,14 @@ public void multipleShardMigrateTest() throws Exception { cluster.getSolrClient().deleteById("a/" + BIT_SEP + "!104"); splitKeyCount[0]--; } catch (Exception e) { - log.warn("Error deleting document a/" + BIT_SEP + "!104", e); + log.warn("Error deleting document a/{}!104", BIT_SEP, e); } cluster.getSolrClient().commit(); collectionClient.commit(); solrQuery = new SolrQuery("*:*").setRows(1000); QueryResponse response = collectionClient.query(solrQuery); - log.info("Response from target collection: " + response); + log.info("Response from target collection: {}", response); assertEquals("DocCount on target collection does not match", splitKeyCount[0], response.getResults().getNumFound()); waitForState("Expected to find routing rule for split key " + splitKey, "sourceCollection", (n, c) -> { @@ -208,7 +203,7 @@ public void run() { if (splitKey.equals(shardKey)) splitKeyCount++; } catch (Exception e) { - log.error("Exception while adding document id: " + doc.getField("id"), e); + log.error("Exception while adding document id: {}", doc.getField("id"), e); } try { Thread.sleep(50); diff --git a/solr/core/src/test/org/apache/solr/cloud/MockSolrSource.java b/solr/core/src/test/org/apache/solr/cloud/MockSolrSource.java index 05d56f5ce962..72813960132a 100644 --- a/solr/core/src/test/org/apache/solr/cloud/MockSolrSource.java +++ b/solr/core/src/test/org/apache/solr/cloud/MockSolrSource.java @@ -43,6 +43,6 @@ public static ZkController makeSimpleMock(Overseer overseer, ZkStateReader reade when(zkControllerMock.getZkStateReader()).thenReturn(reader); when(zkControllerMock.getZkClient()).thenReturn(zkClient); when(zkControllerMock.getOverseer()).thenReturn(overseer); - return (ZkController) zkControllerMock; + return zkControllerMock; } } diff --git a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java index 025460c895b6..a17cd1a25a1b 100644 --- 
a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java @@ -95,7 +95,9 @@ public void afterTest() throws Exception { // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // annotated on: 24-Dec-2018 public void test() throws Exception { String coll = getTestClass().getSimpleName() + "_coll_" + inPlaceMove; - log.info("total_jettys: " + cluster.getJettySolrRunners().size()); + if (log.isInfoEnabled()) { + log.info("total_jettys: {}", cluster.getJettySolrRunners().size()); + } int REPLICATION = 2; CloudSolrClient cloudClient = cluster.getSolrClient(); @@ -154,7 +156,7 @@ public void test() throws Exception { boolean recovered = false; for (int i = 0; i < 300; i++) { DocCollection collState = getCollectionState(coll); - log.debug("###### " + collState); + log.debug("###### {}", collState); Collection replicas = collState.getSlice(shardId).getReplicas(); boolean allActive = true; boolean hasLeaders = true; @@ -164,7 +166,7 @@ public void test() throws Exception { continue; } if (!r.isActive(Collections.singleton(targetNode))) { - log.info("Not active: " + r); + log.info("Not active: {}", r); allActive = false; } } @@ -182,7 +184,7 @@ public void test() throws Exception { recovered = true; break; } else { - log.info("--- waiting, allActive=" + allActive + ", hasLeaders=" + hasLeaders); + log.info("--- waiting, allActive={}, hasLeaders={}", allActive, hasLeaders); Thread.sleep(1000); } } @@ -198,7 +200,7 @@ public void test() throws Exception { recovered = false; for (int i = 0; i < 300; i++) { DocCollection collState = getCollectionState(coll); - log.debug("###### " + collState); + log.debug("###### {}", collState); Collection replicas = collState.getSlice(shardId).getReplicas(); boolean allActive = true; boolean hasLeaders = true; @@ -208,7 +210,7 @@ public void test() throws Exception { continue; } if (!r.isActive(Collections.singleton(replica.getNodeName()))) { - log.info("Not active yet: " + r); + log.info("Not active yet: {}", r); allActive = false; } } @@ -301,7 +303,9 @@ public void testFailedMove() throws Exception { } assertFalse(success); - log.info("--- current collection state: " + cloudClient.getZkStateReader().getClusterState().getCollection(coll)); + if (log.isInfoEnabled()) { + log.info("--- current collection state: {}", cloudClient.getZkStateReader().getClusterState().getCollection(coll)); + } assertEquals(100, cluster.getSolrClient().query(coll, new SolrQuery("*:*")).getResults().getNumFound()); } diff --git a/solr/core/src/test/org/apache/solr/cloud/MultiThreadedOCPTest.java b/solr/core/src/test/org/apache/solr/cloud/MultiThreadedOCPTest.java index 19d6d9f4cc76..c8357afad01a 100644 --- a/solr/core/src/test/org/apache/solr/cloud/MultiThreadedOCPTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/MultiThreadedOCPTest.java @@ -29,7 +29,6 @@ import org.apache.solr.client.solrj.request.CollectionAdminRequest.Create; import org.apache.solr.client.solrj.request.CollectionAdminRequest.SplitShard; import org.apache.solr.client.solrj.request.QueryRequest; -import org.apache.solr.client.solrj.response.CollectionAdminResponse; import org.apache.solr.client.solrj.response.RequestStatusState; import org.apache.solr.common.params.CollectionParams; import org.apache.solr.common.params.ModifiableSolrParams; @@ -77,17 +76,24 @@ private void testFillWorkQueue() throws Exception { distributedQueue.offer(Utils.toJSON(Utils.makeMap( "collection", "A_COLL", 
QUEUE_OPERATION, MOCK_COLL_TASK.toLower(), - ASYNC, String.valueOf(i), + ASYNC, Integer.toString(i), - "sleep", (i == 0 ? "1000" : "1") //first task waits for 1 second, and thus blocking - // all other tasks. Subsequent tasks only wait for 1ms + // The third task waits for a long time, and thus blocks the queue for all other A_COLL tasks. + // Subsequent tasks, as well as the first two, only wait for 1ms + "sleep", (i == 2 ? "10000" : "1") ))); log.info("MOCK task added {}", i); - } Thread.sleep(100);//wait and post the next message //this is not going to be blocked because it operates on another collection + // Wait until we see the second A_COLL task getting processed (assuming the first got processed as well) + Long task1CollA = waitForTaskToCompleted(client, 1); + + assertNotNull("Queue did not process first two tasks on A_COLL, can't run test", task1CollA); + + // Make sure the long-running task did not finish, otherwise there is no way to verify that the B_COLL task runs in parallel with it + assertNull("Long running task finished too early, can't test", checkTaskHasCompleted(client, 2)); + + // Enqueue a task on another collection not competing with the lock on A_COLL and see that it can be executed right away distributedQueue.offer(Utils.toJSON(Utils.makeMap( "collection", "B_COLL", QUEUE_OPERATION, MOCK_COLL_TASK.toLower(), @@ -95,24 +101,43 @@ private void testFillWorkQueue() throws Exception { "sleep", "1" ))); + // We now check that the B_COLL task completed before the third (long-running) task on A_COLL, + // or, if both have completed (in case this check got significantly delayed), that B_COLL finished first. + Long taskCollB = waitForTaskToCompleted(client, 200); - Long acoll = null, bcoll = null; - for (int i = 0; i < 500; i++) { - if (bcoll == null) { - CollectionAdminResponse statusResponse = getStatusResponse("200", client); - bcoll = (Long) statusResponse.getResponse().get("MOCK_FINISHED"); - } - if (acoll == null) { - CollectionAdminResponse statusResponse = getStatusResponse("2", client); - acoll = (Long) statusResponse.getResponse().get("MOCK_FINISHED"); - } - if (acoll != null && bcoll != null) break; - Thread.sleep(100); + // We do not wait for the long-running task to finish; that would be a waste of time. + Long task2CollA = checkTaskHasCompleted(client, 2); + + // Given the wait delay (500 iterations of 100ms), the task has plenty of time to complete, so this is not expected. + assertNotNull("Task on B_COLL did not complete, can't test", taskCollB); + // We didn't wait for the 3rd A_COLL task to complete (the test can run quickly), but if it did, we expect the B_COLL task to have finished first. + assertTrue("task2CollA: " + task2CollA + " taskCollB: " + taskCollB, task2CollA == null || task2CollA > taskCollB); + } + } + + /** + * Verifies the status of an async task submitted to the Overseer Collection queue. + * @return null if the task has not completed, the completion timestamp if the task has completed + * (see mockOperation() in {@link org.apache.solr.cloud.api.collections.OverseerCollectionMessageHandler}). + */ + private Long checkTaskHasCompleted(SolrClient client, int requestId) throws IOException, SolrServerException { + return (Long) getStatusResponse(Integer.toString(requestId), client).getResponse().get("MOCK_FINISHED"); + } + + /** + * Waits until the specified async task has completed or time runs out.
+ * @return null if the task has not completed, the completion timestamp if the task has completed + */ + private Long waitForTaskToCompleted(SolrClient client, int requestId) throws Exception { + for (int i = 0; i < 500; i++) { + Long task = checkTaskHasCompleted(client, requestId); + if (task != null) { + return task; } - assertTrue(acoll != null && bcoll != null); - assertTrue("acoll: " + acoll + " bcoll: " + bcoll, acoll > bcoll); + Thread.sleep(100); } + return null; } private void testParallelCollectionAPICalls() throws IOException, SolrServerException { @@ -265,6 +290,7 @@ public void run() { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString()); params.set("collection", "collection1"); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); diff --git a/solr/core/src/test/org/apache/solr/cloud/NestedShardedAtomicUpdateTest.java b/solr/core/src/test/org/apache/solr/cloud/NestedShardedAtomicUpdateTest.java index b640fe80cd7c..43fc6fdd1ed8 100644 --- a/solr/core/src/test/org/apache/solr/cloud/NestedShardedAtomicUpdateTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/NestedShardedAtomicUpdateTest.java @@ -108,10 +108,12 @@ public void doRootShardRoutingTest() throws Exception { QueryResponse rsp = getRandomSolrClient().query(params("qt","/get", "id","1", "fl", "*, [child]")); SolrDocument val = (SolrDocument) rsp.getResponse().get("doc"); assertEquals("1", val.getFieldValue("id")); + @SuppressWarnings({"unchecked"}) List children = (List) val.getFieldValues("children"); assertEquals(1, children.size()); SolrDocument childDoc = children.get(0); assertEquals("2", childDoc.getFieldValue("id")); + @SuppressWarnings({"unchecked"}) List grandChildren = (List) childDoc.getFieldValues("grandChildren"); assertEquals(idIndex + 1, grandChildren.size()); SolrDocument grandChild = grandChildren.get(0); @@ -164,10 +166,12 @@ public void doNestedInplaceUpdateTest() throws Exception { QueryResponse rsp = getRandomSolrClient().query(params("qt","/get", "id","1", "fl", "*, [child]")); SolrDocument val = (SolrDocument) rsp.getResponse().get("doc"); assertEquals("1", val.getFieldValue("id")); + @SuppressWarnings({"unchecked"}) List children = (List) val.getFieldValues("children"); assertEquals(1, children.size()); SolrDocument childDoc = children.get(0); assertEquals("2", childDoc.getFieldValue("id")); + @SuppressWarnings({"unchecked"}) List grandChildren = (List) childDoc.getFieldValues("grandChildren"); assertEquals(1, grandChildren.size()); SolrDocument grandChild = grandChildren.get(0); diff --git a/solr/core/src/test/org/apache/solr/cloud/OutOfBoxZkACLAndCredentialsProvidersTest.java b/solr/core/src/test/org/apache/solr/cloud/OutOfBoxZkACLAndCredentialsProvidersTest.java index 873480fbab73..6a5187cdcc40 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OutOfBoxZkACLAndCredentialsProvidersTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OutOfBoxZkACLAndCredentialsProvidersTest.java @@ -58,11 +58,13 @@ public static void afterClass() throws InterruptedException { @Override public void setUp() throws Exception { super.setUp(); - log.info("####SETUP_START " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_START {}", getTestName()); + } createTempDir(); zkDir = createTempDir().resolve("zookeeper/server1/data"); - log.info("ZooKeeper dataDir:" + zkDir); + log.info("ZooKeeper dataDir:{}", zkDir); zkServer = new 
ZkTestServer(zkDir); zkServer.run(); @@ -80,7 +82,9 @@ public void setUp() throws Exception { zkClient.create(SecurityAwareZkACLProvider.SECURITY_ZNODE_PATH, "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); zkClient.close(); - log.info("####SETUP_END " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_END {}", getTestName()); + } } @Override @@ -123,7 +127,7 @@ public void testOpenACLUnsafeAllover() throws Exception { protected void assertOpenACLUnsafeAllover(SolrZkClient zkClient, String path, List verifiedList) throws Exception { List acls = zkClient.getSolrZooKeeper().getACL(path, new Stat()); if (log.isInfoEnabled()) { - log.info("Verifying " + path); + log.info("Verifying {}", path); } if (ZooDefs.CONFIG_NODE.equals(path)) { // Treat this node specially, from the ZK docs: diff --git a/solr/core/src/test/org/apache/solr/cloud/OverriddenZkACLAndCredentialsProvidersTest.java b/solr/core/src/test/org/apache/solr/cloud/OverriddenZkACLAndCredentialsProvidersTest.java index 4c299f4d45c3..5f2112bac800 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverriddenZkACLAndCredentialsProvidersTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverriddenZkACLAndCredentialsProvidersTest.java @@ -63,11 +63,13 @@ public static void afterClass() throws InterruptedException { @Override public void setUp() throws Exception { super.setUp(); - log.info("####SETUP_START " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_START {}", getTestName()); + } createTempDir(); zkDir =createTempDir().resolve("zookeeper/server1/data"); - log.info("ZooKeeper dataDir:" + zkDir); + log.info("ZooKeeper dataDir:{}", zkDir); zkServer = new ZkTestServer(zkDir); zkServer.run(false); @@ -92,7 +94,9 @@ public void setUp() throws Exception { zkClient.makePath("/unprotectedMakePathNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); zkClient.close(); - log.info("####SETUP_END " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_END {}", getTestName()); + } } @Override diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java index 66600153e438..a5f9e796d3ac 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java @@ -269,6 +269,7 @@ protected Set commonMocks(int liveNodesCount) throws Exception { when(workQueueMock.getTailId()).thenAnswer(invocation -> { Object result = null; + @SuppressWarnings({"rawtypes"}) Iterator iter = queue.iterator(); while(iter.hasNext()) { result = iter.next(); @@ -328,8 +329,6 @@ protected Set commonMocks(int liveNodesCount) throws Exception { when(zkStateReaderMock.getBaseUrlForNodeName(address)).thenAnswer(invocation -> address.replaceAll("_", "/")); } - when(zkStateReaderMock.getClusterProperty("legacyCloud", "false")).thenReturn("false"); - when(solrZkClientMock.getZkClientTimeout()).thenReturn(30000); when(clusterStateMock.hasCollection(anyString())).thenAnswer(invocation -> { @@ -745,7 +744,9 @@ protected void testTemplate(Integer numberOfNodes, Integer numberOfNodesToCreate overseerMock, completedMapMock, failureMapMock); - log.info("clusterstate " + clusterStateMock.hashCode()); + if (log.isInfoEnabled()) { + log.info("clusterstate {}", clusterStateMock.hashCode()); + } startComponentUnderTest(); diff --git 
a/solr/core/src/test/org/apache/solr/cloud/OverseerModifyCollectionTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerModifyCollectionTest.java index c9a90a56b60a..ed113cd4cdd8 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverseerModifyCollectionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverseerModifyCollectionTest.java @@ -73,6 +73,7 @@ public void testModifyColl() throws Exception { private String getConfigNameFromZk(String collName) throws KeeperException, InterruptedException { byte[] b = zkClient().getData(ZkStateReader.getCollectionPathRoot(collName), null, null, false); + @SuppressWarnings({"rawtypes"}) Map confData = (Map) Utils.fromJSON(b); return (String) confData.get(ZkController.CONFIGNAME_PROP); } diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java index bcfaeda852f3..99c406405051 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java @@ -76,7 +76,7 @@ else if (failOnIntermediateTransition) { } private void waitForNewOverseer(int seconds, String expected, boolean failOnIntermediateTransition) throws Exception { - log.info("Expecting node: "+expected); + log.info("Expecting node: {}", expected); waitForNewOverseer(seconds, s -> Objects.equals(s, expected), failOnIntermediateTransition); } @@ -97,8 +97,10 @@ private JettySolrRunner getOverseerJetty() throws Exception { } private void logOverseerState() throws KeeperException, InterruptedException { - log.info("Overseer: {}", getLeaderNode(zkClient())); - log.info("Election queue: {}", getSortedElectionNodes(zkClient(), "/overseer_elect/election")); + if (log.isInfoEnabled()) { + log.info("Overseer: {}", getLeaderNode(zkClient())); + log.info("Election queue: {}", getSortedElectionNodes(zkClient(), "/overseer_elect/election")); // logOk + } } @Test @@ -195,7 +197,7 @@ public void testDesignatedOverseerRestarts() throws Exception { logOverseerState(); // kill the current overseer, and check that the next node in the election queue assumes leadership leaderJetty.stop(); - log.info("Killing designated overseer: "+overseer1); + log.info("Killing designated overseer: {}", overseer1); // after 5 seconds, bring back dead designated overseer and assert that it assumes leadership "right away", // i.e. without any other node assuming leadership before this node becomes leader. 
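The reworked testFillWorkQueue in MultiThreadedOCPTest above replaces an inline 500-iteration wait loop with the waitForTaskToCompleted/checkTaskHasCompleted helpers. A condensed sketch of that bounded-polling idiom, keeping the same 500 x 100ms budget (generic names, not from the patch):

    import java.util.concurrent.Callable;

    class BoundedPollSketch {
      // Polls check() until it yields a non-null value or the time budget runs out.
      // A null return signals a timeout and leaves the pass/fail decision to the caller.
      static <T> T pollUntilNonNull(Callable<T> check) throws Exception {
        for (int i = 0; i < 500; i++) {
          T result = check.call(); // e.g. read the MOCK_FINISHED timestamp from a status response
          if (result != null) {
            return result;
          }
          Thread.sleep(100);
        }
        return null;
      }
    }

Comparing the two completion timestamps obtained this way is then enough to assert that the B_COLL task was not serialized behind the long-running A_COLL task.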
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerStatusTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerStatusTest.java index 0d62d9e3729d..729303591e18 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverseerStatusTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverseerStatusTest.java @@ -34,6 +34,7 @@ public static void setupCluster() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void test() throws Exception { // find existing command counts because collection may be created by base test class too diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerTaskQueueTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerTaskQueueTest.java index 331bf41b4260..3472f589c98f 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverseerTaskQueueTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverseerTaskQueueTest.java @@ -78,6 +78,7 @@ public void testContainsTaskWithRequestId() throws Exception { List queueEvents = tq.peekTopN(2, s -> false, 1000); OverseerTaskQueue.QueueEvent requestId2Event = null; for (OverseerTaskQueue.QueueEvent queueEvent : queueEvents) { + @SuppressWarnings({"unchecked"}) Map eventProps = (Map) Utils.fromJSON(queueEvent.getBytes()); if (requestId2.equals(eventProps.get(CommonAdminParams.ASYNC))) { requestId2Event = queueEvent; diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java index 226b5dbac64e..8be7a2c2332a 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java @@ -88,7 +88,6 @@ import org.apache.zookeeper.KeeperException.SessionExpiredException; import org.apache.zookeeper.WatchedEvent; import org.apache.zookeeper.Watcher.Event; -import org.apache.zookeeper.data.Stat; import org.apache.zookeeper.proto.WatcherEvent; import org.junit.After; import org.junit.AfterClass; @@ -181,16 +180,21 @@ public void close() { zkStateReader.close(); } + /** + * Create a collection. + * Note there's a similar but slightly different {@link OverseerTest#createCollection(String, int)}. + */ public void createCollection(String collection, int numShards) throws Exception { + // Create collection znode before having ClusterStateUpdater create state.json below it or it will fail. + zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection, true); ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(), "name", collection, ZkStateReader.REPLICATION_FACTOR, "1", - ZkStateReader.NUM_SHARDS_PROP, numShards+"", + ZkStateReader.NUM_SHARDS_PROP, Integer.toString(numShards), "createNodeSet", ""); ZkDistributedQueue q = MiniSolrCloudCluster.getOpenOverseer(overseers).getStateUpdateQueue(); q.offer(Utils.toJSON(m)); - } public String publishState(String collection, String coreName, String coreNodeName, String shard, Replica.State stateName, int numShards, boolean startElection, Overseer overseer) @@ -375,6 +379,23 @@ public void tearDown() throws Exception { super.tearDown(); } + /** + * This method creates a collection. It is different from {@link MockZKController#createCollection(String, int)} in + * the way the {@link ZkDistributedQueue} is obtained. + */ + private void createCollection(String collection, int numShards) throws Exception { + // Create collection znode before having ClusterStateUpdater create state.json below it or it will fail. 
+ zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection, true); + + ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(), + "name", collection, + ZkStateReader.REPLICATION_FACTOR, "1", + ZkStateReader.NUM_SHARDS_PROP, Integer.toString(numShards), + "createNodeSet", ""); + ZkDistributedQueue q = overseers.get(0).getStateUpdateQueue(); + q.offer(Utils.toJSON(m)); + } + @Test public void testShardAssignment() throws Exception { @@ -382,8 +403,6 @@ public void testShardAssignment() throws Exception { SolrZkClient overseerClient = null; try { - - ZkController.createClusterZkNodes(zkClient); overseerClient = electNewOverseer(server.getZkAddress()); @@ -393,15 +412,9 @@ public void testShardAssignment() throws Exception { mockController = new MockZKController(server.getZkAddress(), "127.0.0.1", overseers); - final int numShards = 6; + final int numShards = 6; // this is not the number of shards in the collection - ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(), - "name", COLLECTION, - ZkStateReader.REPLICATION_FACTOR, "1", - ZkStateReader.NUM_SHARDS_PROP, "3", - "createNodeSet", ""); - ZkDistributedQueue q = overseers.get(0).getStateUpdateQueue(); - q.offer(Utils.toJSON(m)); + createCollection(COLLECTION, 3); for (int i = 0; i < numShards; i++) { assertNotNull("shard got no id?", mockController.publishState(COLLECTION, "core" + (i + 1), "node" + (i + 1), "shard" + ((i % 3) + 1), Replica.State.ACTIVE, 3, true, overseers.get(0))); @@ -494,6 +507,7 @@ public void testBadQueueItem() throws Exception { } @Test + @SuppressWarnings({"try"}) public void testDownNodeFailover() throws Exception { MockZKController mockController = null; SolrZkClient overseerClient = null; @@ -558,7 +572,8 @@ private void waitForCollections(ZkStateReader stateReader, String... 
collections } } - log.warn("Timeout waiting for collections: " + Arrays.asList(collections) + " state:" + stateReader.getClusterState()); + log.warn("Timeout waiting for collections: {} state: {}" + , Arrays.asList(collections), stateReader.getClusterState()); } @Test @@ -578,14 +593,9 @@ public void testStateChange() throws Exception { ZkDistributedQueue q = overseers.get(0).getStateUpdateQueue(); - ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(), - "name", COLLECTION, - ZkStateReader.REPLICATION_FACTOR, "1", - ZkStateReader.NUM_SHARDS_PROP, "1", - "createNodeSet", ""); - q.offer(Utils.toJSON(m)); + createCollection(COLLECTION, 1); - m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(), + ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(), ZkStateReader.BASE_URL_PROP, "http://127.0.0.1/solr", ZkStateReader.NODE_NAME_PROP, "node1", ZkStateReader.COLLECTION_PROP, COLLECTION, @@ -825,31 +835,19 @@ public void testExceptionWhenFlushClusterState() throws Exception { reader = new ZkStateReader(zkClient); reader.createClusterStateWatchersAndUpdate(); - // We did not create /collections -> this message will cause exception when Overseer try to flush the clusterstate + // We did not create /collections/collection1 -> this message will cause exception when Overseer tries to flush + // the collection state ZkNodeProps badMessage = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(), "name", "collection1", ZkStateReader.REPLICATION_FACTOR, "1", ZkStateReader.NUM_SHARDS_PROP, "1", - DocCollection.STATE_FORMAT, "2", - "createNodeSet", ""); - ZkNodeProps goodMessage = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(), - "name", "collection2", - ZkStateReader.REPLICATION_FACTOR, "1", - ZkStateReader.NUM_SHARDS_PROP, "1", - DocCollection.STATE_FORMAT, "1", "createNodeSet", ""); ZkDistributedQueue workQueue = Overseer.getInternalWorkQueue(zkClient, new Stats()); workQueue.offer(Utils.toJSON(badMessage)); - workQueue.offer(Utils.toJSON(goodMessage)); overseerClient = electNewOverseer(server.getZkAddress()); - waitForCollections(reader, "collection2"); ZkDistributedQueue q = getOpenOverseer().getStateUpdateQueue(); q.offer(Utils.toJSON(badMessage)); - q.offer(Utils.toJSON(goodMessage.plus("name", "collection3"))); - waitForCollections(reader, "collection2", "collection3"); - assertNotNull(reader.getClusterState().getCollectionOrNull("collection2")); - assertNotNull(reader.getClusterState().getCollectionOrNull("collection3")); TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); while(!timeOut.hasTimedOut()) { @@ -906,6 +904,9 @@ public void testShardLeaderChange() throws Exception { electNewOverseer(server.getZkAddress()); + // Create collection znode before repeatedly trying to enqueue the Cluster state change message + zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/" + COLLECTION, true); + for (int i = 0; i < atLeast(4); i++) { killCounter.incrementAndGet(); // for each round allow 1 kill @@ -914,7 +915,14 @@ public void testShardLeaderChange() throws Exception { TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); while (!timeout.hasTimedOut()) { try { - mockController.createCollection(COLLECTION, 1); + // We must only retry the enqueue to Overseer, not the collection znode creation (that doesn't depend on Overseer) + ZkNodeProps m = new 
ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(), + "name", COLLECTION, + ZkStateReader.REPLICATION_FACTOR, "1", + ZkStateReader.NUM_SHARDS_PROP, "1", + "createNodeSet", ""); + ZkDistributedQueue q = MiniSolrCloudCluster.getOpenOverseer(overseers).getStateUpdateQueue(); + q.offer(Utils.toJSON(m)); break; } catch (SolrException | KeeperException | AlreadyClosedException e) { e.printStackTrace(); @@ -1087,25 +1095,25 @@ public void testPerformance() throws Exception { try { ZkController.createClusterZkNodes(zkClient); + overseerClient = electNewOverseer(server.getZkAddress()); reader = new ZkStateReader(zkClient); reader.createClusterStateWatchersAndUpdate(); mockController = new MockZKController(server.getZkAddress(), "node1", overseers); - final int MAX_COLLECTIONS = 10, MAX_CORES = 10, MAX_STATE_CHANGES = 20000, STATE_FORMAT = 2; + final int MAX_COLLECTIONS = 10, MAX_CORES = 10, MAX_STATE_CHANGES = 20000; for (int i=0; i= MAX_COLLECTIONS - 1) j = 0; @@ -1125,30 +1133,13 @@ public void testPerformance() throws Exception { if (i > 0 && i % 100 == 0) log.info("Published {} items", i); } - // let's publish a sentinel collection which we'll use to wait for overseer to complete operations - ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(), - ZkStateReader.STATE_PROP, Replica.State.ACTIVE.toString(), - ZkStateReader.NODE_NAME_PROP, "node1", - ZkStateReader.CORE_NAME_PROP, "core1", - ZkStateReader.CORE_NODE_NAME_PROP, "node1", - ZkStateReader.COLLECTION_PROP, "perf_sentinel", - ZkStateReader.NUM_SHARDS_PROP, "1", - ZkStateReader.BASE_URL_PROP, "http://" + "node1" - + "/solr/"); - ZkDistributedQueue q = overseers.get(0).getStateUpdateQueue(); - q.offer(Utils.toJSON(m)); + // let's create a sentinel collection which we'll use to wait for overseer to complete operations + createCollection("perf_sentinel", 1); Timer t = new Timer(); Timer.Context context = t.time(); - try { - overseerClient = electNewOverseer(server.getZkAddress()); - assertTrue(overseers.size() > 0); - - reader.waitForState("perf_sentinel", 15000, TimeUnit.MILLISECONDS, (liveNodes, collectionState) -> collectionState != null); - - } finally { - context.stop(); - } + reader.waitForState("perf_sentinel", 15000, TimeUnit.MILLISECONDS, (liveNodes, collectionState) -> collectionState != null); + context.stop(); log.info("Overseer loop finished processing: "); printTimingStats(t); @@ -1163,7 +1154,9 @@ public void testPerformance() throws Exception { if (Arrays.binarySearch(interestingOps, op) < 0) continue; Stats.Stat stat = entry.getValue(); - log.info("op: {}, success: {}, failure: {}", op, stat.success.get(), stat.errors.get()); + if (log.isInfoEnabled()) { + log.info("op: {}, success: {}, failure: {}", op, stat.success.get(), stat.errors.get()); + } Timer timer = stat.requestTime; printTimingStats(timer); } @@ -1177,15 +1170,17 @@ public void testPerformance() throws Exception { private void printTimingStats(Timer timer) { Snapshot snapshot = timer.getSnapshot(); - log.info("\t avgRequestsPerSecond: {}", timer.getMeanRate()); - log.info("\t 5minRateRequestsPerSecond: {}", timer.getFiveMinuteRate()); - log.info("\t 15minRateRequestsPerSecond: {}", timer.getFifteenMinuteRate()); - log.info("\t avgTimePerRequest: {}", nsToMs(snapshot.getMean())); - log.info("\t medianRequestTime: {}", nsToMs(snapshot.getMedian())); - log.info("\t 75thPcRequestTime: {}", nsToMs(snapshot.get75thPercentile())); - log.info("\t 95thPcRequestTime: {}", 
nsToMs(snapshot.get95thPercentile())); - log.info("\t 99thPcRequestTime: {}", nsToMs(snapshot.get99thPercentile())); - log.info("\t 999thPcRequestTime: {}", nsToMs(snapshot.get999thPercentile())); + if (log.isInfoEnabled()) { + log.info("\t avgRequestsPerSecond: {}", timer.getMeanRate()); + log.info("\t 5minRateRequestsPerSecond: {}", timer.getFiveMinuteRate()); // logOk + log.info("\t 15minRateRequestsPerSecond: {}", timer.getFifteenMinuteRate()); // logOk + log.info("\t avgTimePerRequest: {}", nsToMs(snapshot.getMean())); // logOk + log.info("\t medianRequestTime: {}", nsToMs(snapshot.getMedian())); // logOk + log.info("\t 75thPcRequestTime: {}", nsToMs(snapshot.get75thPercentile())); // logOk + log.info("\t 95thPcRequestTime: {}", nsToMs(snapshot.get95thPercentile())); // logOk + log.info("\t 99thPcRequestTime: {}", nsToMs(snapshot.get99thPercentile())); // logOk + log.info("\t 999thPcRequestTime: {}", nsToMs(snapshot.get999thPercentile())); // logOk + } } private static long nsToMs(double ns) { @@ -1214,6 +1209,8 @@ public void testReplay() throws Exception{ //prepopulate work queue with some items to emulate previous overseer died before persisting state DistributedQueue queue = Overseer.getInternalWorkQueue(zkClient, new Stats()); + zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/" + COLLECTION, true); + ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(), "name", COLLECTION, ZkStateReader.REPLICATION_FACTOR, "1", @@ -1275,8 +1272,6 @@ public void testExternalClusterStateChangeBehavior() throws Exception { ZkController.createClusterZkNodes(zkClient); - zkClient.create("/collections/test", null, CreateMode.PERSISTENT, true); - reader = new ZkStateReader(zkClient); reader.createClusterStateWatchersAndUpdate(); @@ -1284,15 +1279,9 @@ public void testExternalClusterStateChangeBehavior() throws Exception { ZkDistributedQueue q = overseers.get(0).getStateUpdateQueue(); + createCollection("c1", 1); - ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(), - "name", "c1", - ZkStateReader.REPLICATION_FACTOR, "1", - ZkStateReader.NUM_SHARDS_PROP, "1", - "createNodeSet", ""); - q.offer(Utils.toJSON(m)); - - m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(), + ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(), ZkStateReader.BASE_URL_PROP, "http://127.0.0.1/solr", ZkStateReader.SHARD_ID_PROP, "shard1", ZkStateReader.NODE_NAME_PROP, "node1", @@ -1330,28 +1319,32 @@ public void testExternalClusterStateChangeBehavior() throws Exception { q.offer(Utils.toJSON(m)); - Stat stat = new Stat(); - byte[] data = zkClient.getData("/clusterstate.json", null, stat, true); - // Simulate an external modification - zkClient.setData("/clusterstate.json", data, true); - + final String testCollectionName = "test"; + zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/" + testCollectionName, true); m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(), - "name", "test", + "name", testCollectionName, ZkStateReader.NUM_SHARDS_PROP, "1", - ZkStateReader.REPLICATION_FACTOR, "1", - DocCollection.STATE_FORMAT, "2" + ZkStateReader.REPLICATION_FACTOR, "1" ); q.offer(Utils.toJSON(m)); + // Wait for the overseer to create state.json for the collection + waitForCollections(reader, testCollectionName); + + final String path = ZkStateReader.COLLECTIONS_ZKNODE + "/" + testCollectionName + "/state.json"; + 
byte[] data = zkClient.getData(path, null, null, true); + // Simulate an external modification of state.json + zkClient.setData(path, data, true); + m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATESHARD.toLower(), - "collection", "test", + "collection", testCollectionName, ZkStateReader.SHARD_ID_PROP, "x", ZkStateReader.REPLICATION_FACTOR, "1" ); q.offer(Utils.toJSON(m)); m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.ADDREPLICA.toLower(), - "collection", "test", + "collection", testCollectionName, ZkStateReader.SHARD_ID_PROP, "x", ZkStateReader.BASE_URL_PROP, "http://127.0.0.1/solr", ZkStateReader.CORE_NODE_NAME_PROP, "core_node1", @@ -1361,8 +1354,9 @@ public void testExternalClusterStateChangeBehavior() throws Exception { ); q.offer(Utils.toJSON(m)); - waitForCollections(reader, "test"); - verifyReplicaStatus(reader, "test", "x", "core_node1", Replica.State.DOWN); + // Verify replica creation worked ok in spite of external update of state.json (although in theory such updates + // do not happen unless an old overseer is still updating ZK after a new Overseer got elected...). + verifyReplicaStatus(reader, testCollectionName, "x", "core_node1", Replica.State.DOWN); waitForCollections(reader, "c1"); verifyReplicaStatus(reader, "c1", "shard1", "core_node1", Replica.State.ACTIVE); @@ -1478,6 +1472,7 @@ public void testRemovalOfLastReplica() throws Exception { // create collection { final Integer maxShardsPerNode = numReplicas * numShards; + zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/" + COLLECTION, true); ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(), "name", COLLECTION, ZkStateReader.NUM_SHARDS_PROP, numShards.toString(), diff --git a/solr/core/src/test/org/apache/solr/cloud/ReindexCollectionTest.java b/solr/core/src/test/org/apache/solr/cloud/ReindexCollectionTest.java index a2a6de88f614..8bd0c8757486 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ReindexCollectionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ReindexCollectionTest.java @@ -132,6 +132,7 @@ public void testBasicReindexing() throws Exception { .setTarget(targetCollection); CollectionAdminResponse rsp = req.process(solrClient); assertNotNull(rsp.toString(), rsp.getResponse().get(ReindexCollectionCmd.REINDEX_STATUS)); + @SuppressWarnings({"unchecked"}) Map status = (Map)rsp.getResponse().get(ReindexCollectionCmd.REINDEX_STATUS); assertEquals(status.toString(), (long)NUM_DOCS, ((Number)status.get("inputDocs")).longValue()); assertEquals(status.toString(), (long)NUM_DOCS, ((Number)status.get("processedDocs")).longValue()); @@ -334,6 +335,7 @@ public void testFailure() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testAbort() throws Exception { final String sourceCollection = "abortReindexing"; final String targetCollection = "abortReindexingTarget"; diff --git a/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeNoTargetTest.java b/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeNoTargetTest.java index 6778a1558459..cd5d4be5c6a5 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeNoTargetTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeNoTargetTest.java @@ -60,7 +60,9 @@ protected String getSolrXml() { @LuceneTestCase.AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-11067") public void test() throws Exception { String coll = "replacenodetest_coll_notarget"; - log.info("total_jettys: " + 
cluster.getJettySolrRunners().size()); + if (log.isInfoEnabled()) { + log.info("total_jettys: {}", cluster.getJettySolrRunners().size()); + } CloudSolrClient cloudClient = cluster.getSolrClient(); Set liveNodes = cloudClient.getZkStateReader().getClusterState().getLiveNodes(); @@ -71,6 +73,7 @@ public void test() throws Exception { String setClusterPolicyCommand = "{" + " 'set-cluster-policy': [" + " {'replica':'<5', 'shard': '#EACH', 'node': '#ANY'}]}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand); solrClient.request(req); @@ -79,11 +82,13 @@ cloudClient.request(create); cluster.waitForActiveCollection(coll, 5, 10); - log.info("Current core status list for node we plan to decommision: {} => {}", - node2bdecommissioned, - getCoreStatusForNamedNode(cloudClient, node2bdecommissioned).getCoreStatus()); - - log.info("Decommisioning node: " + node2bdecommissioned); + if (log.isInfoEnabled()) { + log.info("Current core status list for node we plan to decommission: {} => {}", + node2bdecommissioned, + getCoreStatusForNamedNode(cloudClient, node2bdecommissioned).getCoreStatus()); + log.info("Decommissioning node: {}", node2bdecommissioned); + } + createReplaceNodeRequest(node2bdecommissioned, null, null).processAsync("001", cloudClient); CollectionAdminRequest.RequestStatus requestStatus = CollectionAdminRequest.requestStatus("001"); boolean success = false; diff --git a/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeTest.java b/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeTest.java index 0412330b1cdb..b60c8508c2eb 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeTest.java @@ -60,7 +60,9 @@ protected String getSolrXml() { @Test public void test() throws Exception { String coll = "replacenodetest_coll"; - log.info("total_jettys: " + cluster.getJettySolrRunners().size()); + if (log.isInfoEnabled()) { + log.info("total_jettys: {}", cluster.getJettySolrRunners().size()); + } CloudSolrClient cloudClient = cluster.getSolrClient(); Set liveNodes = cloudClient.getZkStateReader().getClusterState().getLiveNodes(); @@ -89,7 +91,7 @@ public void test() throws Exception { cluster.waitForActiveCollection(coll, 5, 5 * (create.getNumNrtReplicas() + create.getNumPullReplicas() + create.getNumTlogReplicas())); DocCollection collection = cloudClient.getZkStateReader().getClusterState().getCollection(coll); - log.debug("### Before decommission: " + collection); + log.debug("### Before decommission: {}", collection); log.info("excluded_node : {} ", emptyNode); createReplaceNodeRequest(node2bdecommissioned, emptyNode, null).processAsync("000", cloudClient); CollectionAdminRequest.RequestStatus requestStatus = CollectionAdminRequest.requestStatus("000"); @@ -111,13 +113,13 @@ public void test() throws Exception { Thread.sleep(5000); collection = cloudClient.getZkStateReader().getClusterState().getCollection(coll); - log.debug("### After decommission: " + collection); + log.debug("### After decommission: {}", collection); // check what are replica states on the decommissioned node List replicas = collection.getReplicas(node2bdecommissioned); if (replicas == null) { replicas = Collections.emptyList(); } - log.debug("### Existing replicas on decommissioned node: " + replicas); + log.debug("### Existing replicas on decommissioned node: {}", replicas); //let's do it back - this time wait for recoveries 
CollectionAdminRequest.AsyncCollectionAdminRequest replaceNodeRequest = createReplaceNodeRequest(emptyNode, node2bdecommissioned, Boolean.TRUE); diff --git a/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java b/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java index 3fc3580d2eab..b4e7e286b838 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java @@ -94,8 +94,10 @@ public void test() throws Exception { testRf2NotUsingDirectUpdates(); waitForThingsToLevelOut(30, TimeUnit.SECONDS); - log.info("replication factor testing complete! final clusterState is: "+ - cloudClient.getZkStateReader().getClusterState()); + if (log.isInfoEnabled()) { + log.info("replication factor testing complete! final clusterState is: {}", + cloudClient.getZkStateReader().getClusterState()); + } } protected void testRf2NotUsingDirectUpdates() throws Exception { diff --git a/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java b/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java index 3a0d7311dc92..a006d94335a7 100644 --- a/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java @@ -83,7 +83,7 @@ public void restartWithRolesTest() throws Exception { boolean sawLiveDesignate = false; int numRestarts = 1 + random().nextInt(TEST_NIGHTLY ? 12 : 2); for (int i = 0; i < numRestarts; i++) { - log.info("Rolling restart #{}", i + 1); + log.info("Rolling restart #{}", i + 1); // logOk for (CloudJettyRunner cloudJetty : designateJettys) { log.info("Restarting {}", cloudJetty); chaosMonkey.stopJetty(cloudJetty); @@ -95,8 +95,8 @@ public void restartWithRolesTest() throws Exception { if (!success) { leader = OverseerCollectionConfigSetProcessor.getLeaderNode(cloudClient.getZkStateReader().getZkClient()); if (leader == null) - log.error("NOOVERSEER election queue is :" + - OverseerCollectionConfigSetProcessor.getSortedElectionNodes(cloudClient.getZkStateReader().getZkClient(), + log.error("NOOVERSEER election queue is : {}" + , OverseerCollectionConfigSetProcessor.getSortedElectionNodes(cloudClient.getZkStateReader().getZkClient(), "/overseer_elect/election")); fail("No overseer designate as leader found after restart #" + (i + 1) + ": " + leader); } @@ -106,8 +106,8 @@ public void restartWithRolesTest() throws Exception { if (!success) { leader = OverseerCollectionConfigSetProcessor.getLeaderNode(cloudClient.getZkStateReader().getZkClient()); if (leader == null) - log.error("NOOVERSEER election queue is :" + - OverseerCollectionConfigSetProcessor.getSortedElectionNodes(cloudClient.getZkStateReader().getZkClient(), + log.error("NOOVERSEER election queue is :{}" + , OverseerCollectionConfigSetProcessor.getSortedElectionNodes(cloudClient.getZkStateReader().getZkClient(), "/overseer_elect/election")); fail("No overseer leader found after restart #" + (i + 1) + ": " + leader); } diff --git a/solr/core/src/test/org/apache/solr/cloud/RoutingToNodesWithPropertiesTest.java b/solr/core/src/test/org/apache/solr/cloud/RoutingToNodesWithPropertiesTest.java index 9133875b3a59..4f9cbfc09ba1 100644 --- a/solr/core/src/test/org/apache/solr/cloud/RoutingToNodesWithPropertiesTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/RoutingToNodesWithPropertiesTest.java @@ -83,6 +83,7 @@ public void setupCluster() throws Exception { " 'shard':'#EACH'," + " 'sysprop.zone':'#EACH'}]}"; + @SuppressWarnings({"rawtypes"}) 
SolrRequest req = CloudTestUtils.AutoScalingRequest.create(SolrRequest.METHOD.POST, commands); cluster.getSolrClient().request(req); diff --git a/solr/core/src/test/org/apache/solr/cloud/SSLMigrationTest.java b/solr/core/src/test/org/apache/solr/cloud/SSLMigrationTest.java index c43bf6ce6ace..c2566a77357e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SSLMigrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SSLMigrationTest.java @@ -123,6 +123,7 @@ private void setUrlScheme(String value) throws Exception { .toLowerCase(Locale.ROOT), "name", "urlScheme", "val", value); @SuppressWarnings("unchecked") SolrParams params = new MapSolrParams(m); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); diff --git a/solr/core/src/test/org/apache/solr/cloud/SaslZkACLProviderTest.java b/solr/core/src/test/org/apache/solr/cloud/SaslZkACLProviderTest.java index 4e3d62e30e22..aaeb9a9d5bc3 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SaslZkACLProviderTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SaslZkACLProviderTest.java @@ -69,11 +69,13 @@ public static void afterClass() { @Override public void setUp() throws Exception { super.setUp(); - log.info("####SETUP_START " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_START {}", getTestName()); + } createTempDir(); Path zkDir = createTempDir().resolve("zookeeper/server1/data"); - log.info("ZooKeeper dataDir:" + zkDir); + log.info("ZooKeeper dataDir:{}", zkDir); zkServer = new SaslZkTestServer(zkDir, createTempDir().resolve("miniKdc")); zkServer.run(); @@ -84,7 +86,9 @@ public void setUp() throws Exception { } setupZNodes(); - log.info("####SETUP_END " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_END {}", getTestName()); + } } protected void setupZNodes() throws Exception { diff --git a/solr/core/src/test/org/apache/solr/cloud/ShardRoutingCustomTest.java b/solr/core/src/test/org/apache/solr/cloud/ShardRoutingCustomTest.java index 9a97264e7330..2c239a12b2b3 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ShardRoutingCustomTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ShardRoutingCustomTest.java @@ -56,8 +56,6 @@ public void test() throws Exception { private void doCustomSharding() throws Exception { printLayout(); - - File jettyDir = createTempDir("jetty").toFile(); jettyDir.mkdirs(); setupJettySolrHome(jettyDir); @@ -65,7 +63,6 @@ private void doCustomSharding() throws Exception { j.start(); assertEquals(0, CollectionAdminRequest .createCollection(DEFAULT_COLLECTION, "conf1", 1, 1) - .setStateFormat(Integer.parseInt(getStateFormat())) .setCreateNodeSet("") .process(cloudClient).getStatus()); assertTrue(CollectionAdminRequest diff --git a/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverTest.java b/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverTest.java index 29ba03644d5d..2f2217b5f1db 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverTest.java @@ -116,11 +116,6 @@ public static void hdfsFailoverAfterClass() throws Exception { public void setUp() throws Exception { super.setUp(); collectionUlogDirMap.clear(); - if (random().nextBoolean()) { - CollectionAdminRequest.setClusterProperty("legacyCloud", "false").process(cloudClient); - } else { - CollectionAdminRequest.setClusterProperty("legacyCloud", "true").process(cloudClient); - } } @Override 
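A note on the pattern recurring throughout these test diffs: string-concatenated log calls are replaced with SLF4J parameterized messages, and calls whose arguments are themselves expensive to compute (cluster state dumps, core status lookups, getJettySolrRunners().size()) are additionally wrapped in a log.isInfoEnabled() guard. A minimal sketch of the before/after shapes, using an illustrative class name that is not part of this patch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class GuardedLoggingSketch {
      private static final Logger log = LoggerFactory.getLogger(GuardedLoggingSketch.class);

      void logClusterState(Object clusterState) {
        // Before: concatenation builds the full message string (and invokes
        // toString() on the argument) even when INFO logging is disabled.
        log.info("clusterState: " + clusterState);

        // After: the {} placeholder defers formatting to the logger, and the
        // guard skips evaluating expensive argument expressions entirely
        // when INFO is disabled.
        if (log.isInfoEnabled()) {
          log.info("clusterState: {}", clusterState);
        }
      }
    }

The // logOk markers added in printTimingStats above appear to exempt individual lines from the validate-log-calls check once they already sit inside an isInfoEnabled() guard.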
diff --git a/solr/core/src/test/org/apache/solr/cloud/SliceStateTest.java b/solr/core/src/test/org/apache/solr/cloud/SliceStateTest.java index cab5420d4c0d..7a3d02a02413 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SliceStateTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SliceStateTest.java @@ -51,9 +51,9 @@ public void testDefaultSliceState() { slices.put("shard1", slice); collectionStates.put("collection1", new DocCollection("collection1", slices, null, DocRouter.DEFAULT)); - ClusterState clusterState = new ClusterState(-1,liveNodes, collectionStates); + ClusterState clusterState = new ClusterState(liveNodes, collectionStates); byte[] bytes = Utils.toJSON(clusterState); - ClusterState loadedClusterState = ClusterState.load(-1, bytes, liveNodes); + ClusterState loadedClusterState = ClusterState.createFromJson(-1, bytes, liveNodes); assertSame("Default state not set to active", Slice.State.ACTIVE, loadedClusterState.getCollection("collection1").getSlice("shard1").getState()); } diff --git a/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java b/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java index 289fb3e42f44..7d63d8acf46c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java @@ -98,7 +98,7 @@ public void testLoadDocsIntoGettingStartedCollection() throws Exception { SolrCLI.CreateCollectionTool tool = new SolrCLI.CreateCollectionTool(); CommandLine cli = SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args); - log.info("Creating the '"+testCollectionName+"' collection using SolrCLI with: "+solrUrl); + log.info("Creating the '{}' collection using SolrCLI with: {}", testCollectionName, solrUrl); tool.runTool(cli); assertTrue("Collection '" + testCollectionName + "' doesn't exist after trying to create it!", cloudClient.getZkStateReader().getClusterState().hasCollection(testCollectionName)); @@ -140,7 +140,9 @@ public boolean accept(File dir, String name) { expectedXmlFileCount, xmlFiles.size()); for (File xml : xmlFiles) { - log.info("POSTing "+xml.getAbsolutePath()); + if (log.isInfoEnabled()) { + log.info("POSTing {}", xml.getAbsolutePath()); + } cloudClient.request(new StreamingUpdateRequest("/update",xml,"application/xml")); } cloudClient.commit(); @@ -156,14 +158,14 @@ public boolean accept(File dir, String name) { } assertEquals("*:* found unexpected number of documents", expectedXmlDocCount, numFound); - log.info("Updating Config for " + testCollectionName); + log.info("Updating Config for {}", testCollectionName); doTestConfigUpdate(testCollectionName, solrUrl); - log.info("Running healthcheck for " + testCollectionName); + log.info("Running healthcheck for {}", testCollectionName); doTestHealthcheck(testCollectionName, cloudClient.getZkHost()); // verify the delete action works too - log.info("Running delete for "+testCollectionName); + log.info("Running delete for {}", testCollectionName); doTestDeleteAction(testCollectionName, solrUrl); log.info("testLoadDocsIntoGettingStartedCollection succeeded ... 
shutting down now!"); @@ -218,7 +220,7 @@ protected void doTestConfigUpdate(String testCollectionName, String solrUrl) thr SolrCLI.ConfigTool tool = new SolrCLI.ConfigTool(); CommandLine cli = SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args); - log.info("Sending set-property '" + prop + "'=" + maxTime + " to SolrCLI.ConfigTool."); + log.info("Sending set-property '{}'={} to SolrCLI.ConfigTool.", prop, maxTime); assertTrue("Set config property failed!", tool.runTool(cli) == 0); configJson = SolrCLI.getJson(configUrl); @@ -234,7 +236,9 @@ protected void doTestConfigUpdate(String testCollectionName, String solrUrl) thr assertEquals("Should have been able to get a value from the /query request handler", "explicit", SolrCLI.atPath("/config/requestHandler/\\/query/defaults/echoParams", configJson)); - log.info("live_nodes_count : " + cloudClient.getZkStateReader().getClusterState().getLiveNodes()); + if (log.isInfoEnabled()) { + log.info("live_nodes_count : {}", cloudClient.getZkStateReader().getClusterState().getLiveNodes()); + } // Since it takes some time for this command to complete we need to make sure all the reloads for // all the cores have been done. @@ -264,6 +268,7 @@ private Map getSoftAutocommitInterval(String collection) throws Ex for (Slice slice : coll.getActiveSlices()) { for (Replica replica : slice.getReplicas()) { String uri = "" + replica.get(ZkStateReader.BASE_URL_PROP) + "/" + replica.get(ZkStateReader.CORE_NAME_PROP) + "/config"; + @SuppressWarnings({"rawtypes"}) Map respMap = getAsMap(cloudClient, uri); Long maxTime = (Long) (getObjectByPath(respMap, true, asList("config", "updateHandler", "autoSoftCommit", "maxTime"))); ret.put(replica.getCoreName(), maxTime); @@ -272,6 +277,7 @@ private Map getSoftAutocommitInterval(String collection) throws Ex return ret; } + @SuppressWarnings({"rawtypes"}) private Map getAsMap(CloudSolrClient cloudClient, String uri) throws Exception { HttpGet get = new HttpGet(uri); HttpEntity entity = null; diff --git a/solr/core/src/test/org/apache/solr/cloud/SolrXmlInZkTest.java b/solr/core/src/test/org/apache/solr/cloud/SolrXmlInZkTest.java index e8d0e92bad05..8e1358c9fb83 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SolrXmlInZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SolrXmlInZkTest.java @@ -81,7 +81,9 @@ private void setUpZkAndDiskXml(boolean toZk, boolean leaveOnLocal) throws Except zkClient.close(); - log.info("####SETUP_START " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_START {}", getTestName()); + } // set some system properties for use by tests Properties props = new Properties(); @@ -89,7 +91,9 @@ private void setUpZkAndDiskXml(boolean toZk, boolean leaveOnLocal) throws Except props.setProperty("solr.test.sys.prop2", "proptwo"); cfg = SolrDispatchFilter.loadNodeConfig(solrHome, props); - log.info("####SETUP_END " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_END {}", getTestName()); + } } private void closeZK() throws Exception { diff --git a/solr/core/src/test/org/apache/solr/cloud/SplitShardTest.java b/solr/core/src/test/org/apache/solr/cloud/SplitShardTest.java index 9d780d310a9d..98240e6c250e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SplitShardTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SplitShardTest.java @@ -181,7 +181,7 @@ long getNumDocs(CloudSolrClient client) throws Exception { long numFound = 0; try { numFound = replicaClient.query(params("q", "*:*", "distrib", 
"false")).getResults().getNumFound(); - log.info("Replica count=" + numFound + " for " + replica); + log.info("Replica count={} for {}", numFound, replica); } finally { replicaClient.close(); } @@ -269,11 +269,11 @@ void doLiveSplitShard(String collectionName, int repFactor, int nThreads) throws String id = (String) doc.get("id"); leftover.remove(id); } - log.error("MISSING DOCUMENTS: " + leftover); + log.error("MISSING DOCUMENTS: {}", leftover); } assertEquals("Documents are missing!", docsIndexed.get(), numDocs); - log.info("Number of documents indexed and queried : " + numDocs); + log.info("Number of documents indexed and queried : {}", numDocs); } diff --git a/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java b/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java index 4394eb14aa10..c5a249f58c6c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java @@ -102,6 +102,7 @@ public void test() throws Exception { params.set("action", CollectionAction.SYNCSHARD.toString()); params.set("collection", "collection1"); params.set("shard", "shard1"); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); diff --git a/solr/core/src/test/org/apache/solr/cloud/SystemCollectionCompatTest.java b/solr/core/src/test/org/apache/solr/cloud/SystemCollectionCompatTest.java index e78f3eaf21d9..fba4aff67eae 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SystemCollectionCompatTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SystemCollectionCompatTest.java @@ -130,7 +130,7 @@ public void setupSystemCollection() throws Exception { long currentTime = getCoreStatus(r).getCoreStartTime().getTime(); allReloaded = allReloaded && (previousTime < currentTime); } catch (Exception e) { - log.warn("Error retrieving replica status of " + Utils.toJSONString(r), e); + log.warn("Error retrieving replica status of {}", Utils.toJSONString(r), e); allReloaded = false; } } @@ -180,12 +180,15 @@ public void testBackCompat() throws Exception { } assertNotNull(overseerNode); LogWatcherConfig watcherCfg = new LogWatcherConfig(true, null, "WARN", 100); + @SuppressWarnings({"rawtypes"}) LogWatcher watcher = LogWatcher.newRegisteredLogWatcher(watcherCfg, null); watcher.reset(); // restart Overseer to trigger the back-compat check - log.info("Stopping Overseer Node: {} ({})", overseerNode.getNodeName(), overseerNode.getLocalPort()); + if (log.isInfoEnabled()) { + log.info("Stopping Overseer Node: {} ({})", overseerNode.getNodeName(), overseerNode.getLocalPort()); + } cluster.stopJettySolrRunner(overseerNode); log.info("Waiting for new overseer election..."); TimeOut timeOut = new TimeOut(30, TimeUnit.SECONDS, cloudManager.getTimeSource()); @@ -217,11 +220,15 @@ public void testBackCompat() throws Exception { continue; } if (doc.getFieldValue("message").toString().contains("re-indexing")) { - log.info("Found re-indexing message: {}", doc.getFieldValue("message")); + if (log.isInfoEnabled()) { + log.info("Found re-indexing message: {}", doc.getFieldValue("message")); + } foundWarning = true; } if (doc.getFieldValue("message").toString().contains("timestamp")) { - log.info("Found timestamp message: {}", doc.getFieldValue("message")); + if (log.isInfoEnabled()) { + log.info("Found timestamp message: {}", doc.getFieldValue("message")); + } foundSchemaWarning = true; } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestAuthenticationFramework.java 
b/solr/core/src/test/org/apache/solr/cloud/TestAuthenticationFramework.java index c3635fb77c29..fa19dcc92b29 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestAuthenticationFramework.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestAuthenticationFramework.java @@ -145,16 +145,16 @@ public boolean doAuthenticate(HttpServletRequest request, HttpServletResponse re filterChain.doFilter(request, response); return true; } - HttpServletRequest httpRequest = (HttpServletRequest)request; + HttpServletRequest httpRequest = request; String username = httpRequest.getHeader("username"); String password = httpRequest.getHeader("password"); - log.info("Username: "+username+", password: "+password); + log.info("Username: {}, password: {}", username, password); if(MockAuthenticationPlugin.expectedUsername.equals(username) && MockAuthenticationPlugin.expectedPassword.equals(password)) { filterChain.doFilter(request, response); return true; } else { - ((HttpServletResponse)response).sendError(401, "Unauthorized request"); + response.sendError(401, "Unauthorized request"); return false; } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestBaseStatsCacheCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestBaseStatsCacheCloud.java index 85f9f5dda47f..2a200fa2f797 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestBaseStatsCacheCloud.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestBaseStatsCacheCloud.java @@ -108,6 +108,7 @@ public void tearDownCluster() { } @Test + @SuppressWarnings({"unchecked"}) public void testBasicStats() throws Exception { QueryResponse cloudRsp = solrClient.query(collectionName, params("q", "foo_t:\"bar baz\"", "fl", "*,score", "rows", "" + NUM_DOCS, "debug", "true")); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java index 9d04f99c629e..61e6dbdc881c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java @@ -69,7 +69,9 @@ public void setupCluster() throws Exception { cluster.stopJettySolrRunner(jetty);//TODO: Can we avoid this restart cluster.startJettySolrRunner(jetty); proxy.open(jetty.getBaseUrl().toURI()); - log.info("Adding proxy for URL: " + jetty.getBaseUrl() + ". Proxy: " + proxy.getUrl()); + if (log.isInfoEnabled()) { + log.info("Adding proxy for URL: {}. 
Proxy: {}", jetty.getBaseUrl(), proxy.getUrl()); + } proxies.put(jetty, proxy); jettys.put(proxy.getUrl(), jetty); } @@ -293,12 +295,14 @@ private void assertDocsExistInAllReplicas(List notLeaders, } private void assertDocExists(HttpSolrClient solr, String coll, String docId) throws Exception { + @SuppressWarnings({"rawtypes"}) NamedList rsp = realTimeGetDocId(solr, docId); String match = JSONTestUtil.matchObj("/id", rsp.get("doc"), docId); assertTrue("Doc with id=" + docId + " not found in " + solr.getBaseURL() + " due to: " + match + "; rsp="+rsp, match == null); } + @SuppressWarnings({"rawtypes"}) private NamedList realTimeGetDocId(HttpSolrClient solr, String docId) throws SolrServerException, IOException { QueryRequest qr = new QueryRequest(params("qt", "/get", "id", docId, "distrib", "false")); return solr.request(qr); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudPhrasesIdentificationComponent.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudPhrasesIdentificationComponent.java index 8bd391950eb3..ff2af57a3286 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCloudPhrasesIdentificationComponent.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudPhrasesIdentificationComponent.java @@ -135,10 +135,12 @@ public void testBasicPhrases() throws Exception { req.setPath(path); final QueryResponse rsp = req.process(getRandClient(random())); try { + @SuppressWarnings({"unchecked"}) NamedList phrases = (NamedList) rsp.getResponse().get("phrases"); assertEquals("input", input, phrases.get("input")); assertEquals("summary", expected, phrases.get("summary")); + @SuppressWarnings({"unchecked"}) final List> details = (List>) phrases.get("details"); assertNotNull("null details", details); assertEquals("num phrases found", 2, details.size()); @@ -167,10 +169,12 @@ public void testEmptyInput() throws Exception { req.setPath("/phrases"); final QueryResponse rsp = req.process(getRandClient(random())); try { + @SuppressWarnings({"unchecked"}) NamedList phrases = (NamedList) rsp.getResponse().get("phrases"); assertEquals("input", input, phrases.get("input")); assertEquals("summary", input, phrases.get("summary")); + @SuppressWarnings({"unchecked"}) final List> details = (List>) phrases.get("details"); assertNotNull("null details", details); assertEquals("num phrases found", 0, details.size()); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java index e5fef491b388..64f3466941bb 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java @@ -362,7 +362,7 @@ private void assertPivotStats(String message, PivotField constraint, QueryRespon // no stats for this pivot, nothing to check // TODO: use a trace param to know if/how-many to expect ? 
- log.info("No stats to check for => " + message); + log.info("No stats to check for => {}", message); return; } @@ -380,7 +380,7 @@ private void assertPivotStats(String message, PivotField constraint, QueryRespon // StatsComponent results being "null" (and not even included in the // getFieldStatsInfo() Map due to specila SolrJ logic) - log.info("Requested stats missing in verification query, pivot stats: " + pivotStats); + log.info("Requested stats missing in verification query, pivot stats: {}", pivotStats); assertEquals("Special Count", 0L, pivotStats.getCount().longValue()); assertEquals("Special Missing", constraint.getCount(), pivotStats.getMissing().longValue()); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java index 70680c250a93..5e20994e5970 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java @@ -259,7 +259,9 @@ public boolean onStateChanged(Set liveNodes, DocCollection collectionSta log.info("Active replica: {}", coreNodeName); for (int i = 0; i < cluster.getJettySolrRunners().size(); i++) { JettySolrRunner jettySolrRunner = cluster.getJettySolrRunner(i); - log.info("Checking node: {}", jettySolrRunner.getNodeName()); + if (log.isInfoEnabled()) { + log.info("Checking node: {}", jettySolrRunner.getNodeName()); + } if (jettySolrRunner.getNodeName().equals(replica.getNodeName())) { SolrDispatchFilter solrDispatchFilter = jettySolrRunner.getSolrDispatchFilter(); try (SolrCore core = solrDispatchFilter.getCores().getCore(coreName)) { @@ -268,7 +270,9 @@ public boolean onStateChanged(Set liveNodes, DocCollection collectionSta assert false; return false; } - log.info("Found SolrCore: {}, id: {}", core.getName(), core); + if (log.isInfoEnabled()) { + log.info("Found SolrCore: {}, id: {}", core.getName(), core); + } RefCounted registeredSearcher = core.getRegisteredSearcher(); if (registeredSearcher != null) { log.error("registered searcher not null, maxdocs = {}", registeredSearcher.get().maxDoc()); @@ -307,7 +311,7 @@ public boolean onStateChanged(Set liveNodes, DocCollection collectionSta public static class SleepingSolrEventListener implements SolrEventListener { @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { // No-Op } @@ -326,13 +330,19 @@ public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher current if (sleepTime.get() > 0) { TestCloudSearcherWarming.coreNodeNameRef.set(newSearcher.getCore().getCoreDescriptor().getCloudDescriptor().getCoreNodeName()); TestCloudSearcherWarming.coreNameRef.set(newSearcher.getCore().getName()); - log.info("Sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}", sleepTime.get(), newSearcher, currentSearcher, newSearcher.getCore().getName(), newSearcher.getCore()); + if (log.isInfoEnabled()) { + log.info("Sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}" + , sleepTime.get(), newSearcher, currentSearcher, newSearcher.getCore().getName(), newSearcher.getCore()); + } try { Thread.sleep(sleepTime.get()); } catch (InterruptedException e) { log.warn("newSearcher was interupdated", e); } - log.info("Finished sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}", sleepTime.get(), newSearcher, currentSearcher, 
newSearcher.getCore().getName(), newSearcher.getCore()); + if (log.isInfoEnabled()) { + log.info("Finished sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}" + , sleepTime.get(), newSearcher, currentSearcher, newSearcher.getCore().getName(), newSearcher.getCore()); + } } } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java b/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java index c082e371ff48..aa272e5fa0de 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java @@ -41,13 +41,13 @@ public void setUp() throws Exception { @Test public void testClusterProperties() throws Exception { - assertEquals("false", props.getClusterProperty(ZkStateReader.LEGACY_CLOUD, "false")); + assertEquals("false", props.getClusterProperty(ZkStateReader.AUTO_ADD_REPLICAS, "false")); - CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "true").process(cluster.getSolrClient()); - assertEquals("true", props.getClusterProperty(ZkStateReader.LEGACY_CLOUD, "false")); + CollectionAdminRequest.setClusterProperty(ZkStateReader.AUTO_ADD_REPLICAS, "true").process(cluster.getSolrClient()); + assertEquals("true", props.getClusterProperty(ZkStateReader.AUTO_ADD_REPLICAS, "false")); - CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "false").process(cluster.getSolrClient()); - assertEquals("false", props.getClusterProperty(ZkStateReader.LEGACY_CLOUD, "true")); + CollectionAdminRequest.setClusterProperty(ZkStateReader.AUTO_ADD_REPLICAS, "false").process(cluster.getSolrClient()); + assertEquals("false", props.getClusterProperty(ZkStateReader.AUTO_ADD_REPLICAS, "true")); } @Test diff --git a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java index 6939aa232e33..86742fada2dc 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java @@ -39,6 +39,7 @@ import java.util.Set; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; +import javax.script.ScriptEngineManager; import com.google.common.collect.ImmutableMap; import org.apache.commons.io.FileUtils; @@ -83,6 +84,7 @@ import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.junit.After; +import org.junit.Assume; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; @@ -212,6 +214,7 @@ private void verifyCreate(String baseConfigSetName, String configSetName, solrClient.close(); } + @SuppressWarnings({"rawtypes"}) private NamedList getConfigSetPropertiesFromZk( SolrZkClient zkClient, String path) throws Exception { byte [] oldPropsData = null; @@ -234,6 +237,7 @@ private NamedList getConfigSetPropertiesFromZk( private void verifyProperties(String configSetName, Map oldProps, Map newProps, SolrZkClient zkClient) throws Exception { + @SuppressWarnings({"rawtypes"}) NamedList properties = getConfigSetPropertiesFromZk(zkClient, ZkConfigManager.CONFIGS_ZKNODE + "/" + configSetName + "/" + DEFAULT_FILENAME); // let's check without merging the maps, since that's what the MessageHandler does @@ -258,6 +262,7 @@ private void verifyProperties(String configSetName, Map oldProps } // check the value in properties are correct + @SuppressWarnings({"unchecked"}) Iterator> it = properties.iterator(); while (it.hasNext()) { Map.Entry 
entry = it.next(); @@ -281,6 +286,7 @@ public void testUploadErrors() throws Exception { ByteBuffer emptyData = ByteBuffer.allocate(0); // Checking error when no configuration name is specified in request + @SuppressWarnings({"rawtypes"}) Map map = postDataAndGetResponse(solrCluster.getSolrClient(), solrCluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/admin/configs?action=UPLOAD", emptyData, null, null); @@ -344,6 +350,9 @@ public void testUpload() throws Exception { @Test public void testUploadWithScriptUpdateProcessor() throws Exception { + Assume.assumeNotNull((new ScriptEngineManager()).getEngineByExtension("js")); + Assume.assumeNotNull((new ScriptEngineManager()).getEngineByName("JavaScript")); + // Authorization off // unprotectConfigsHandler(); // TODO Enable this back when testUploadWithLibDirective() is re-enabled final String untrustedSuffix = "-untrusted"; @@ -483,6 +492,7 @@ private long uploadConfigSet(String configSetName, String suffix, String usernam ZkConfigManager configManager = new ZkConfigManager(zkClient); assertFalse(configManager.configExists(configSetName+suffix)); + @SuppressWarnings({"rawtypes"}) Map map = postDataAndGetResponse(solrCluster.getSolrClient(), solrCluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/admin/configs?action=UPLOAD&name="+configSetName+suffix, sampleZippedConfig, username, password); @@ -500,10 +510,14 @@ private String createTempZipFile(String directoryPath) { File.separator + TestUtil.randomSimpleString(random(), 6, 8) + ".zip"); File directory = TestDynamicLoading.getFile(directoryPath); - log.info("Directory: "+directory.getAbsolutePath()); + if (log.isInfoEnabled()) { + log.info("Directory: {}", directory.getAbsolutePath()); + } try { zip (directory, zipFile); - log.info("Zipfile: "+zipFile.getAbsolutePath()); + if (log.isInfoEnabled()) { + log.info("Zipfile: {}", zipFile.getAbsolutePath()); + } return zipFile.getAbsolutePath(); } catch (IOException e) { throw new RuntimeException(e); @@ -568,6 +582,7 @@ protected CollectionAdminResponse createCollection(String collectionName, String params.set("name", collectionName); params.set("numShards", numShards); params.set("replicationFactor", replicationFactor); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); @@ -576,6 +591,7 @@ protected CollectionAdminResponse createCollection(String collectionName, String return res; } + @SuppressWarnings({"rawtypes"}) public static Map postDataAndGetResponse(CloudSolrClient cloudClient, String uri, ByteBuffer bytarr, String username, String password) throws IOException { HttpPost httpPost = null; @@ -611,7 +627,9 @@ public static Map postDataAndGetResponse(CloudSolrClient cloudClient, return m; } - private static Object getObjectByPath(Map root, boolean onlyPrimitive, java.util.List hierarchy) { + private static Object getObjectByPath(@SuppressWarnings({"rawtypes"})Map root, + boolean onlyPrimitive, java.util.List hierarchy) { + @SuppressWarnings({"rawtypes"}) Map obj = root; for (int i = 0; i < hierarchy.size(); i++) { String s = hierarchy.get(i); @@ -668,7 +686,8 @@ public void testDeleteErrors() throws Exception { solrClient.close(); } - private void verifyException(SolrClient solrClient, ConfigSetAdminRequest request, + private void verifyException(SolrClient solrClient, + @SuppressWarnings({"rawtypes"})ConfigSetAdminRequest request, String errorContains) throws Exception { Exception e = expectThrows(Exception.class, () -> 
solrClient.request(request)); assertTrue("Expected exception message to contain: " + errorContains @@ -764,6 +783,7 @@ private StringBuilder getConfigSetProps(Map map) { } public static class CreateNoErrorChecking extends ConfigSetAdminRequest.Create { + @SuppressWarnings({"rawtypes"}) public ConfigSetAdminRequest setAction(ConfigSetAction action) { return super.setAction(action); } @@ -779,6 +799,7 @@ public SolrParams getParams() { } public static class DeleteNoErrorChecking extends ConfigSetAdminRequest.Delete { + @SuppressWarnings({"rawtypes"}) public ConfigSetAdminRequest setAction(ConfigSetAction action) { return super.setAction(action); } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIExclusivity.java b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIExclusivity.java index 407828bca0ac..20a88e0dcac3 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIExclusivity.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIExclusivity.java @@ -112,11 +112,13 @@ public ConfigSetsAPIThread(MiniSolrCloudCluster solrCluster, int trials) { this.trials = trials; } + @SuppressWarnings({"rawtypes"}) public abstract ConfigSetAdminRequest createRequest(); public void run() { final String baseUrl = solrCluster.getJettySolrRunners().get(0).getBaseUrl().toString(); final SolrClient solrClient = getHttpSolrClient(baseUrl); + @SuppressWarnings({"rawtypes"}) ConfigSetAdminRequest request = createRequest(); for (int i = 0; i < trials; ++i) { @@ -159,6 +161,7 @@ public CreateThread(MiniSolrCloudCluster solrCluster, String configSet, } @Override + @SuppressWarnings({"rawtypes"}) public ConfigSetAdminRequest createRequest() { Create create = new Create(); create.setBaseConfigSetName(baseConfigSet).setConfigSetName(configSet); @@ -175,6 +178,7 @@ public DeleteThread(MiniSolrCloudCluster solrCluster, String configSet, int tria } @Override + @SuppressWarnings({"rawtypes"}) public ConfigSetAdminRequest createRequest() { Delete delete = new Delete(); delete.setConfigSetName(configSet); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCryptoKeys.java b/solr/core/src/test/org/apache/solr/cloud/TestCryptoKeys.java index 146ad82fb0bd..321e208c2b65 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCryptoKeys.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCryptoKeys.java @@ -126,6 +126,7 @@ public void test() throws Exception { Arrays.asList("overlay", "runtimeLib", blobName, "version"), 1l, 10); + @SuppressWarnings({"rawtypes"}) LinkedHashMapWriter map = TestSolrConfigHandler.getRespMap("/runtime", client); String s = map._getStr( "error/msg",null); assertNotNull(map.toString(), s); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestDynamicFieldNamesIndexCorrectly.java b/solr/core/src/test/org/apache/solr/cloud/TestDynamicFieldNamesIndexCorrectly.java index e6048e153122..207e255e2973 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestDynamicFieldNamesIndexCorrectly.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestDynamicFieldNamesIndexCorrectly.java @@ -64,7 +64,7 @@ public void test() throws Exception { void populateIndex(int numRuns) throws IOException, SolrServerException { try { for (int i = 0; i < numRuns; i++) { - log.debug("Iteration number: " + i); + log.debug("Iteration number: {}", i); cloudClient.deleteByQuery(COLLECTION, "*:*"); cloudClient.commit(COLLECTION); @@ -74,7 +74,7 @@ void populateIndex(int numRuns) throws IOException, SolrServerException { final SolrQuery solrQuery = new 
SolrQuery("*:*"); solrQuery.setRows(solrDocs.size()); final SolrDocumentList resultDocs = getSolrResponse(solrQuery, COLLECTION); - log.debug(resultDocs.toString()); + log.debug("{}", resultDocs); assertThatDocsHaveCorrectFields(solrDocs, resultDocs); } } finally { diff --git a/solr/core/src/test/org/apache/solr/cloud/TestHashPartitioner.java b/solr/core/src/test/org/apache/solr/cloud/TestHashPartitioner.java index 55e69fae0f76..2188f6bc094d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestHashPartitioner.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestHashPartitioner.java @@ -267,6 +267,7 @@ public void testPrintHashCodes() throws Exception { + @SuppressWarnings({"unchecked"}) DocCollection createCollection(int nSlices, DocRouter router) { List ranges = router.partitionRange(nSlices, router.fullRange()); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java b/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java index c085b2876a8e..6bac5e84355f 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java @@ -77,7 +77,9 @@ public void run() { try { String leaderNode = OverseerCollectionConfigSetProcessor.getLeaderNode(zc); if (leaderNode != null && !leaderNode.trim().isEmpty()) { - log.info("Time={} Overseer leader is = {}", System.nanoTime(), leaderNode); + if (log.isInfoEnabled()) { + log.info("Time={} Overseer leader is = {}", System.nanoTime(), leaderNode); + } found = true; break; } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java b/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java index 50404be5d638..65a03e8ef0a7 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java @@ -52,7 +52,6 @@ import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkStateReader; -import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.TimeSource; import org.apache.solr.core.SolrCore; import org.apache.solr.util.TestInjection; @@ -89,11 +88,6 @@ public static void setupCluster() throws Exception { configureCluster(2) // 2 + random().nextInt(3) .addConfig("conf", configset("cloud-minimal")) .configure(); - Boolean useLegacyCloud = rarely(); - log.info("Using legacyCloud?: {}", useLegacyCloud); - CollectionAdminRequest.ClusterProp clusterPropRequest = CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, String.valueOf(useLegacyCloud)); - CollectionAdminResponse response = clusterPropRequest.process(cluster.getSolrClient()); - assertEquals(0, response.getStatus()); } @AfterClass @@ -267,7 +261,7 @@ public void testAddDocs() throws Exception { "stats", "true"); QueryResponse statsResponse = pullReplicaClient.query(req); assertEquals("Replicas shouldn't process the add document request: " + statsResponse, - 0L, ((Map)((NamedList)statsResponse.getResponse()).findRecursive("plugins", "UPDATE", "updateHandler", "stats")).get("UPDATE.updateHandler.adds")); + 0L, ((Map)(statsResponse.getResponse()).findRecursive("plugins", "UPDATE", "updateHandler", "stats")).get("UPDATE.updateHandler.adds")); } } if (reloaded) { @@ -334,13 +328,17 @@ public void testPullReplicaStates() throws Exception { return false; } statesSeen.add(r.getState()); - log.info("CollectionStateWatcher saw state: {}", r.getState()); + if 
(log.isInfoEnabled()) { + log.info("CollectionStateWatcher saw state: {}", r.getState()); + } return r.getState() == Replica.State.ACTIVE; }); CollectionAdminRequest.addReplicaToShard(collectionName, "shard1", Replica.Type.PULL).process(cluster.getSolrClient()); waitForState("Replica not added", collectionName, activeReplicaCount(1, 0, 1)); zkClient().printLayoutToStream(System.out); - log.info("Saw states: " + Arrays.toString(statesSeen.toArray())); + if (log.isInfoEnabled()) { + log.info("Saw states: {}", Arrays.toString(statesSeen.toArray())); + } assertEquals("Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), 3, statesSeen.size()); assertEquals("Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), Replica.State.DOWN, statesSeen.get(0)); assertEquals("Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), Replica.State.RECOVERING, statesSeen.get(1)); @@ -393,6 +391,7 @@ public void testRealTimeGet() throws SolrServerException, IOException, KeeperExc /* * validate that replication still happens on a new leader */ + @SuppressWarnings({"try"}) private void doTestNoLeader(boolean removeReplica) throws Exception { CollectionAdminRequest.createCollection(collectionName, "conf", 1, 1, 0, 1) .setMaxShardsPerNode(100) diff --git a/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java b/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java index a44958913feb..4ede3ea43fb6 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java @@ -36,7 +36,6 @@ import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; -import org.apache.solr.client.solrj.response.CollectionAdminResponse; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.cloud.CollectionStatePredicate; @@ -85,24 +84,12 @@ public static void setupCluster() throws Exception { cluster.startJettySolrRunner(jetty); cluster.waitForAllNodes(30); proxy.open(jetty.getBaseUrl().toURI()); - log.info("Adding proxy for URL: " + jetty.getBaseUrl() + ". Proxy: " + proxy.getUrl()); + if (log.isInfoEnabled()) { + log.info("Adding proxy for URL: {}. 
Proxy: {}", jetty.getBaseUrl(), proxy.getUrl()); + } proxies.put(proxy.getUrl(), proxy); jettys.put(proxy.getUrl(), jetty); } - TimeOut t = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); - while (true) { - try { - CollectionAdminRequest.ClusterProp clusterPropRequest = CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "false"); - CollectionAdminResponse response = clusterPropRequest.process(cluster.getSolrClient()); - assertEquals(0, response.getStatus()); - break; - } catch (SolrServerException e) { - Thread.sleep(50); - if (t.hasTimedOut()) { - throw e; - } - } - } } @AfterClass diff --git a/solr/core/src/test/org/apache/solr/cloud/TestQueryingOnDownCollection.java b/solr/core/src/test/org/apache/solr/cloud/TestQueryingOnDownCollection.java index 1cd70f4fc373..1ed2341e1fab 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestQueryingOnDownCollection.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestQueryingOnDownCollection.java @@ -84,6 +84,7 @@ public void testQueryToDownCollectionShouldFailFast() throws Exception { SolrClient client = cluster.getJettySolrRunner(0).newClient(); + @SuppressWarnings({"rawtypes"}) SolrRequest req = new QueryRequest(new SolrQuery("*:*").setRows(0)).setBasicAuthCredentials(USERNAME, PASSWORD); // Without the SOLR-13793 fix, this causes requests to "down collection" to pile up (until the nodes run out @@ -113,6 +114,7 @@ public void testQueryToDownCollectionShouldFailFast() throws Exception { assertTrue(error.getMessage().contains("No active replicas found for collection: " + COLLECTION_NAME)); } + @SuppressWarnings({"unchecked"}) private void downAllReplicas() throws Exception { byte[] collectionState = cluster.getZkClient().getData("/collections/" + COLLECTION_NAME + "/state.json", null, null, true); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java b/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java index 703f02d6bde9..9ecc474e2046 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java @@ -119,7 +119,7 @@ private void testRequestTracking() throws Exception { long expectedTotalRequests = 0; Set uniqueCoreNames = new LinkedHashSet<>(); - log.info("Making requests to " + baseUrl + "a1x2"); + log.info("Making requests to {} a1x2", baseUrl); while (uniqueCoreNames.size() < counters.keySet().size() && expectedTotalRequests < 1000L) { expectedTotalRequests++; client.query(new SolrQuery("*:*")); @@ -135,7 +135,7 @@ private void testRequestTracking() throws Exception { assertEquals("Sanity Check: Num Queries So Far Doesn't Match Total????", expectedTotalRequests, actualTotalRequests); } - log.info("Total requests: " + expectedTotalRequests); + log.info("Total requests: {}", expectedTotalRequests); assertEquals("either request randomization code is broken or this test seed is really unlucky, " + "Gave up waiting for requests to hit every core at least once after " + expectedTotalRequests + " requests", @@ -179,7 +179,9 @@ private void testQueryAgainstDownReplica() throws Exception { ZkStateReader.ROLES_PROP, "", ZkStateReader.STATE_PROP, Replica.State.DOWN.toString()); - log.info("Forcing {} to go into 'down' state", notLeader.getStr(ZkStateReader.CORE_NAME_PROP)); + if (log.isInfoEnabled()) { + log.info("Forcing {} to go into 'down' state", notLeader.getStr(ZkStateReader.CORE_NAME_PROP)); + } ZkDistributedQueue q = 
jettys.get(0).getCoreContainer().getZkController().getOverseer().getStateUpdateQueue(); q.offer(Utils.toJSON(m)); @@ -190,7 +192,7 @@ private void testQueryAgainstDownReplica() throws Exception { String baseUrl = notLeader.getStr(ZkStateReader.BASE_URL_PROP); if (!baseUrl.endsWith("/")) baseUrl += "/"; String path = baseUrl + "football"; - log.info("Firing queries against path=" + path); + log.info("Firing queries against path={}", path); try (HttpSolrClient client = getHttpSolrClient(path, 2000, 5000)) { SolrCore leaderCore = null; diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java b/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java index b207fa3c91cf..e981167b6ac2 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java @@ -447,6 +447,7 @@ void setPropWithStandardRequest(Slice slice, Replica rep, String prop) throws IO params.set("shardUnique", "true"); } + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); cluster.getSolrClient().request(request); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestSSLRandomization.java b/solr/core/src/test/org/apache/solr/cloud/TestSSLRandomization.java index e846f73bc332..23a453fc46b4 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestSSLRandomization.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestSSLRandomization.java @@ -139,7 +139,7 @@ public void testSSLRandomizer() { // for some cases, we know exactly what the config should be regardless of randomization factors SSLTestConfig conf; - for (Class c : Arrays.asList(FullyAnnotated.class, InheritedFullyAnnotated.class, + for (@SuppressWarnings({"rawtypes"})Class c : Arrays.asList(FullyAnnotated.class, InheritedFullyAnnotated.class, GrandchildInheritedEmptyAnnotatationWithOverride.class )) { r = SSLRandomizer.getSSLRandomizerForClass(c); assertEquals(c.toString(), 0.42D, r.ssl, 0.0D); @@ -147,7 +147,7 @@ public void testSSLRandomizer() { assertTrue(c.toString(), r.debug.contains("foo")); } - for (Class c : Arrays.asList(NotAnnotated.class, InheritedNotAnnotated.class)) { + for (@SuppressWarnings({"rawtypes"})Class c : Arrays.asList(NotAnnotated.class, InheritedNotAnnotated.class)) { r = SSLRandomizer.getSSLRandomizerForClass(c); assertEquals(c.toString(), 0.0D, r.ssl, 0.0D); assertEquals(c.toString(), 0.0D, r.clientAuth, 0.0D); @@ -157,7 +157,7 @@ public void testSSLRandomizer() { assertEquals(c.toString(), false, conf.isClientAuthMode()); } - for (Class c : Arrays.asList(Suppressed.class, + for (@SuppressWarnings({"rawtypes"})Class c : Arrays.asList(Suppressed.class, InheritedSuppressed.class, InheritedAnnotationButSuppressed.class, InheritedSuppressedWithIgnoredAnnotation.class)) { @@ -171,13 +171,13 @@ public void testSSLRandomizer() { assertEquals(c.toString(), false, conf.isClientAuthMode()); } - for (Class c : Arrays.asList(EmptyAnnotated.class, InheritedEmptyAnnotated.class)) { + for (@SuppressWarnings({"rawtypes"})Class c : Arrays.asList(EmptyAnnotated.class, InheritedEmptyAnnotated.class)) { r = SSLRandomizer.getSSLRandomizerForClass(c); assertEquals(c.toString(), RandomizeSSL.DEFAULT_ODDS, r.ssl, 0.0D); assertEquals(c.toString(), RandomizeSSL.DEFAULT_ODDS, r.clientAuth, 0.0D); } - for (Class c : Arrays.asList(SimplyAnnotated.class, InheritedEmptyAnnotatationWithOverride.class)) { + for (@SuppressWarnings({"rawtypes"})Class c : Arrays.asList(SimplyAnnotated.class, 
InheritedEmptyAnnotatationWithOverride.class)) { r = SSLRandomizer.getSSLRandomizerForClass(c); assertEquals(c.toString(), 0.5D, r.ssl, 0.0D); assertEquals(c.toString(), 0.5D, r.clientAuth, 0.0D); @@ -205,7 +205,7 @@ public void testSSLRandomizer() { assertEquals(RandomizeSSL.DEFAULT_ODDS, r.ssl, 0.0D); assertEquals(0.42D, r.clientAuth, 0.0D); - for (Class c : Arrays.asList(SSLOutOfRangeAnnotated.class, + for (@SuppressWarnings({"rawtypes"})Class c : Arrays.asList(SSLOutOfRangeAnnotated.class, ClientAuthOutOfRangeAnnotated.class, InheritedOutOfRangeAnnotated.class)) { expectThrows(IllegalArgumentException.class, () -> { diff --git a/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithDelegationTokens.java b/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithDelegationTokens.java index b1ec4aaf8890..d28fad77a674 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithDelegationTokens.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithDelegationTokens.java @@ -169,6 +169,7 @@ private void doSolrRequest(String token, int expectedStatusCode, HttpSolrClient assertEquals("Did not receive expected status code", expectedStatusCode, lastStatusCode); } + @SuppressWarnings({"rawtypes"}) private SolrRequest getAdminRequest(final SolrParams params) { return new CollectionAdminRequest.List() { @Override @@ -179,6 +180,7 @@ public SolrParams getParams() { } }; } + @SuppressWarnings({"rawtypes"}) private SolrRequest getUpdateRequest(boolean commit) { UpdateRequest request = new UpdateRequest(); if (commit) { @@ -190,6 +192,7 @@ private SolrRequest getUpdateRequest(boolean commit) { return request; } + @SuppressWarnings({"unchecked"}) private int getStatusCode(String token, final String user, final String op, HttpSolrClient client) throws Exception { SolrClient delegationTokenClient; @@ -210,6 +213,7 @@ private int getStatusCode(String token, final String user, final String op, Http ModifiableSolrParams p = new ModifiableSolrParams(); if (user != null) p.set(USER_PARAM, user); if (op != null) p.set("op", op); + @SuppressWarnings({"rawtypes"}) SolrRequest req = getAdminRequest(p); if (user != null || op != null) { Set queryParams = new HashSet<>(); @@ -228,7 +232,8 @@ private int getStatusCode(String token, final String user, final String op, Http } } - private void doSolrRequest(HttpSolrClient client, SolrRequest request, + private void doSolrRequest(HttpSolrClient client, + @SuppressWarnings({"rawtypes"})SolrRequest request, int expectedStatusCode) throws Exception { try { client.request(request); @@ -238,7 +243,8 @@ private void doSolrRequest(HttpSolrClient client, SolrRequest request, } } - private void doSolrRequest(HttpSolrClient client, SolrRequest request, String collectionName, + private void doSolrRequest(HttpSolrClient client, + @SuppressWarnings({"rawtypes"})SolrRequest request, String collectionName, int expectedStatusCode) throws Exception { try { client.request(request, collectionName); @@ -408,6 +414,7 @@ public void testDelegationTokenSolrClient() throws Exception { String token = getDelegationToken(null, "bar", solrClientPrimary); assertNotNull(token); + @SuppressWarnings({"rawtypes"}) SolrRequest request = getAdminRequest(new ModifiableSolrParams()); // test without token @@ -465,6 +472,7 @@ public void testDelegationTokenSolrClientWithUpdateRequests() throws Exception { try { // test update request with token via property and commit=true + @SuppressWarnings({"rawtypes"}) SolrRequest request = getUpdateRequest(true); 
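The `@SuppressWarnings({"rawtypes"})` annotations above exist because these tests deliberately pass query, update, and admin requests through the raw `SolrRequest` type, so one `doSolrRequest` helper can execute any of them. A minimal sketch of that trade-off, assuming SolrJ on the classpath (`RawTypesSketch` and its method names are illustrative, not from the patch; in SolrJ, `SolrRequest` is generic on its response type):

    import org.apache.solr.client.solrj.SolrRequest;
    import org.apache.solr.client.solrj.request.QueryRequest;
    import org.apache.solr.client.solrj.response.QueryResponse;
    import org.apache.solr.common.params.ModifiableSolrParams;

    class RawTypesSketch {
        // Raw type: one variable or parameter accepts any kind of request,
        // at the cost of a rawtypes warning suppressed at each use site.
        @SuppressWarnings({"rawtypes"})
        static SolrRequest asRaw() {
            return new QueryRequest(new ModifiableSolrParams());
        }

        // Parameterized alternative: warning-free, but it pins the response type,
        // which a shared helper accepting many request kinds cannot do.
        static SolrRequest<QueryResponse> asTyped() {
            return new QueryRequest(new ModifiableSolrParams());
        }
    }
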
doSolrRequest(scUpdateWToken, request, collectionName, HttpStatus.SC_OK); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithKerberosAlt.java b/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithKerberosAlt.java index 3daaa6485a95..2923211109c2 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithKerberosAlt.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithKerberosAlt.java @@ -97,7 +97,7 @@ private void setupMiniKdc() throws Exception { System.setProperty("solr.kerberos.keytab", keytabFile.getAbsolutePath()); System.setProperty("authenticationPlugin", "org.apache.solr.security.KerberosPlugin"); boolean enableDt = random().nextBoolean(); - log.info("Enable delegation token: " + enableDt); + log.info("Enable delegation token: {}", enableDt); System.setProperty("solr.kerberos.delegation.token.enabled", Boolean.toString(enableDt)); // Extracts 127.0.0.1 from HTTP/127.0.0.1@EXAMPLE.COM System.setProperty("solr.kerberos.name.rules", "RULE:[1:$1@$0](.*EXAMPLE.COM)s/@.*//" diff --git a/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithSecureImpersonation.java b/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithSecureImpersonation.java index 1b4fe9957889..ee1515f01c0f 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithSecureImpersonation.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithSecureImpersonation.java @@ -193,14 +193,17 @@ public SolrParams getParams() { } } + @SuppressWarnings({"rawtypes"}) private SolrRequest getProxyRequest(String user, String doAs) { return getProxyRequest(user, doAs, null); } + @SuppressWarnings({"rawtypes"}) private SolrRequest getProxyRequest(String user, String doAs, String remoteHost) { return getProxyRequest(user, doAs, remoteHost, null); } + @SuppressWarnings({"rawtypes"}) private SolrRequest getProxyRequest(String user, String doAs, String remoteHost, String remoteAddress) { return new CollectionAdminRequest.List() { @Override diff --git a/solr/core/src/test/org/apache/solr/cloud/TestStressCloudBlindAtomicUpdates.java b/solr/core/src/test/org/apache/solr/cloud/TestStressCloudBlindAtomicUpdates.java index dd107f8e32e4..789136e22d1a 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestStressCloudBlindAtomicUpdates.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestStressCloudBlindAtomicUpdates.java @@ -105,8 +105,9 @@ public class TestStressCloudBlindAtomicUpdates extends SolrCloudTestCase { * initial index seeding has finished (we're focusing on testing atomic updates, not basic indexing). 
*/ private String testInjection = null; - + @BeforeClass + @SuppressWarnings({"unchecked"}) private static void createMiniSolrCloudCluster() throws Exception { // NOTE: numDocsToCheck uses atLeast, so nightly & multiplier are already a factor in index size // no need to redundantly factor them in here as well @@ -144,8 +145,6 @@ private static void createMiniSolrCloudCluster() throws Exception { CLIENTS.add(getHttpSolrClient(baseUrl + "/" + COLLECTION_NAME + "/")); } - final boolean usingPoints = Boolean.getBoolean(NUMERIC_POINTS_SYSPROP); - // sanity check no one broke the assumptions we make about our schema checkExpectedSchemaType( map("name","long", "class", RANDOMIZED_NUMERIC_FIELDTYPES.get(Long.class), @@ -207,6 +206,7 @@ private void startTestInjection() { @Test + @SuppressWarnings({"unchecked"}) public void test_dv() throws Exception { String field = "long_dv"; checkExpectedSchemaField(map("name", field, @@ -219,6 +219,7 @@ public void test_dv() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void test_dv_stored() throws Exception { String field = "long_dv_stored"; checkExpectedSchemaField(map("name", field, @@ -230,6 +231,7 @@ public void test_dv_stored() throws Exception { checkField(field); } + @SuppressWarnings({"unchecked"}) public void test_dv_stored_idx() throws Exception { String field = "long_dv_stored_idx"; checkExpectedSchemaField(map("name", field, @@ -241,6 +243,7 @@ public void test_dv_stored_idx() throws Exception { checkField(field); } + @SuppressWarnings({"unchecked"}) public void test_dv_idx() throws Exception { String field = "long_dv_idx"; checkExpectedSchemaField(map("name", field, @@ -251,6 +254,7 @@ public void test_dv_idx() throws Exception { checkField(field); } + @SuppressWarnings({"unchecked"}) public void test_stored_idx() throws Exception { String field = "long_stored_idx"; checkExpectedSchemaField(map("name", field, @@ -270,7 +274,8 @@ public void checkField(final String numericFieldName) throws Exception { final int numDocsInIndex = (numDocsToCheck * DOC_ID_INCR); final AtomicLong[] expected = new AtomicLong[numDocsToCheck]; - log.info("Testing " + numericFieldName + ": numDocsToCheck=" + numDocsToCheck + ", numDocsInIndex=" + numDocsInIndex + ", incr=" + DOC_ID_INCR); + log.info("Testing {}: numDocsToCheck={}, numDocsInIndex={}, incr={}" , numericFieldName, numDocsToCheck, numDocsInIndex, DOC_ID_INCR); // seed the index & keep track of what docs exist and with what values for (int id = 0; id < numDocsInIndex; id++) { @@ -281,7 +286,7 @@ public void checkField(final String numericFieldName) throws Exception { UpdateResponse rsp = update(doc).process(CLOUD_CLIENT); assertEquals(doc.toString() + " => " + rsp.toString(), 0, rsp.getStatus()); if (0 == id % DOC_ID_INCR) { - expected[(int)(id / DOC_ID_INCR)] = new AtomicLong(initValue); + expected[id / DOC_ID_INCR] = new AtomicLong(initValue); } } assertNotNull("Sanity Check no off-by-one in expected init: ", expected[expected.length-1]); @@ -336,7 +341,7 @@ public void checkField(final String numericFieldName) throws Exception { for (int id = 0; id < numDocsInIndex; id += DOC_ID_INCR) { assert 0 == id % DOC_ID_INCR : "WTF? 
" + id; - final long expect = expected[(int)(id / DOC_ID_INCR)].longValue(); + final long expect = expected[id / DOC_ID_INCR].longValue(); final String docId = "" + id; @@ -396,7 +401,7 @@ private void doRandomAtomicUpdate(int docId) throws Exception { UpdateResponse rsp = update(doc).process(client); assertEquals(doc + " => " + rsp, 0, rsp.getStatus()); - AtomicLong counter = expected[(int)(docId / DOC_ID_INCR)]; + AtomicLong counter = expected[docId / DOC_ID_INCR]; assertNotNull("null counter for " + docId + "/" + DOC_ID_INCR, counter); counter.getAndAdd(delta); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestStressInPlaceUpdates.java b/solr/core/src/test/org/apache/solr/cloud/TestStressInPlaceUpdates.java index 01314d3ea68a..4230ec1fb3a4 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestStressInPlaceUpdates.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestStressInPlaceUpdates.java @@ -134,11 +134,13 @@ public void stressTest() throws Exception { int fullUpdatePercent = 20; **/ - log.info("{}", Arrays.asList - ("commitPercent", commitPercent, "softCommitPercent", softCommitPercent, + if (log.isInfoEnabled()) { + log.info("{}", Arrays.asList + ("commitPercent", commitPercent, "softCommitPercent", softCommitPercent, "deletePercent", deletePercent, "deleteByQueryPercent", deleteByQueryPercent, "ndocs", ndocs, "nWriteThreads", nWriteThreads, "percentRealtimeQuery", percentRealtimeQuery, "operations", operations, "nReadThreads", nReadThreads)); + } initModel(ndocs); @@ -218,8 +220,8 @@ public void run() { try { returnedVersion = deleteDocAndGetVersion(Integer.toString(id), params("_version_", Long.toString(info.version)), dbq); - log.info(delType + ": Deleting id=" + id + ", version=" + info.version - + ". Returned version=" + returnedVersion); + log.info("{}: Deleting id={}, version={}. Returned version={}" + , delType, id, info.version, returnedVersion); } catch (RuntimeException e) { if (e.getMessage() != null && e.getMessage().contains("version conflict") || e.getMessage() != null && e.getMessage().contains("Conflict")) { @@ -254,7 +256,8 @@ public void run() { nextVal2 = nextVal1 * 1000000000l; try { returnedVersion = addDocAndGetVersion("id", id, "title_s", "title" + id, "val1_i_dvo", nextVal1, "val2_l_dvo", nextVal2, "_version_", info.version); - log.info("FULL: Writing id=" + id + ", val=[" + nextVal1 + "," + nextVal2 + "], version=" + info.version + ", Prev was=[" + val1 + "," + val2 + "]. Returned version=" + returnedVersion); + log.info("FULL: Writing id={}, val=[{},{}], version={}, Prev was=[{},{}]. Returned version={}" + ,id, nextVal1, nextVal2, info.version, val1, val2, returnedVersion); } catch (RuntimeException e) { if (e.getMessage() != null && e.getMessage().contains("version conflict") @@ -271,7 +274,8 @@ public void run() { nextVal2 = val2 + val1; try { returnedVersion = addDocAndGetVersion("id", id, "val2_l_dvo", map("inc", String.valueOf(val1)), "_version_", info.version); - log.info("PARTIAL: Writing id=" + id + ", val=[" + nextVal1 + "," + nextVal2 + "], version=" + info.version + ", Prev was=[" + val1 + "," + val2 + "]. Returned version=" + returnedVersion); + log.info("PARTIAL: Writing id={}, val=[{},{}], version={}, Prev was=[{},{}]. 
Returned version={}" + ,id, nextVal1, nextVal2, info.version, val1, val2, returnedVersion); } catch (RuntimeException e) { if (e.getMessage() != null && e.getMessage().contains("version conflict") || e.getMessage() != null && e.getMessage().contains("Conflict")) { diff --git a/solr/core/src/test/org/apache/solr/cloud/TestStressLiveNodes.java b/solr/core/src/test/org/apache/solr/cloud/TestStressLiveNodes.java index b97ae0abab85..06be968d1243 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestStressLiveNodes.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestStressLiveNodes.java @@ -112,8 +112,10 @@ private static List getCachedLiveNodesFromLocalState(final int expectedC for (int i = 0; i < 10; i++) { result = new ArrayList<>(CLOUD_CLIENT.getZkStateReader().getClusterState().getLiveNodes()); if (expectedCount != result.size()) { - log.info("sleeping #{} to give watchers a chance to finish: {} != {}", - i, expectedCount, result.size()); + if (log.isInfoEnabled()) { + log.info("sleeping #{} to give watchers a chance to finish: {} != {}", + i, expectedCount, result.size()); + } Thread.sleep(200); } else { break; @@ -235,7 +237,7 @@ public Integer call() { client.makePath(nodePath, CreateMode.EPHEMERAL, true); numAdded++; } catch (Exception e) { - log.error("failed to create: " + nodePath, e); + log.error("failed to create: {}", nodePath, e); } } return numAdded; diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplayVsRecovery.java b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplayVsRecovery.java index d1c4d226962b..8b9f1cfd52b0 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplayVsRecovery.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplayVsRecovery.java @@ -88,7 +88,9 @@ public void setupCluster() throws Exception { cluster.stopJettySolrRunner(jetty);//TODO: Can we avoid this restart cluster.startJettySolrRunner(jetty); proxy.open(jetty.getBaseUrl().toURI()); - log.info("Adding proxy for URL: " + jetty.getBaseUrl() + ". Proxy: " + proxy.getUrl()); + if (log.isInfoEnabled()) { + log.info("Adding proxy for URL: {}. Proxy: {}", jetty.getBaseUrl(), proxy.getUrl()); + } proxies.put(jetty, proxy); jettys.put(proxy.getUrl(), jetty); } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java index 57129c3ea106..060ff61bc288 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java @@ -92,11 +92,6 @@ public static void setupCluster() throws Exception { configureCluster(2) // 2 + random().nextInt(3) .addConfig("conf", configset("cloud-minimal-inplace-updates")) .configure(); - Boolean useLegacyCloud = rarely(); - log.info("Using legacyCloud?: {}", useLegacyCloud); - CollectionAdminRequest.ClusterProp clusterPropRequest = CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, String.valueOf(useLegacyCloud)); - CollectionAdminResponse response = clusterPropRequest.process(cluster.getSolrClient()); - assertEquals(0, response.getStatus()); } @AfterClass @@ -248,7 +243,7 @@ public void testAddDocs() throws Exception { "stats", "true"); QueryResponse statsResponse = tlogReplicaClient.query(req); assertEquals("Append replicas should recive all updates. 
Replica: " + r + ", response: " + statsResponse, - 1L, ((Map)((NamedList)statsResponse.getResponse()).findRecursive("plugins", "UPDATE", "updateHandler", "stats")).get("UPDATE.updateHandler.cumulativeAdds.count")); + 1L, ((Map)(statsResponse.getResponse()).findRecursive("plugins", "UPDATE", "updateHandler", "stats")).get("UPDATE.updateHandler.cumulativeAdds.count")); break; } catch (AssertionError e) { if (t.hasTimedOut()) { @@ -434,7 +429,7 @@ private void addReplicaWithRetries() throws SolrServerException, IOException { } log.error("Unsuccessful attempt to add replica. Attempt: {}/{}", i, maxAttempts); } catch (SolrException e) { - log.error("Exception while adding replica. Attempt: " + i + "/" + maxAttempts, e); + log.error("Exception while adding replica. Attempt: {}/{}", i, maxAttempts, e); } } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorRandomCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorRandomCloud.java index ef07a773b247..d91078fac823 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorRandomCloud.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorRandomCloud.java @@ -264,8 +264,10 @@ public void testRandomUpdates() throws Exception { final UpdateResponse rsp = req.process(client); assertUpdateTolerantErrors(client.toString() + " => " + expectedErrors.toString(), rsp, expectedErrors.toArray(new ExpectedErr[expectedErrors.size()])); - - log.info("END ITER #{}, expecting #docs: {}", i, expectedDocIds.cardinality()); + + if (log.isInfoEnabled()) { + log.info("END ITER #{}, expecting #docs: {}", i, expectedDocIds.cardinality()); + } assertEquals("post update commit failed?", 0, CLOUD_CLIENT.commit().getStatus()); @@ -273,7 +275,7 @@ public void testRandomUpdates() throws Exception { if (expectedDocIds.cardinality() == countDocs(CLOUD_CLIENT)) { break; } - log.info("sleeping to give searchers a chance to re-open #" + j); + log.info("sleeping to give searchers a chance to re-open #{}", j); Thread.sleep(200); } @@ -289,7 +291,7 @@ public void testRandomUpdates() throws Exception { for (int b = x.nextSetBit(0); 0 <= b; b = x.nextSetBit(b+1)) { final boolean expectedBit = expectedDocIds.get(b); final boolean actualBit = actualDocIds.get(b); - log.error("bit #"+b+" mismatch: expected {} BUT actual {}", expectedBit, actualBit); + log.error("bit #{} mismatch: expected {} BUT actual {}", b, expectedBit, actualBit); } assertEquals(x.cardinality() + " mismatched bits", expectedDocIds.cardinality(), actualDocIds.cardinality()); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestUtilizeNode.java b/solr/core/src/test/org/apache/solr/cloud/TestUtilizeNode.java index 96d7704a2adc..cac7f76a0747 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestUtilizeNode.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestUtilizeNode.java @@ -87,7 +87,9 @@ public void test() throws Exception { assertNoReplicas("jettyX should not yet be utilized: ", coll, jettyX); - log.info("Sending UTILIZE command for jettyX ({})", jettyX.getNodeName()); + if (log.isInfoEnabled()) { + log.info("Sending UTILIZE command for jettyX ({})", jettyX.getNodeName()); + } cloudClient.request(new CollectionAdminRequest.UtilizeNode(jettyX.getNodeName())); // TODO: aparently we can't assert this? ... @@ -103,7 +105,9 @@ public void test() throws Exception { // // should we skip spinning up a *new* jettyX, and instead just pick an existing jetty? 
- log.info("jettyX replicas prior to being blacklisted: {}", getReplicaList(coll, jettyX)); + if (log.isInfoEnabled()) { + log.info("jettyX replicas prior to being blacklisted: {}", getReplicaList(coll, jettyX)); + } String setClusterPolicyCommand = "{" + " 'set-cluster-policy': [" + @@ -111,8 +115,11 @@ public void test() throws Exception { " , 'replica':0}" + " ]" + "}"; - log.info("Setting new policy to blacklist jettyX ({}) port={}", - jettyX.getNodeName(), jettyX.getLocalPort()); + if (log.isInfoEnabled()) { + log.info("Setting new policy to blacklist jettyX ({}) port={}", + jettyX.getNodeName(), jettyX.getLocalPort()); + } + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand); NamedList response = cloudClient.request(req); assertEquals(req + " => " + response, @@ -123,9 +130,10 @@ public void test() throws Exception { cluster.waitForAllNodes(30); assertNoReplicas("jettyY should not yet be utilized: ", coll, jettyY); - - log.info("jettyX replicas prior to utilizing jettyY: {}", getReplicaList(coll, jettyX)); - log.info("Sending UTILIZE command for jettyY ({})", jettyY.getNodeName()); + if (log.isInfoEnabled()) { + log.info("jettyX replicas prior to utilizing jettyY: {}", getReplicaList(coll, jettyX)); + log.info("Sending UTILIZE command for jettyY ({})", jettyY.getNodeName()); // logOk + } cloudClient.request(new CollectionAdminRequest.UtilizeNode(jettyY.getNodeName())); assertSomeReplicas("jettyY should now be utilized: ", coll, jettyY); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestWaitForStateWithJettyShutdowns.java b/solr/core/src/test/org/apache/solr/cloud/TestWaitForStateWithJettyShutdowns.java index 45dd428f2378..1b820a4388c1 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestWaitForStateWithJettyShutdowns.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestWaitForStateWithJettyShutdowns.java @@ -145,8 +145,10 @@ public LatchCountingPredicateWrapper(final CountDownLatch latch, final Collectio } public boolean matches(Set liveNodes, DocCollection collectionState) { final boolean result = inner.matches(liveNodes, collectionState); - log.info("Predicate called: result={}, (pre)latch={}, liveNodes={}, state={}", - result, latch.getCount(), liveNodes, collectionState); + if (log.isInfoEnabled()) { + log.info("Predicate called: result={}, (pre)latch={}, liveNodes={}, state={}", + result, latch.getCount(), liveNodes, collectionState); + } latch.countDown(); return result; } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestWithCollection.java b/solr/core/src/test/org/apache/solr/cloud/TestWithCollection.java index ca7f68791e55..2ee47f4e79a7 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestWithCollection.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestWithCollection.java @@ -139,6 +139,7 @@ public void testCreateCollection() throws Exception { " {'cores':'<10', 'node':'#ANY'}," + " ]" + "}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand); solrClient.request(req); @@ -258,6 +259,7 @@ public void testAddReplicaWithPolicy() throws Exception { " {'replica':'<2', 'node':'#ANY'}," + " ]" + "}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand); solrClient.request(req); @@ -299,6 +301,7 @@ public void testMoveReplicaMainCollection() throws Exception { " {'replica':'<2', 'node':'#ANY'}," + " ]" + "}"; + 
@SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand); solrClient.request(req); @@ -349,6 +352,7 @@ public void testMoveReplicaWithCollection() throws Exception { " {'replica':'<2', 'node':'#ANY'}," + " ]" + "}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand); solrClient.request(req); @@ -411,6 +415,7 @@ public void testNodeAdded() throws Exception { " {'replica':'<2', 'node':'#ANY'}," + " ]" + "}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand); solrClient.request(req); @@ -528,6 +533,7 @@ public void testMultipleWithCollections() throws Exception { " {'replica':'<2', 'node':'#ANY'}," + " ]" + "}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand); solrClient.request(req); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestZkChroot.java b/solr/core/src/test/org/apache/solr/cloud/TestZkChroot.java deleted file mode 100644 index 134e332b8337..000000000000 --- a/solr/core/src/test/org/apache/solr/cloud/TestZkChroot.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.solr.cloud; - -import java.nio.file.Path; -import java.nio.file.Paths; - -import org.apache.solr.SolrJettyTestBase; -import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.ZkConfigManager; -import org.apache.solr.common.cloud.ZooKeeperException; -import org.apache.solr.core.CoreContainer; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class TestZkChroot extends SolrTestCaseJ4 { - protected CoreContainer cores = null; - private Path home; - - protected ZkTestServer zkServer; - protected Path zkDir; - - @Override - @Before - public void setUp() throws Exception { - super.setUp(); - - zkDir = createTempDir("zkData"); - zkServer = new ZkTestServer(zkDir); - zkServer.run(); - home = Paths.get(SolrJettyTestBase.legacyExampleCollection1SolrHome()); - - } - - @Override - @After - public void tearDown() throws Exception { - System.clearProperty("zkHost"); - - if (cores != null) { - cores.shutdown(); - cores = null; - } - - if (null != zkServer) { - zkServer.shutdown(); - zkServer = null; - } - zkDir = null; - - super.tearDown(); - } - - @Test - public void testChrootBootstrap() throws Exception { - String chroot = "/foo/bar"; - - System.setProperty("bootstrap_conf", "true"); - System.setProperty("zkHost", zkServer.getZkHost() + chroot); - SolrZkClient zkClient = null; - SolrZkClient zkClient2 = null; - - try { - cores = CoreContainer.createAndLoad(home); - zkClient = cores.getZkController().getZkClient(); - - assertTrue(zkClient.exists("/clusterstate.json", true)); - assertFalse(zkClient.exists(chroot + "/clusterstate.json", true)); - - zkClient2 = new SolrZkClient(zkServer.getZkHost(), - AbstractZkTestCase.TIMEOUT); - assertTrue(zkClient2.exists(chroot + "/clusterstate.json", true)); - assertFalse(zkClient2.exists("/clusterstate.json", true)); - } finally { - if (zkClient != null) zkClient.close(); - if (zkClient2 != null) zkClient2.close(); - } - } - - @Test - public void testNoBootstrapConf() throws Exception { - String chroot = "/foo/bar2"; - - System.setProperty("bootstrap_conf", "false"); - System.setProperty("zkHost", zkServer.getZkHost() + chroot); - - try (SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), AbstractZkTestCase.TIMEOUT)) { - expectThrows(ZooKeeperException.class, - "did not get a top level exception when more then 4 updates failed", - () -> { - assertFalse("Path '" + chroot + "' should not exist before the test", - zkClient.exists(chroot, true)); - cores = CoreContainer.createAndLoad(home); - }); - assertFalse("Path shouldn't have been created", - zkClient.exists(chroot, true));// check the path was not created - } - } - - @Test - public void testWithUploadDir() throws Exception { - String chroot = "/foo/bar3"; - String configName = "testWithUploadDir"; - - System.setProperty("bootstrap_conf", "false"); - System.setProperty("bootstrap_confdir", home + "/collection1/conf"); - System.setProperty("collection.configName", configName); - System.setProperty("zkHost", zkServer.getZkHost() + chroot); - - try (SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), AbstractZkTestCase.TIMEOUT)) { - assertFalse("Path '" + chroot + "' should not exist before the test", - zkClient.exists(chroot, true)); - cores = CoreContainer.createAndLoad(home); - assertTrue( - "solrconfig.xml should have been uploaded to zk to the correct config directory", - zkClient.exists(chroot + ZkConfigManager.CONFIGS_ZKNODE + "/" - + configName + 
"/solrconfig.xml", true)); - } - } - - @Test - public void testInitPathExists() throws Exception { - String chroot = "/foo/bar4"; - - System.setProperty("bootstrap_conf", "true"); - System.setProperty("zkHost", zkServer.getZkHost() + chroot); - - try (SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), AbstractZkTestCase.TIMEOUT)) { - zkClient.makePath("/foo/bar4", true); - assertTrue(zkClient.exists(chroot, true)); - assertFalse(zkClient.exists(chroot + "/clusterstate.json", true)); - - cores = CoreContainer.createAndLoad(home); - assertTrue(zkClient.exists(chroot + "/clusterstate.json", true)); - } - } -} diff --git a/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java index 31115175ff55..2d9da76c52e7 100644 --- a/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java @@ -40,6 +40,7 @@ import org.junit.Test; import java.io.IOException; +import java.nio.file.Path; import java.util.Collection; import java.util.Collections; import java.util.Random; @@ -64,6 +65,7 @@ protected String getSolrXml() { @Test public void test() throws Exception { + jettys.forEach(j -> j.getCoreContainer().getAllowPaths().add(Path.of("_ALL_"))); // Allow non-standard core instance path testCoreUnloadAndLeaders(); // long testUnloadLotsOfCores(); // long diff --git a/solr/core/src/test/org/apache/solr/cloud/VMParamsZkACLAndCredentialsProvidersTest.java b/solr/core/src/test/org/apache/solr/cloud/VMParamsZkACLAndCredentialsProvidersTest.java index a85f0f0c3768..b9db03d81740 100644 --- a/solr/core/src/test/org/apache/solr/cloud/VMParamsZkACLAndCredentialsProvidersTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/VMParamsZkACLAndCredentialsProvidersTest.java @@ -57,11 +57,13 @@ public static void afterClass() throws InterruptedException { @Override public void setUp() throws Exception { super.setUp(); - log.info("####SETUP_START " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_START {}", getTestName()); + } createTempDir(); zkDir = createTempDir().resolve("zookeeper/server1/data"); - log.info("ZooKeeper dataDir:" + zkDir); + log.info("ZooKeeper dataDir:{}", zkDir); zkServer = new ZkTestServer(zkDir); zkServer.run(false); @@ -92,7 +94,9 @@ public void setUp() throws Exception { zkClient.makePath("/unprotectedMakePathNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); zkClient.close(); - log.info("####SETUP_END " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_END {}", getTestName()); + } } @Override diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkCLITest.java b/solr/core/src/test/org/apache/solr/cloud/ZkCLITest.java index c9c4b87ab6fe..b3a14dac97d1 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ZkCLITest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ZkCLITest.java @@ -78,7 +78,9 @@ public static void afterClass() throws InterruptedException { @Override public void setUp() throws Exception { super.setUp(); - log.info("####SETUP_START " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_START {}", getTestName()); + } String exampleHome = SolrJettyTestBase.legacyExampleCollection1SolrHome(); @@ -86,7 +88,7 @@ public void setUp() throws Exception { solrHome = exampleHome; zkDir = tmpDir.resolve("zookeeper/server1/data"); - log.info("ZooKeeper dataDir:" + zkDir); + log.info("ZooKeeper dataDir:{}", zkDir); zkServer = new 
ZkTestServer(zkDir); zkServer.run(); System.setProperty("zkHost", zkServer.getZkAddress()); @@ -98,7 +100,9 @@ public void setUp() throws Exception { this.zkClient = new SolrZkClient(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); - log.info("####SETUP_END " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_END {}", getTestName()); + } } @Test diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java b/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java index 4526ed4d0256..603c4143544d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java @@ -28,7 +28,6 @@ import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.cloud.ClusterProperties; -import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.SolrZkClient; import org.apache.solr.common.cloud.ZkConfigManager; import org.apache.solr.common.cloud.ZkNodeProps; @@ -284,7 +283,7 @@ public List getCoreDescriptors() { ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(), ZkStateReader.NODE_NAME_PROP, nodeName, ZkStateReader.NUM_SHARDS_PROP, "1", - "name", collectionName, DocCollection.STATE_FORMAT, "2"); + "name", collectionName); zkController.getOverseerJobQueue().offer(Utils.toJSON(m)); HashMap propMap = new HashMap<>(); diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkNodePropsTest.java b/solr/core/src/test/org/apache/solr/cloud/ZkNodePropsTest.java index 88ce3c865d48..604c56b1aafa 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ZkNodePropsTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ZkNodePropsTest.java @@ -45,7 +45,9 @@ public void testBasic() throws IOException { props.forEach((s, o) -> assertEquals(o, props2.get(s))); SimplePostTool.BAOS baos = new SimplePostTool.BAOS(); - new JavaBinCodec().marshal(zkProps.getProperties(), baos); + try (JavaBinCodec jbc = new JavaBinCodec()) { + jbc.marshal(zkProps.getProperties(), baos); + } bytes = baos.toByteArray(); System.out.println("BIN size : " + bytes.length); ZkNodeProps props3 = ZkNodeProps.load(bytes); diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkSolrClientTest.java b/solr/core/src/test/org/apache/solr/cloud/ZkSolrClientTest.java index 6a0916200634..e9afc3bdd9d0 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ZkSolrClientTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ZkSolrClientTest.java @@ -16,6 +16,7 @@ */ package org.apache.solr.cloud; +import java.io.IOException; import java.nio.file.Path; import java.util.HashSet; import java.util.Set; @@ -41,6 +42,7 @@ public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema.xml"); } + @SuppressWarnings({"try"}) static class ZkConnection implements AutoCloseable { private ZkTestServer server = null; @@ -67,18 +69,20 @@ public SolrZkClient getClient () { } @Override - public void close() throws Exception { + public void close() throws IOException, InterruptedException { if (zkClient != null) zkClient.close(); if (server != null) server.shutdown(); } } + @SuppressWarnings({"try"}) public void testConnect() throws Exception { try (ZkConnection conn = new ZkConnection (false)) { // do nothing } } + @SuppressWarnings({"try"}) public void testMakeRootNode() throws Exception { try (ZkConnection conn = new ZkConnection ()) { final SolrZkClient zkClient = new 
SolrZkClient(conn.getServer().getZkHost(), AbstractZkTestCase.TIMEOUT); @@ -90,6 +94,7 @@ public void testMakeRootNode() throws Exception { } } + @SuppressWarnings({"try"}) public void testClean() throws Exception { try (ZkConnection conn = new ZkConnection ()) { final SolrZkClient zkClient = conn.getClient(); @@ -226,6 +231,7 @@ public void testZkCmdExectutor() throws Exception { } @Test + @SuppressWarnings({"try"}) public void testMultipleWatchesAsync() throws Exception { try (ZkConnection conn = new ZkConnection()) { final SolrZkClient zkClient = conn.getClient(); @@ -276,6 +282,7 @@ public void process(WatchedEvent event) { } } + @SuppressWarnings({"try"}) public void testWatchChildren() throws Exception { try (ZkConnection conn = new ZkConnection ()) { final SolrZkClient zkClient = conn.getClient(); @@ -323,7 +330,8 @@ public void process(WatchedEvent event) { } } - + + @SuppressWarnings({"try"}) public void testSkipPathPartsOnMakePath() throws Exception { try (ZkConnection conn = new ZkConnection()) { final SolrZkClient zkClient = conn.getClient(); diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/AbstractCloudBackupRestoreTestCase.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/AbstractCloudBackupRestoreTestCase.java index e4bb328facee..21a362b442f2 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/AbstractCloudBackupRestoreTestCase.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/AbstractCloudBackupRestoreTestCase.java @@ -421,8 +421,6 @@ private void testBackupAndRestore(String collectionName, int backupReplFactor) t assertEquals(restoreCollectionName, backupCollection.getMaxShardsPerNode(), restoreCollection.getMaxShardsPerNode()); } - assertEquals("Restore collection should use stateFormat=2", 2, restoreCollection.getStateFormat()); - //SOLR-12605: Add more docs after restore is complete to see if they are getting added fine //explicitly querying the leaders. 
If we use CloudSolrClient there is no guarantee that we'll hit a nrtReplica { diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/AssignTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/AssignTest.java index 8c97c8de09da..962d04e0fd9d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/AssignTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/AssignTest.java @@ -194,6 +194,7 @@ public void testUseLegacyByDefault() throws Exception { // lets provide a custom preference and assert that autoscaling is used even if useLegacyReplicaAssignment=false // our custom preferences are exactly the same as the default ones // but because we are providing them explicitly, they must cause autoscaling to turn on + @SuppressWarnings({"rawtypes"}) List customPreferences = Policy.DEFAULT_PREFERENCES .stream().map(preference -> preference.getOriginal()).collect(Collectors.toList()); diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIAsyncDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIAsyncDistributedZkTest.java index bec55d344bd6..c0214f8f0f7f 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIAsyncDistributedZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIAsyncDistributedZkTest.java @@ -40,7 +40,6 @@ import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; -import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.SolrNamedThreadFactory; import org.junit.After; @@ -99,13 +98,6 @@ public void testSolrJAPICalls() throws Exception { @Test public void testAsyncRequests() throws Exception { - boolean legacy = random().nextBoolean(); - if (legacy) { - CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "true").process(cluster.getSolrClient()); - } else { - CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "false").process(cluster.getSolrClient()); - } - final String collection = "testAsyncOperations"; final CloudSolrClient client = cluster.getSolrClient(); @@ -214,11 +206,9 @@ public void testAsyncRequests() throws Exception { .processAndWait(client, MAX_TIMEOUT_SECONDS); assertSame("DeleteReplica did not complete", RequestStatusState.COMPLETED, state); - if (!legacy) { - state = CollectionAdminRequest.deleteCollection(collection) - .processAndWait(client, MAX_TIMEOUT_SECONDS); - assertSame("DeleteCollection did not complete", RequestStatusState.COMPLETED, state); - } + state = CollectionAdminRequest.deleteCollection(collection) + .processAndWait(client, MAX_TIMEOUT_SECONDS); + assertSame("DeleteCollection did not complete", RequestStatusState.COMPLETED, state); } public void testAsyncIdRaceCondition() throws Exception { diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIDistributedZkTest.java index af3cd55d40f8..ad02ab1e0b48 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIDistributedZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIDistributedZkTest.java @@ -100,6 +100,7 @@ public void setupCluster() throws Exception { System.setProperty("createCollectionWaitTimeTillActive", "5"); 
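The setup hunk continuing below pairs a new `System.setProperty("solr.allowPaths", "*")` with a matching `System.clearProperty` in `tearDownCluster`: Solr now rejects core instance and data directories outside an allow-list, and the `"*"` value (like the `_ALL_` path marker used in UnloadDistributedZkTest earlier) opts a test out of that check. A compact sketch of the same set-then-clear hygiene, assuming JUnit 4 as these tests use (the class name is illustrative):

    import org.junit.After;
    import org.junit.Before;

    public class AllowPathsLifecycleSketch {
        @Before
        public void setUp() {
            // "*" disables the core-path allow-list for the duration of the test.
            System.setProperty("solr.allowPaths", "*");
        }

        @After
        public void tearDown() {
            // Clear unconditionally so later tests see the default, restricted behavior.
            System.clearProperty("solr.allowPaths");
        }
    }
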
TestInjection.randomDelayInCoreCreation = "true:5"; System.setProperty("validateAfterInactivity", "200"); + System.setProperty("solr.allowPaths", "*"); configureCluster(4) .addConfig("conf", configset(getConfigSet())) @@ -114,6 +115,7 @@ public void tearDownCluster() throws Exception { shutdownCluster(); } finally { System.clearProperty("createCollectionWaitTimeTillActive"); + System.clearProperty("solr.allowPaths"); super.tearDown(); } } @@ -213,6 +215,7 @@ public void testMissingRequiredParameters() { params.set("action", CollectionAction.CREATE.toString()); params.set("numShards", 2); // missing required collection parameter + @SuppressWarnings({"rawtypes"}) final SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); @@ -223,6 +226,7 @@ public void testMissingRequiredParameters() { @Test public void testTooManyReplicas() { + @SuppressWarnings({"rawtypes"}) CollectionAdminRequest req = CollectionAdminRequest.createCollection("collection", "conf", 2, 10); expectThrows(Exception.class, () -> { @@ -239,6 +243,7 @@ public void testMissingNumShards() { params.set(REPLICATION_FACTOR, 10); params.set("collection.configName", "conf"); + @SuppressWarnings({"rawtypes"}) final SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); @@ -256,6 +261,7 @@ public void testZeroNumShards() { params.set("numShards", 0); params.set("collection.configName", "conf"); + @SuppressWarnings({"rawtypes"}) final SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); expectThrows(Exception.class, () -> { diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/ConcurrentCreateCollectionTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/ConcurrentCreateCollectionTest.java index 42fd19d10e99..d9fb8b19eaaf 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/ConcurrentCreateCollectionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/ConcurrentCreateCollectionTest.java @@ -159,6 +159,7 @@ public void testConcurrentCreatePlacement() throws Exception { " ]" + "}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = CloudTestUtils.AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand); client.request(req); } diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java index f0e92fabd3f6..1dd3f8f1c951 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java @@ -103,11 +103,6 @@ public void test() throws Exception { waitForThingsToLevelOut(15, TimeUnit.SECONDS); - if (usually()) { - log.info("Using legacyCloud=false for cluster"); - CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "false") - .process(cloudClient); - } incompleteOrOverlappingCustomRangeTest(); splitByUniqueKeyTest(); splitByRouteFieldTest(); @@ -416,10 +411,6 @@ private void verifyShard(DocCollection coll, String shard, Slice.State expectedS public void testSplitWithChaosMonkey() throws Exception { waitForThingsToLevelOut(15, TimeUnit.SECONDS); - log.info("Using legacyCloud=false for cluster"); - CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "false") - .process(cloudClient); - List indexers = new ArrayList<>(); try { for (int i = 0; i < 1; i++) { @@ -645,12 +636,6 @@ public void testSplitShardWithRuleLink() 
throws Exception { private void doSplitShardWithRule(SolrIndexSplitter.SplitMethod splitMethod) throws Exception { waitForThingsToLevelOut(15, TimeUnit.SECONDS); - if (usually()) { - log.info("Using legacyCloud=false for cluster"); - CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "false") - .process(cloudClient); - } - log.info("Starting testSplitShardWithRule"); String collectionName = "shardSplitWithRule_" + splitMethod.toLower(); CollectionAdminRequest.Create createRequest = CollectionAdminRequest.createCollection(collectionName, "conf1", 1, 2) @@ -1061,6 +1046,7 @@ protected void splitShard(String collection, String shardId, List request = create.process(cloudClient).getResponse(); if (request.get("success") != null) { assertTrue(cloudClient.getZkStateReader().getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName, false)); + @SuppressWarnings({"rawtypes"}) CollectionAdminRequest delete = CollectionAdminRequest.deleteCollection(collectionName); cloudClient.request(delete); @@ -92,8 +92,7 @@ public void test() throws Exception { // create collection again on a node other than the overseer leader create = CollectionAdminRequest.createCollection(collectionName,1,1) - .setCreateNodeSet(notOverseerNode) - .setStateFormat(2); + .setCreateNodeSet(notOverseerNode); request = create.process(cloudClient).getResponse(); assertTrue("Collection creation should not have failed", request.get("success") != null); } diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionAPI.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionAPI.java index 804728b5b8a1..b0a96913af22 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionAPI.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionAPI.java @@ -25,7 +25,6 @@ import java.util.concurrent.atomic.AtomicLong; import com.google.common.collect.Lists; -import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.BaseHttpSolrClient; @@ -34,7 +33,6 @@ import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.client.solrj.request.V2Request; import org.apache.solr.client.solrj.response.CollectionAdminResponse; -import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.cloud.ZkTestServer; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; @@ -94,7 +92,6 @@ public void test() throws Exception { clusterStatusBadCollectionTest(); replicaPropTest(); clusterStatusZNodeVersion(); - testClusterStateMigration(); testCollectionCreationCollectionNameValidation(); testCollectionCreationTooManyShards(); testReplicationFactorValidaton(); @@ -114,6 +111,7 @@ private void testCollectionCreationTooManyShards() throws Exception { params.set("numShards", "10"); params.set("maxShardsPerNode", 1); params.set("shards", "b0,b1,b2,b3,b4,b5,b6,b7,b8,b9"); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); @@ -134,12 +132,14 @@ private void assertMissingCollection(CloudSolrClient client, String collectionNa assertNull(clusterState.getCollectionOrNull(collectionName)); } + @SuppressWarnings({"unchecked", "rawtypes"}) private void testModifyCollection() throws Exception { try (CloudSolrClient client = createCloudClient(null)) { ModifiableSolrParams 
params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.MODIFYCOLLECTION.toString()); params.set("collection", COLLECTION_NAME); params.set("replicationFactor", 25); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); @@ -201,6 +201,7 @@ private void testReplicationFactorValidaton() throws Exception { params.set("numShards", "1"); params.set("replicationFactor", "1"); params.set("nrtReplicas", "2"); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); @@ -248,6 +249,7 @@ private void testNoConfigset() throws Exception { params.set("numShards", "1"); params.set("replicationFactor", "1"); params.set("collection.configName", configSet); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); @@ -263,8 +265,10 @@ private void testNoConfigset() throws Exception { final CollectionAdminRequest.ClusterStatus req = CollectionAdminRequest.getClusterStatus(); NamedList rsp = client.request(req); + @SuppressWarnings({"unchecked"}) NamedList cluster = (NamedList) rsp.get("cluster"); assertNotNull("Cluster state should not be null", cluster); + @SuppressWarnings({"unchecked"}) NamedList collections = (NamedList) cluster.get("collections"); assertNotNull("Collections should not be null in cluster state", collections); assertNotNull("Testing to insure collections are returned", collections.get(COLLECTION_NAME1)); @@ -287,11 +291,14 @@ private void assertCountsForRepFactorAndNrtReplicas(CloudSolrClient client, Stri request.setPath("/admin/collections"); NamedList rsp = client.request(request); + @SuppressWarnings({"unchecked"}) NamedList cluster = (NamedList) rsp.get("cluster"); assertNotNull("Cluster state should not be null", cluster); + @SuppressWarnings({"unchecked"}) NamedList collections = (NamedList) cluster.get("collections"); assertNotNull("Collections should not be null in cluster state", collections); assertEquals(1, collections.size()); + @SuppressWarnings({"unchecked"}) Map collection = (Map) collections.get(collectionName); assertNotNull(collection); assertEquals(collection.get("replicationFactor"), collection.get("nrtReplicas")); @@ -304,19 +311,25 @@ private void clusterStatusWithCollectionAndShard() throws IOException, SolrServe params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString()); params.set("collection", COLLECTION_NAME); params.set("shard", SHARD1); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); NamedList rsp = client.request(request); + @SuppressWarnings({"unchecked"}) NamedList cluster = (NamedList) rsp.get("cluster"); assertNotNull("Cluster state should not be null", cluster); + @SuppressWarnings({"unchecked"}) NamedList collections = (NamedList) cluster.get("collections"); assertNotNull("Collections should not be null in cluster state", collections); assertNotNull(collections.get(COLLECTION_NAME)); assertEquals(1, collections.size()); + @SuppressWarnings({"unchecked"}) Map collection = (Map) collections.get(COLLECTION_NAME); + @SuppressWarnings({"unchecked"}) Map shardStatus = (Map) collection.get("shards"); assertEquals(1, shardStatus.size()); + @SuppressWarnings({"unchecked"}) Map selectedShardStatus = (Map) shardStatus.get(SHARD1); assertNotNull(selectedShardStatus); @@ -330,17 +343,23 @@ private void 
clusterStatusWithCollectionAndMultipleShards() throws IOException, request.setShardName(SHARD1 + "," + SHARD2); NamedList rsp = request.process(client).getResponse(); + @SuppressWarnings({"unchecked"}) NamedList cluster = (NamedList) rsp.get("cluster"); assertNotNull("Cluster state should not be null", cluster); + @SuppressWarnings({"unchecked"}) NamedList collections = (NamedList) cluster.get("collections"); assertNotNull("Collections should not be null in cluster state", collections); assertNotNull(collections.get(COLLECTION_NAME)); assertEquals(1, collections.size()); + @SuppressWarnings({"unchecked"}) Map collection = (Map) collections.get(COLLECTION_NAME); + @SuppressWarnings({"unchecked"}) Map shardStatus = (Map) collection.get("shards"); assertEquals(2, shardStatus.size()); + @SuppressWarnings({"unchecked"}) Map firstSelectedShardStatus = (Map) shardStatus.get(SHARD1); assertNotNull(firstSelectedShardStatus); + @SuppressWarnings({"unchecked"}) Map secondSelectedShardStatus = (Map) shardStatus.get(SHARD2); assertNotNull(secondSelectedShardStatus); } @@ -351,10 +370,12 @@ private void listCollection() throws IOException, SolrServerException { try (CloudSolrClient client = createCloudClient(null)) { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.LIST.toString()); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); NamedList rsp = client.request(request); + @SuppressWarnings({"unchecked"}) List collections = (List) rsp.get("collections"); assertTrue("control_collection was not found in list", collections.contains("control_collection")); assertTrue(DEFAULT_COLLECTION + " was not found in list", collections.contains(DEFAULT_COLLECTION)); @@ -369,17 +390,21 @@ private void clusterStatusNoCollection() throws Exception { try (CloudSolrClient client = createCloudClient(null)) { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString()); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); NamedList rsp = client.request(request); + @SuppressWarnings({"unchecked"}) NamedList cluster = (NamedList) rsp.get("cluster"); assertNotNull("Cluster state should not be null", cluster); + @SuppressWarnings({"unchecked"}) NamedList collections = (NamedList) cluster.get("collections"); assertNotNull("Collections should not be null in cluster state", collections); assertNotNull(collections.get(COLLECTION_NAME1)); assertEquals(4, collections.size()); + @SuppressWarnings({"unchecked"}) List liveNodes = (List) cluster.get("live_nodes"); assertNotNull("Live nodes should not be null", liveNodes); assertFalse(liveNodes.isEmpty()); @@ -392,15 +417,19 @@ private void clusterStatusWithCollection() throws IOException, SolrServerExcepti ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString()); params.set("collection", COLLECTION_NAME); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); NamedList rsp = client.request(request); + @SuppressWarnings({"unchecked"}) NamedList cluster = (NamedList) rsp.get("cluster"); assertNotNull("Cluster state should not be null", cluster); + @SuppressWarnings({"unchecked"}) NamedList collections = (NamedList) cluster.get("collections"); assertNotNull("Collections 
should not be null in cluster state", collections); assertEquals(1, collections.size()); + @SuppressWarnings({"unchecked"}) Map collection = (Map) collections.get(COLLECTION_NAME); assertNotNull(collection); assertEquals("conf1", collection.get("configName")); @@ -408,6 +437,7 @@ private void clusterStatusWithCollection() throws IOException, SolrServerExcepti } } + @SuppressWarnings({"unchecked"}) private void clusterStatusZNodeVersion() throws Exception { String cname = "clusterStatusZNodeVersion"; try (CloudSolrClient client = createCloudClient(null)) { @@ -418,6 +448,7 @@ private void clusterStatusZNodeVersion() throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString()); params.set("collection", cname); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); @@ -451,6 +482,7 @@ private void clusterStatusZNodeVersion() throws Exception { private static long totalexpectedV2Calls; + @SuppressWarnings({"rawtypes"}) public static SolrRequest setV2(SolrRequest req) { if (V2Request.v2Calls.get() == null) V2Request.v2Calls.set(new AtomicLong()); totalexpectedV2Calls = V2Request.v2Calls.get().get(); @@ -477,25 +509,32 @@ private void clusterStatusWithRouteKey() throws IOException, SolrServerException params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString()); params.set("collection", DEFAULT_COLLECTION); params.set(ShardParams._ROUTE_, "a!"); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); NamedList rsp = client.request(request); + @SuppressWarnings({"unchecked"}) NamedList cluster = (NamedList) rsp.get("cluster"); assertNotNull("Cluster state should not be null", cluster); + @SuppressWarnings({"unchecked"}) NamedList collections = (NamedList) cluster.get("collections"); assertNotNull("Collections should not be null in cluster state", collections); assertNotNull(collections.get(DEFAULT_COLLECTION)); assertEquals(1, collections.size()); + @SuppressWarnings({"unchecked"}) Map collection = (Map) collections.get(DEFAULT_COLLECTION); assertEquals("conf1", collection.get("configName")); + @SuppressWarnings({"unchecked"}) Map shardStatus = (Map) collection.get("shards"); assertEquals(1, shardStatus.size()); + @SuppressWarnings({"unchecked"}) Map selectedShardStatus = (Map) shardStatus.get(SHARD2); assertNotNull(selectedShardStatus); } } + @SuppressWarnings({"unchecked"}) private void clusterStatusAliasTest() throws Exception { try (CloudSolrClient client = createCloudClient(null)) { // create an alias named myalias @@ -503,6 +542,7 @@ private void clusterStatusAliasTest() throws Exception { params.set("action", CollectionParams.CollectionAction.CREATEALIAS.toString()); params.set("name", "myalias"); params.set("collections", DEFAULT_COLLECTION + "," + COLLECTION_NAME); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); @@ -575,6 +615,7 @@ private void clusterStatusRolesTest() throws Exception { params.set("action", CollectionParams.CollectionAction.ADDROLE.toString()); params.set("node", replica.getNodeName()); params.set("role", "overseer"); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); client.request(request); @@ -586,10 +627,13 @@ private void clusterStatusRolesTest() throws Exception { 
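Aside: the hunks in this file all walk the CLUSTERSTATUS response the same way, and each navigation step is an unchecked or raw cast, which is why the patch ends up annotating nearly every declaration. A minimal standalone sketch of that shape, assuming only the stock org.apache.solr.common.util.NamedList API (the response layout mirrors what the assertions above check):

    import java.util.Map;
    import org.apache.solr.common.util.NamedList;

    public class ClusterStatusWalkSketch {
      @SuppressWarnings({"unchecked", "rawtypes"})
      public static void main(String[] args) {
        // cluster -> collections -> <collection name> -> properties
        NamedList<Object> collections = new NamedList<>();
        collections.add("collection1", Map.of("configName", "conf1"));
        NamedList<Object> cluster = new NamedList<>();
        cluster.add("collections", collections);
        NamedList<Object> rsp = new NamedList<>();
        rsp.add("cluster", cluster);

        // Every get() returns Object, so every step below is a raw/unchecked
        // cast -- the same casts the patch annotates one declaration at a time.
        NamedList clusterNl = (NamedList) rsp.get("cluster");
        NamedList collectionsNl = (NamedList) clusterNl.get("collections");
        Map collection = (Map) collectionsNl.get("collection1");
        System.out.println(collection.get("configName")); // prints conf1
      }
    }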
request.setPath("/admin/collections"); NamedList rsp = client.request(request); + @SuppressWarnings({"unchecked"}) NamedList cluster = (NamedList) rsp.get("cluster"); assertNotNull("Cluster state should not be null", cluster); + @SuppressWarnings({"unchecked"}) Map roles = (Map) cluster.get("roles"); assertNotNull("Role information should not be null", roles); + @SuppressWarnings({"unchecked"}) List overseer = (List) roles.get("overseer"); assertNotNull(overseer); assertEquals(1, overseer.size()); @@ -602,6 +646,7 @@ private void clusterStatusBadCollectionTest() throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString()); params.set("collection", "bad_collection_name"); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); @@ -653,6 +698,7 @@ private void replicaPropTest() throws Exception { missingParamsError(client, params); params.set("property.value", "true"); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); client.request(request); @@ -895,39 +941,12 @@ private void replicaPropTest() throws Exception { } } - private void testClusterStateMigration() throws Exception { - try (CloudSolrClient client = createCloudClient(null)) { - client.connect(); - - CollectionAdminRequest.createCollection("testClusterStateMigration","conf1",1,1).setStateFormat(1).process(client); - - waitForRecoveriesToFinish("testClusterStateMigration", true); - - assertEquals(1, client.getZkStateReader().getClusterState().getCollection("testClusterStateMigration").getStateFormat()); - - for (int i = 0; i < 10; i++) { - SolrInputDocument doc = new SolrInputDocument(); - doc.addField("id", "id_" + i); - client.add("testClusterStateMigration", doc); - } - client.commit("testClusterStateMigration"); - - CollectionAdminRequest.migrateCollectionFormat("testClusterStateMigration").process(client); - - client.getZkStateReader().forceUpdateCollection("testClusterStateMigration"); - - assertEquals(2, client.getZkStateReader().getClusterState().getCollection("testClusterStateMigration").getStateFormat()); - - QueryResponse response = client.query("testClusterStateMigration", new SolrQuery("*:*")); - assertEquals(10, response.getResults().getNumFound()); - } - } - private void testCollectionCreationCollectionNameValidation() throws Exception { try (CloudSolrClient client = createCloudClient(null)) { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.CREATE.toString()); params.set("name", "invalid@name#with$weird%characters"); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); @@ -951,6 +970,7 @@ private void testCollectionCreationShardNameValidation() throws Exception { params.set("router.name", "implicit"); params.set("numShards", "1"); params.set("shards", "invalid@name#with$weird%characters"); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); @@ -972,6 +992,7 @@ private void testAliasCreationNameValidation() throws Exception{ params.set("action", CollectionParams.CollectionAction.CREATEALIAS.toString()); params.set("name", "invalid@name#with$weird%characters"); params.set("collections", COLLECTION_NAME); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); 
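Aside: these name-validation tests deliberately skip the typed CollectionAdminRequest builders and post raw parameters to /admin/collections, so the malformed name reaches server-side validation instead of failing in the client. Condensed, the idiom looks like the sketch below; expectThrows is the LuceneTestCase helper the surrounding tests already use, and solrClient is any client pointed at the cluster:

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.SolrRequest;
    import org.apache.solr.client.solrj.request.QueryRequest;
    import org.apache.solr.common.params.ModifiableSolrParams;

    void expectCreateToBeRejected(SolrClient solrClient) {
      ModifiableSolrParams params = new ModifiableSolrParams();
      params.set("action", "CREATE");  // CollectionAction.CREATE.toString()
      params.set("name", "invalid@name#with$weird%characters");
      @SuppressWarnings({"rawtypes"})
      SolrRequest request = new QueryRequest(params);
      request.setPath("/admin/collections");
      // The server rejects the name; the client never validates it.
      expectThrows(Exception.class, () -> solrClient.request(request));
    }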
request.setPath("/admin/collections"); @@ -996,6 +1017,7 @@ private void testShardCreationNameValidation() throws Exception { params.set("name", "valid_collection_name"); params.set("shards", "a"); params.set("router.name", "implicit"); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); client.request(request); @@ -1040,6 +1062,7 @@ private Map getProps(CloudSolrClient client, String collectionNa private void missingParamsError(CloudSolrClient client, ModifiableSolrParams origParams) throws IOException, SolrServerException { + @SuppressWarnings({"rawtypes"}) SolrRequest request; try { request = new QueryRequest(origParams); diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestReplicaProperties.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestReplicaProperties.java index d327aec470d3..f80edabd74c9 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestReplicaProperties.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestReplicaProperties.java @@ -73,10 +73,12 @@ private void listCollection() throws IOException, SolrServerException { try (CloudSolrClient client = createCloudClient(null)) { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.LIST.toString()); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); NamedList rsp = client.request(request); + @SuppressWarnings({"unchecked"}) List collections = (List) rsp.get("collections"); assertTrue("control_collection was not found in list", collections.contains("control_collection")); assertTrue(DEFAULT_COLLECTION + " was not found in list", collections.contains(DEFAULT_COLLECTION)); diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestRequestStatusCollectionAPI.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestRequestStatusCollectionAPI.java index a0fa70c2b375..58c4686443f4 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestRequestStatusCollectionAPI.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestRequestStatusCollectionAPI.java @@ -60,7 +60,9 @@ public void test() throws Exception { // Check for the request to be completed. 
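Aside: the completion check below polls the REQUESTSTATUS action by hand through raw NamedList plumbing. For comparison, a typed sketch of the same wait using the stock solrj classes CollectionAdminRequest.requestStatus and RequestStatusState (the 500 ms poll interval is arbitrary):

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.request.CollectionAdminRequest;
    import org.apache.solr.client.solrj.response.RequestStatusState;

    static RequestStatusState waitForAsyncTask(SolrClient client, String requestId)
        throws Exception {
      while (true) {
        RequestStatusState state = CollectionAdminRequest.requestStatus(requestId)
            .process(client).getRequestStatus();
        if (state != RequestStatusState.RUNNING && state != RequestStatusState.SUBMITTED) {
          return state;  // COMPLETED, FAILED or NOT_FOUND
        }
        Thread.sleep(500);
      }
    }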
+ @SuppressWarnings({"rawtypes"}) NamedList r = null; + @SuppressWarnings({"rawtypes"}) NamedList status = null; String message = null; diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java index 15c8d37ad8c5..7a595d5e855d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java @@ -382,19 +382,23 @@ public SolrParams getParams() { } private void disableAutoAddReplicasInCluster() throws SolrServerException, IOException { + @SuppressWarnings({"rawtypes"}) Map m = makeMap( "action", CollectionParams.CollectionAction.CLUSTERPROP.toLower(), "name", ZkStateReader.AUTO_ADD_REPLICAS, "val", "false"); + @SuppressWarnings({"unchecked"}) QueryRequest request = new QueryRequest(new MapSolrParams(m)); request.setPath("/admin/collections"); cluster.getSolrClient().request(request); } private void enableAutoAddReplicasInCluster() throws SolrServerException, IOException { + @SuppressWarnings({"rawtypes"}) Map m = makeMap( "action", CollectionParams.CollectionAction.CLUSTERPROP.toLower(), "name", ZkStateReader.AUTO_ADD_REPLICAS); + @SuppressWarnings({"unchecked"}) QueryRequest request = new QueryRequest(new MapSolrParams(m)); request.setPath("/admin/collections"); cluster.getSolrClient().request(request); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanActionTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanActionTest.java index b6e6d2079086..8fca98da48da 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanActionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanActionTest.java @@ -84,6 +84,7 @@ public void testSimple() throws Exception { String collection1 = "testSimple1"; String collection2 = "testSimple2"; + String collection3 = "testSimple3"; CollectionAdminRequest.createCollection(collection1, "conf", 2, 2) .setCreateNodeSet(jetty1.getNodeName()+","+jetty2.getNodeName()) .setAutoAddReplicas(true) @@ -94,8 +95,8 @@ public void testSimple() throws Exception { .setAutoAddReplicas(false) .setMaxShardsPerNode(1) .process(cluster.getSolrClient()); - // the number of cores in jetty1 (5) will be larger than jetty3 (1) - CollectionAdminRequest.createCollection("testSimple3", "conf", 3, 1) + // the number of cores in jetty1 (6) will be larger than jetty3 (1) + CollectionAdminRequest.createCollection(collection3, "conf", 3, 1) .setCreateNodeSet(jetty1.getNodeName()) .setAutoAddReplicas(false) .setMaxShardsPerNode(3) @@ -103,7 +104,7 @@ public void testSimple() throws Exception { cluster.waitForActiveCollection(collection1, 2, 4); cluster.waitForActiveCollection(collection2, 1, 2); - cluster.waitForActiveCollection("testSimple3", 3, 3); + cluster.waitForActiveCollection(collection3, 3, 3); // we remove the implicit created trigger, so the replicas won't be moved String removeTriggerCommand = "{" + @@ -112,7 +113,9 @@ public void testSimple() throws Exception { "'removeListeners': true" + "}" + "}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, removeTriggerCommand); + @SuppressWarnings({"rawtypes"}) NamedList response = cluster.getSolrClient().request(req); assertEquals(response.get("result").toString(), "success"); @@ -131,6 +134,7 @@ public 
void testSimple() throws Exception { reader.waitForLiveNodes(30, TimeUnit.SECONDS, missingLiveNode(lostNodeName)); + @SuppressWarnings({"rawtypes"}) List operations = getOperations(jetty3, lostNodeName); assertOperations(collection1, operations, lostNodeName, cloudDescriptors, null); @@ -139,7 +143,7 @@ public void testSimple() throws Exception { cluster.waitForActiveCollection(collection1, 2, 4); cluster.waitForActiveCollection(collection2, 1, 2); - cluster.waitForActiveCollection("testSimple3", 3, 3); + cluster.waitForActiveCollection(collection3, 3, 3); assertTrue("Timeout waiting for all live and active", ClusterStateUtil.waitForAllActiveAndLiveReplicas(cluster.getSolrClient().getZkStateReader(), 30000)); @@ -184,7 +188,7 @@ public void testSimple() throws Exception { cluster.waitForActiveCollection(collection1, 2, 4); cluster.waitForActiveCollection(collection2, 1, 2); - cluster.waitForActiveCollection("testSimple3", 3, 3); + cluster.waitForActiveCollection(collection3, 3, 3); assertTrue("Timeout waiting for all live and active", ClusterStateUtil.waitForAllActiveAndLiveReplicas(cluster.getSolrClient().getZkStateReader(), 30000)); @@ -209,21 +213,25 @@ public SolrParams getParams() { } @SuppressForbidden(reason = "Needs currentTimeMillis to create unique id") + @SuppressWarnings({"rawtypes"}) private List getOperations(JettySolrRunner actionJetty, String lostNodeName) throws Exception { try (AutoAddReplicasPlanAction action = new AutoAddReplicasPlanAction()) { + action.configure(actionJetty.getCoreContainer().getResourceLoader(), actionJetty.getCoreContainer().getZkController().getSolrCloudManager(), new HashMap<>()); TriggerEvent lostNode = new NodeLostTrigger.NodeLostEvent(TriggerEventType.NODELOST, ".auto_add_replicas", Collections.singletonList(System.currentTimeMillis()), Collections.singletonList(lostNodeName), CollectionParams.CollectionAction.MOVEREPLICA.toLower()); ActionContext context = new ActionContext(actionJetty.getCoreContainer().getZkController().getSolrCloudManager(), null, new HashMap<>()); action.process(lostNode, context); + @SuppressWarnings({"unchecked", "rawtypes"}) List operations = (List) context.getProperty("operations"); return operations; } } - private void assertOperations(String collection, List operations, String lostNodeName, + private void assertOperations(String collection, + @SuppressWarnings({"rawtypes"})List operations, String lostNodeName, List cloudDescriptors, JettySolrRunner destJetty) { assertEquals("Replicas of " + collection + " is not fully moved, operations="+operations, cloudDescriptors.stream().filter(cd -> cd.getCollectionName().equals(collection)).count(), operations.size()); - for (SolrRequest solrRequest : operations) { + for (@SuppressWarnings({"rawtypes"})SolrRequest solrRequest : operations) { assertTrue(solrRequest instanceof CollectionAdminRequest.MoveReplica); SolrParams params = solrRequest.getParams(); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoScalingHandlerTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoScalingHandlerTest.java index fb65c5896978..2a8c9c116730 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoScalingHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoScalingHandlerTest.java @@ -77,10 +77,13 @@ private static void testAutoAddReplicas() throws Exception { while (!timeOut.hasTimedOut()) { byte[] data = zkClient().getData(SOLR_AUTOSCALING_CONF_PATH, null, null, true); ZkNodeProps loaded = ZkNodeProps.load(data); + 
@SuppressWarnings({"rawtypes"}) Map triggers = (Map) loaded.get("triggers"); if (triggers != null && triggers.containsKey(".auto_add_replicas")) { + @SuppressWarnings({"unchecked"}) Map autoAddReplicasTrigger = (Map) triggers.get(".auto_add_replicas"); assertNotNull(autoAddReplicasTrigger); + @SuppressWarnings({"unchecked"}) List> actions = (List>) autoAddReplicasTrigger.get("actions"); assertNotNull(actions); assertEquals(2, actions.size()); @@ -116,6 +119,7 @@ public void testSuggestionsWithPayload() throws Exception { " 'cluster-policy': [{'replica': 0, 'node': '_NODE'}]\n" + "}"; configPayload = configPayload.replaceAll("_NODE", aReplica.getNodeName()); + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, "/suggestions", configPayload); NamedList response = solrClient.request(req); assertFalse(((Collection) response.get("suggestions")).isEmpty()); @@ -149,6 +153,7 @@ public void testDiagnosticsWithPayload() throws Exception { " 'cluster-policy': [{'replica': 0, 'node': '_NODE'}]\n" + "}"; configPayload = configPayload.replaceAll("_NODE", aReplica.getNodeName()); + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, "/diagnostics", configPayload); NamedList response = solrClient.request(req); assertEquals(response._getStr("diagnostics/violations[0]/node",null),response._getStr("diagnostics/violations[0]/node",null)); @@ -157,6 +162,7 @@ public void testDiagnosticsWithPayload() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testSuspendTrigger() throws Exception { CloudSolrClient solrClient = cluster.getSolrClient(); String suspendEachCommand = "{\n" + @@ -170,6 +176,7 @@ public void testSuspendTrigger() throws Exception { "\t}\n" + "}"; // these should be no-ops because there are no triggers, and it should succeed + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, suspendEachCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -319,6 +326,7 @@ public void testSuspendTrigger() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void test() throws Exception { CloudSolrClient solrClient = cluster.getSolrClient(); String setTriggerCommand = "{" + @@ -332,6 +340,7 @@ public void test() throws Exception { "'name' : 'compute_plan'," + "'class' : 'solr.ComputePlanAction'" + "}]}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); @@ -474,6 +483,7 @@ public void testErrorHandling() throws Exception { " ]" + "}"; try { + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand); solrClient.request(req); fail("expect exception"); @@ -501,6 +511,7 @@ public void testValidation() throws Exception { "'name' : 'compute_plan'," + "'class' : 'solr.ComputePlanAction'" + "}]}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); try { @@ -601,6 +612,7 @@ public void testValidation() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testPolicyAndPreferences() throws Exception { CloudSolrClient solrClient = cluster.getSolrClient(); // add multiple policies @@ -614,6 +626,7 @@ public void testPolicyAndPreferences() throws Exception { " {'replica':'<2', 'shard': '#EACH', 
'node': '#ANY'}" + " ]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setPolicyCommand); NamedList response = null; try { @@ -658,6 +671,7 @@ public void testPolicyAndPreferences() throws Exception { data = zkClient().getData(SOLR_AUTOSCALING_CONF_PATH, null, null, true); loaded = ZkNodeProps.load(data); policies = (Map) loaded.get("policies"); + @SuppressWarnings({"rawtypes"}) List conditions = (List) policies.get("xyz"); assertEquals(1, conditions.size()); @@ -683,6 +697,7 @@ public void testPolicyAndPreferences() throws Exception { assertEquals(response.get("result").toString(), "success"); data = zkClient().getData(SOLR_AUTOSCALING_CONF_PATH, null, null, true); loaded = ZkNodeProps.load(data); + @SuppressWarnings({"rawtypes"}) List preferences = (List) loaded.get("cluster-preferences"); assertEquals(3, preferences.size()); @@ -711,6 +726,7 @@ public void testPolicyAndPreferences() throws Exception { assertEquals(response.get("result").toString(), "success"); data = zkClient().getData(SOLR_AUTOSCALING_CONF_PATH, null, null, true); loaded = ZkNodeProps.load(data); + @SuppressWarnings({"rawtypes"}) List clusterPolicy = (List) loaded.get("cluster-policy"); assertNotNull(clusterPolicy); assertEquals(3, clusterPolicy.size()); @@ -734,6 +750,7 @@ public void testPolicyAndPreferences() throws Exception { @Test // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 17-Aug-2018 + @SuppressWarnings({"unchecked", "rawtypes"}) public void testReadApi() throws Exception { CloudSolrClient solrClient = cluster.getSolrClient(); // first trigger @@ -927,6 +944,7 @@ public void testConcurrentUpdates() throws Exception { "'waitFor' : '0s'," + "'enabled' : true" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = null; try { @@ -945,13 +963,16 @@ public void testConcurrentUpdates() throws Exception { t2.start(); boolean await = updateLatch.await(60, TimeUnit.SECONDS); assertTrue("not all updates executed in time, remaining=" + updateLatch.getCount(), await); + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.GET, null); NamedList response = solrClient.request(req); + @SuppressWarnings({"rawtypes"}) Map triggers = (Map) response.get("triggers"); assertNotNull(triggers); assertEquals(1, countNotImplicitTriggers(triggers)); assertTrue(triggers.containsKey("node_added_trigger1")); + @SuppressWarnings({"rawtypes"}) Map node_added_trigger1 = (Map) triggers.get("node_added_trigger1"); assertEquals(4, node_added_trigger1.size()); assertEquals(0L, node_added_trigger1.get("waitFor")); @@ -960,7 +981,7 @@ public void testConcurrentUpdates() throws Exception { } - private int countNotImplicitTriggers(Map triggers) { + private int countNotImplicitTriggers(@SuppressWarnings({"rawtypes"})Map triggers) { if (triggers == null) return 0; int count = 0; for (Object trigger : triggers.keySet()) { @@ -1003,8 +1024,10 @@ public void testSetProperties() throws Exception { "\t}\n" + "}"; solrClient.request(AutoScalingRequest.create(SolrRequest.METHOD.POST, setPropertiesCommand)); + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.GET, null); NamedList response = solrClient.request(req); + @SuppressWarnings({"rawtypes"}) Map properties = (Map) response.get("properties"); assertNotNull(properties); assertEquals(1, 
properties.size()); @@ -1077,6 +1100,7 @@ public void testUpdatePolicy() throws IOException, SolrServerException { String setPropertiesCommand = "{'set-cluster-policy': [" + "{'cores': '<4','node': '#ANY'}]}"; solrClient.request(AutoScalingRequest.create(SolrRequest.METHOD.POST, setPropertiesCommand)); + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.GET, null); NamedList response = solrClient.request(req); assertEquals("<4", response._get("cluster-policy[0]/cores", null)); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/ComputePlanActionTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/ComputePlanActionTest.java index 1f5c8e37bc5c..eab52d5f30cb 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/ComputePlanActionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/ComputePlanActionTest.java @@ -17,19 +17,6 @@ package org.apache.solr.cloud.autoscaling; -import java.io.IOException; -import java.lang.invoke.MethodHandles; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; - import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.cloud.NodeStateProvider; @@ -55,14 +42,18 @@ import org.apache.solr.common.util.Utils; import org.apache.solr.core.SolrResourceLoader; import org.apache.solr.util.LogLevel; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.lang.invoke.MethodHandles; +import java.util.*; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + import static java.nio.charset.StandardCharsets.UTF_8; import static org.apache.solr.common.params.CollectionParams.CollectionAction.MOVEREPLICA; @@ -76,6 +67,7 @@ public class ComputePlanActionTest extends SolrCloudTestCase { private static final AtomicBoolean fired = new AtomicBoolean(false); private static final int NODE_COUNT = 1; private static CountDownLatch triggerFiredLatch = new CountDownLatch(1); + @SuppressWarnings({"rawtypes"}) private static final AtomicReference actionContextPropsRef = new AtomicReference<>(); private static final AtomicReference eventRef = new AtomicReference<>(); private static SolrCloudManager cloudManager; @@ -122,6 +114,7 @@ public void setUp() throws Exception { " {'nodeRole':'overseer', 'replica':0}" + " ]" + "}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -198,6 +191,7 @@ public void testNodeLost() throws Exception { "'actions' : [{'name':'compute_plan', 'class' : 'solr.ComputePlanAction'}," + "{'name':'test','class':'" + ComputePlanActionTest.AssertingTriggerAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, 
setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -234,11 +228,14 @@ public void testNodeLost() throws Exception { assertTrue("Trigger was not fired even after 10 seconds", triggerFiredLatch.await(10, TimeUnit.SECONDS)); assertTrue(fired.get()); + @SuppressWarnings({"rawtypes"}) Map context = actionContextPropsRef.get(); assertNotNull(context); + @SuppressWarnings({"unchecked", "rawtypes"}) List operations = (List) context.get("operations"); assertNotNull("The operations computed by ComputePlanAction should not be null , "+ getNodeStateProviderState() + eventRef.get(), operations); assertEquals("ComputePlanAction should have computed exactly 1 operation", 1, operations.size()); + @SuppressWarnings({"rawtypes"}) SolrRequest solrRequest = operations.get(0); SolrParams params = solrRequest.getParams(); assertEquals("Expected MOVEREPLICA action after adding node", MOVEREPLICA, CollectionParams.CollectionAction.get(params.get("action"))); @@ -283,6 +280,7 @@ public void testNodeWithMultipleReplicasLost() throws Exception { "'actions' : [{'name':'compute_plan', 'class' : 'solr.ComputePlanAction'}," + "{'name':'test','class':'" + ComputePlanActionTest.AssertingTriggerAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -324,8 +322,10 @@ public void testNodeWithMultipleReplicasLost() throws Exception { assertEquals(TriggerEventType.NODELOST, triggerEvent.getEventType()); // TODO assertEquals(stoppedNodeName, triggerEvent.getProperty(TriggerEvent.NODE_NAME)); + @SuppressWarnings({"rawtypes"}) Map context = actionContextPropsRef.get(); assertNotNull(context); + @SuppressWarnings({"unchecked", "rawtypes"}) List operations = (List) context.get("operations"); assertNotNull("The operations computed by ComputePlanAction should not be null "+ getNodeStateProviderState() + actionContextPropsRef.get(), operations); if (log.isInfoEnabled()) { @@ -333,7 +333,7 @@ public void testNodeWithMultipleReplicasLost() throws Exception { } assertEquals("ComputePlanAction should have computed exactly 2 operation", 2, operations.size()); - for (SolrRequest solrRequest : operations) { + for (@SuppressWarnings({"rawtypes"})SolrRequest solrRequest : operations) { SolrParams params = solrRequest.getParams(); assertEquals("Expected MOVEREPLICA action after adding node", MOVEREPLICA, CollectionParams.CollectionAction.get(params.get("action"))); String moved = params.get("replica"); @@ -354,6 +354,7 @@ public void testNodeAdded() throws Exception { "'actions' : [{'name':'compute_plan', 'class' : 'solr.ComputePlanAction'}," + "{'name':'test','class':'" + ComputePlanActionTest.AssertingTriggerAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -393,11 +394,14 @@ public void testNodeAdded() throws Exception { JettySolrRunner runner = cluster.startJettySolrRunner(); assertTrue("Trigger was not fired even after 5 seconds", triggerFiredLatch.await(5, TimeUnit.SECONDS)); assertTrue(fired.get()); + @SuppressWarnings({"rawtypes"}) Map context = actionContextPropsRef.get(); assertNotNull(context); + @SuppressWarnings({"unchecked", 
"rawtypes"}) List operations = (List) context.get("operations"); assertNotNull("The operations computed by ComputePlanAction should not be null" + getNodeStateProviderState() + context, operations); assertEquals("ComputePlanAction should have computed exactly 1 operation", 1, operations.size()); + @SuppressWarnings({"rawtypes"}) SolrRequest request = operations.get(0); SolrParams params = request.getParams(); assertEquals("Expected MOVEREPLICA action after adding node", MOVEREPLICA, CollectionParams.CollectionAction.get(params.get("action"))); @@ -426,6 +430,7 @@ public String getName() { @Override public void process(TriggerEvent event, ActionContext context) { if (expectedNode != null) { + @SuppressWarnings({"rawtypes"}) Collection nodes = (Collection) event.getProperty(TriggerEvent.NODE_NAMES); if (nodes == null || !nodes.contains(expectedNode)) return;//this is not the event we are looking for } @@ -444,7 +449,34 @@ public void close() throws IOException { @Test //2018-06-18 (commented) @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 09-Apr-2018 - public void testSelectedCollections() throws Exception { + public void testSelectedCollectionsByName() throws Exception { + String collectionsFilter = "'testSelected1,testSelected2'"; + testCollectionsPredicate(collectionsFilter, Collections.emptyMap()); + } + + @Test + //2018-06-18 (commented) @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 09-Apr-2018 + public void testSelectedCollectionsByPolicy() throws Exception { + CloudSolrClient solrClient = cluster.getSolrClient(); + String setSearchPolicyCommand = "{" + + " 'set-policy': {" + + " 'search': [" + + " {'replica':'<5', 'shard': '#EACH', 'node': '#ANY'}," + + " ]" + + "}}"; + @SuppressWarnings({"rawtypes"}) + SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setSearchPolicyCommand); + NamedList response = solrClient.request(req); + assertEquals(response.get("result").toString(), "success"); + + String collectionsFilter = "{'policy': 'search'}"; + Map createCollectionParameters = new HashMap<>(); + createCollectionParameters.put("testSelected1", "search"); + createCollectionParameters.put("testSelected2", "search"); + testCollectionsPredicate(collectionsFilter, createCollectionParameters); + } + + private void testCollectionsPredicate(String collectionsFilter, Map createCollectionParameters) throws Exception { if (log.isInfoEnabled()) { log.info("Found number of jetties: {}", cluster.getJettySolrRunners().size()); } @@ -457,28 +489,38 @@ public void testSelectedCollections() throws Exception { CloudSolrClient solrClient = cluster.getSolrClient(); String setTriggerCommand = "{" + - "'set-trigger' : {" + - "'name' : 'node_lost_trigger'," + - "'event' : 'nodeLost'," + - "'waitFor' : '1s'," + - "'enabled' : true," + - "'actions' : [{'name':'compute_plan', 'class' : 'solr.ComputePlanAction', 'collections' : 'testSelected1,testSelected2'}," + - "{'name':'test','class':'" + ComputePlanActionTest.AssertingTriggerAction.class.getName() + "'}]" + - "}}"; + "'set-trigger' : {" + + "'name' : 'node_lost_trigger'," + + "'event' : 'nodeLost'," + + "'waitFor' : '1s'," + + "'enabled' : true," + + "'actions' : [{'name':'compute_plan', 'class' : 'solr.ComputePlanAction', 'collections' : " + collectionsFilter + "}," + + "{'name':'test','class':'" + ComputePlanActionTest.AssertingTriggerAction.class.getName() + "'}]" + + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, 
setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection("testSelected1", "conf", 2, 2); + if (createCollectionParameters.get("testSelected1") != null) { + create.setPolicy(createCollectionParameters.get("testSelected1")); + } create.process(solrClient); create = CollectionAdminRequest.createCollection("testSelected2", "conf", 2, 2); + if (createCollectionParameters.get("testSelected2") != null) { + create.setPolicy(createCollectionParameters.get("testSelected2")); + } create.process(solrClient); create = CollectionAdminRequest.createCollection("testSelected3", "conf", 2, 2); + if (createCollectionParameters.get("testSelected3") != null) { + create.setPolicy(createCollectionParameters.get("testSelected3")); + } create.process(solrClient); cluster.waitForActiveCollection("testSelected1", 2, 4); @@ -516,11 +558,14 @@ public void testSelectedCollections() throws Exception { } assertTrue("Trigger was not fired even after 5 seconds", triggerFiredLatch.await(5, TimeUnit.SECONDS)); assertTrue(fired.get()); + @SuppressWarnings({"rawtypes"}) Map context = actionContextPropsRef.get(); assertNotNull(context); + @SuppressWarnings({"unchecked", "rawtypes"}) List operations = (List) context.get("operations"); assertNotNull("The operations computed by ComputePlanAction should not be null. " + getNodeStateProviderState() + context, operations); assertEquals("ComputePlanAction should have computed exactly 2 operations", 2, operations.size()); + @SuppressWarnings({"rawtypes"}) SolrRequest request = operations.get(0); SolrParams params = request.getParams(); assertEquals("Expected MOVEREPLICA action after adding node", MOVEREPLICA, CollectionParams.CollectionAction.get(params.get("action"))); @@ -610,6 +655,7 @@ private void nodeAddedTriggerWithAddReplicaPreferredOp(String collectionNamePref } private void nodeAddedTriggerWithAddReplicaPreferredOp(String collectionNamePrefix, int numShards, int numCollections, String setTriggerCommand, String setClusterPolicyCommand, Integer nNrtReplicas, Integer nTlogReplicas, Integer nPullReplicas) throws Exception { CloudSolrClient solrClient = cluster.getSolrClient(); + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -631,7 +677,9 @@ private void nodeAddedTriggerWithAddReplicaPreferredOp(String collectionNamePref cluster.waitForAllNodes(30); assertTrue(triggerFiredLatch.await(30, TimeUnit.SECONDS)); assertTrue(fired.get()); + @SuppressWarnings({"rawtypes"}) Map actionContext = actionContextPropsRef.get(); + @SuppressWarnings({"rawtypes"}) List operations = (List) actionContext.get("operations"); assertNotNull(operations); assertEquals(numShards, operations.size()); @@ -698,6 +746,7 @@ public void testNodeLostTriggerWithDeleteNodePreferredOp() throws Exception { "{'name':'execute_plan','class':'solr.ExecutePlanAction'}" + "{'name':'test','class':'" + AssertingTriggerAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -728,7 +777,9 @@ public void testNodeLostTriggerWithDeleteNodePreferredOp() throws Exception { 
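Aside: every autoscaling test in this patch drives the framework the same way — a JSON command string posted through the CloudTestUtils.AutoScalingRequest helper shown above, followed by an assert on the "result" key. Stripped of test-specific actions, the skeleton is as follows (solrClient and the helper are as in the surrounding tests; the trigger name and waitFor value are illustrative):

    String setTriggerCommand = "{" +
        "'set-trigger' : {" +
        "'name' : 'node_lost_trigger'," +
        "'event' : 'nodeLost'," +
        "'waitFor' : '1s'," +
        "'enabled' : true," +
        "'actions' : [{'name':'compute_plan', 'class' : 'solr.ComputePlanAction'}," +
        "{'name':'execute_plan', 'class' : 'solr.ExecutePlanAction'}]" +
        "}}";
    @SuppressWarnings({"rawtypes"})
    SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand);
    NamedList<Object> response = solrClient.request(req);
    assertEquals(response.get("result").toString(), "success");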
cluster.stopJettySolrRunner(newNode); assertTrue(triggerFiredLatch.await(30, TimeUnit.SECONDS)); assertTrue(fired.get()); + @SuppressWarnings({"rawtypes"}) Map actionContext = actionContextPropsRef.get(); + @SuppressWarnings({"rawtypes"}) List operations = (List) actionContext.get("operations"); assertNotNull(operations); assertEquals(1, operations.size()); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/ExecutePlanActionTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/ExecutePlanActionTest.java index d286faf57b2e..25a7616eb2c6 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/ExecutePlanActionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/ExecutePlanActionTest.java @@ -167,6 +167,7 @@ public void setAsyncId(String asyncId) { if (!children.isEmpty()) { String child = children.get(0); byte[] data = zkClient().getData(parentPath + "/" + child, null, null, true); + @SuppressWarnings({"rawtypes"}) Map m = (Map) Utils.fromJSON(data); if (m.containsKey("requestid")) { znodeCreated.set(m.get("requestid").equals(asyncId)); @@ -190,6 +191,7 @@ public void setAsyncId(String asyncId) { action.process(nodeLostEvent, actionContext); // assertTrue("ExecutePlanAction should have stored the requestid in ZK before executing the request", znodeCreated.get()); + @SuppressWarnings({"unchecked"}) List> responses = (List>) actionContext.getProperty("responses"); assertNotNull(responses); assertEquals(2, responses.size()); @@ -215,6 +217,7 @@ public void testIntegration() throws Exception { "'actions' : [{'name':'compute_plan', 'class' : 'solr.ComputePlanAction'}," + "{'name':'execute_plan','class':'solr.ExecutePlanAction'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -281,6 +284,7 @@ public void testTaskTimeout() throws Exception { "{'name':'execute_plan','class':'solr.ExecutePlanAction', 'taskTimeoutSeconds' : '1','taskTimeoutFail':'" + taskTimeoutFail + "'}," + "{'name':'finish','class':'" + FinishAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -339,6 +343,7 @@ public void testTaskFail() throws Exception { "{'name':'execute_plan','class':'solr.ExecutePlanAction'}," + "{'name':'finish','class':'" + FinishAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/HttpTriggerListenerTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/HttpTriggerListenerTest.java index af10586f2117..a712aee6c004 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/HttpTriggerListenerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/HttpTriggerListenerTest.java @@ -89,6 +89,7 @@ public void testHttpListenerIntegration() throws Exception { "{'name':'test','class':'" + TestDummyAction.class.getName() + "'}" + "]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, 
setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerMixedBoundsTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerMixedBoundsTest.java index 87f5b23354ea..2075fea06457 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerMixedBoundsTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerMixedBoundsTest.java @@ -120,6 +120,7 @@ public void onEvent(TriggerEvent event, TriggerEventProcessorStage stage, String } @Test + @SuppressWarnings({"unchecked"}) public void testMixedBounds() throws Exception { String collectionName = "testMixedBounds_collection"; CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, @@ -181,6 +182,7 @@ public void testMixedBounds() throws Exception { "'actions' : [{'name' : 'compute_plan', 'class' : 'solr.ComputePlanAction'}," + "{'name' : 'execute_plan', 'class' : '" + ExecutePlanAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerSizeEstimationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerSizeEstimationTest.java index c791d4cb3f53..150a67f00bd8 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerSizeEstimationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerSizeEstimationTest.java @@ -184,6 +184,7 @@ public void testEstimatedIndexSize() throws Exception { "'actions' : [{'name' : 'compute_plan', 'class' : 'solr.ComputePlanAction'}," + "{'name' : 'execute_plan', 'class' : '" + ExecutePlanAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -250,12 +251,14 @@ public void testEstimatedIndexSize() throws Exception { assertNotNull(listenerEvents.toString(), events); assertFalse("empty events?", events.isEmpty()); CapturedEvent ev = events.get(0); + @SuppressWarnings({"unchecked"}) List ops = (List< TriggerEvent.Op>)ev.event.properties.get(TriggerEvent.REQUESTED_OPS); assertNotNull("no requested ops in " + ev, ops); assertFalse("empty list of ops in " + ev, ops.isEmpty()); Set parentShards = new HashSet<>(); ops.forEach(op -> { assertTrue(op.toString(), op.getAction() == CollectionParams.CollectionAction.SPLITSHARD); + @SuppressWarnings({"unchecked"}) Collection> hints = (Collection>)op.getHints().get(Suggester.Hint.COLL_SHARD); assertNotNull("no hints in op " + op, hints); hints.forEach(h -> parentShards.add(h.second())); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerTest.java index 4852b861657f..b937668f0fbb 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerTest.java @@ -197,6 +197,7 @@ public void testTrigger() throws Exception { trigger.run(); ev = 
eventRef.get(); assertNotNull("should have fired an event", ev); + @SuppressWarnings({"unchecked"}) List ops = (List) ev.getProperty(TriggerEvent.REQUESTED_OPS); assertNotNull("should contain requestedOps", ops); assertEquals("number of ops: " + ops, 2, ops.size()); @@ -204,6 +205,7 @@ public void testTrigger() throws Exception { boolean shard2 = false; for (TriggerEvent.Op op : ops) { assertEquals(CollectionParams.CollectionAction.SPLITSHARD, op.getAction()); + @SuppressWarnings({"unchecked"}) Set> hints = (Set>)op.getHints().get(Suggester.Hint.COLL_SHARD); assertNotNull("hints", hints); assertEquals("hints", 1, hints.size()); @@ -216,6 +218,7 @@ public void testTrigger() throws Exception { } else { fail("unexpected shard name " + p.second()); } + @SuppressWarnings({"unchecked"}) Map params = (Map)op.getHints().get(Suggester.Hint.PARAMS); assertNotNull("params are null: " + op, params); @@ -281,6 +284,7 @@ public void testSplitIntegration() throws Exception { "'actions' : [{'name' : 'compute_plan', 'class' : 'solr.ComputePlanAction'}," + "{'name' : 'execute_plan', 'class' : '" + ExecutePlanAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -346,6 +350,7 @@ public void testSplitIntegration() throws Exception { assertEquals(TriggerEventProcessorStage.AFTER_ACTION, events.get(4).stage); assertEquals(TriggerEventProcessorStage.SUCCEEDED, events.get(5).stage); // check ops + @SuppressWarnings({"unchecked"}) List ops = (List) events.get(4).event.getProperty(TriggerEvent.REQUESTED_OPS); assertNotNull("should contain requestedOps", ops); assertEquals("number of ops", 2, ops.size()); @@ -353,6 +358,7 @@ public void testSplitIntegration() throws Exception { boolean shard2 = false; for (TriggerEvent.Op op : ops) { assertEquals(CollectionParams.CollectionAction.SPLITSHARD, op.getAction()); + @SuppressWarnings({"unchecked"}) Set> hints = (Set>)op.getHints().get(Suggester.Hint.COLL_SHARD); assertNotNull("hints", hints); assertEquals("hints", 1, hints.size()); @@ -409,6 +415,7 @@ public void testMergeIntegration() throws Exception { "'actions' : [{'name' : 'compute_plan', 'class' : 'solr.ComputePlanAction'}," + "{'name' : 'execute_plan', 'class' : '" + ExecutePlanAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -472,11 +479,13 @@ public void testMergeIntegration() throws Exception { assertEquals(TriggerEventProcessorStage.AFTER_ACTION, events.get(4).stage); assertEquals(TriggerEventProcessorStage.SUCCEEDED, events.get(5).stage); // check ops + @SuppressWarnings({"unchecked"}) List ops = (List) events.get(4).event.getProperty(TriggerEvent.REQUESTED_OPS); assertNotNull("should contain requestedOps", ops); assertTrue("number of ops: " + ops, ops.size() > 0); for (TriggerEvent.Op op : ops) { assertEquals(CollectionParams.CollectionAction.MERGESHARDS, op.getAction()); + @SuppressWarnings({"unchecked"}) Set> hints = (Set>)op.getHints().get(Suggester.Hint.COLL_SHARD); assertNotNull("hints", hints); assertEquals("hints", 2, hints.size()); @@ -485,6 +494,7 @@ public void testMergeIntegration() throws Exception { } // TODO: fix this once MERGESHARDS is supported + 
@SuppressWarnings({"unchecked"}) List unsupportedOps = (List)events.get(2).context.get("properties.unsupportedOps"); assertNotNull("should have unsupportedOps", unsupportedOps); assertEquals(unsupportedOps.toString() + "\n" + ops, ops.size(), unsupportedOps.size()); @@ -492,6 +502,7 @@ public void testMergeIntegration() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testMaxOps() throws Exception { String collectionName = "testMaxOps_collection"; CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, @@ -512,6 +523,7 @@ public void testMaxOps() throws Exception { "'enabled' : false," + "'actions' : [{'name' : 'compute_plan', 'class' : 'solr.ComputePlanAction'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -684,6 +696,7 @@ public void testSplitConfig() throws Exception { trigger.run(); ev = eventRef.get(); assertNotNull("should have fired an event", ev); + @SuppressWarnings({"unchecked"}) List ops = (List) ev.getProperty(TriggerEvent.REQUESTED_OPS); assertNotNull("should contain requestedOps", ops); assertEquals("number of ops: " + ops, 2, ops.size()); @@ -691,6 +704,7 @@ public void testSplitConfig() throws Exception { boolean shard2 = false; for (TriggerEvent.Op op : ops) { assertEquals(CollectionParams.CollectionAction.SPLITSHARD, op.getAction()); + @SuppressWarnings({"unchecked"}) Set> hints = (Set>)op.getHints().get(Suggester.Hint.COLL_SHARD); assertNotNull("hints", hints); assertEquals("hints", 1, hints.size()); @@ -703,6 +717,7 @@ public void testSplitConfig() throws Exception { } else { fail("unexpected shard name " + p.second()); } + @SuppressWarnings({"unchecked"}) Map params = (Map)op.getHints().get(Suggester.Hint.PARAMS); assertNotNull("params are null: " + op, params); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/MetricTriggerIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/MetricTriggerIntegrationTest.java index da06a75410de..bba3096d1058 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/MetricTriggerIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/MetricTriggerIntegrationTest.java @@ -115,6 +115,7 @@ public void testMetricTrigger() throws Exception { "{'name':'test','class':'" + MetricAction.class.getName() + "'}" + "]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeAddedTriggerIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeAddedTriggerIntegrationTest.java index 08bf6eaab8d7..bbd24826b48d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeAddedTriggerIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeAddedTriggerIntegrationTest.java @@ -126,6 +126,7 @@ private void deleteChildrenRecursively(String path) throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testNodeAddedTriggerRestoreState() throws Exception { final String triggerName = "node_added_restore_trigger"; @@ -241,6 +242,7 @@ public void testNodeAddedTrigger() throws Exception { 
assertTrue(triggerFired.get()); NodeAddedTrigger.NodeAddedEvent nodeAddedEvent = (NodeAddedTrigger.NodeAddedEvent) events.iterator().next(); assertNotNull(nodeAddedEvent); + @SuppressWarnings({"unchecked"}) List nodeNames = (List) nodeAddedEvent.getProperty(TriggerEvent.NODE_NAMES); assertTrue(nodeNames.contains(newNode.getNodeName())); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeAddedTriggerTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeAddedTriggerTest.java index a2b820f09dc3..88cdcc34b6f2 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeAddedTriggerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeAddedTriggerTest.java @@ -118,6 +118,7 @@ public void testTrigger() throws Exception { TriggerEvent nodeAddedEvent = eventRef.get(); assertNotNull(nodeAddedEvent); + @SuppressWarnings({"unchecked"}) List nodeNames = (List)nodeAddedEvent.getProperty(TriggerEvent.NODE_NAMES); assertTrue(nodeNames.contains(newNode1.getNodeName())); assertTrue(nodeNames.contains(newNode2.getNodeName())); @@ -172,6 +173,7 @@ public void testTrigger() throws Exception { public void testActionLifecycle() throws Exception { CoreContainer container = cluster.getJettySolrRunners().get(0).getCoreContainer(); Map props = createTriggerProps(0); + @SuppressWarnings({"unchecked"}) List> actions = (List>) props.get("actions"); Map action = new HashMap<>(2); action.put("name", "testActionInit"); @@ -298,6 +300,7 @@ public void testRestoreState() throws Exception { if (currentTimeNanos - eventTimeNanos <= waitForNanos) { fail("NodeAddedListener was fired before the configured waitFor period: currentTimeNanos=" + currentTimeNanos + ", eventTimeNanos=" + eventTimeNanos + ",waitForNanos=" + waitForNanos); } + @SuppressWarnings({"unchecked"}) List nodeNames = (List) event.getProperty(NodeAddedTrigger.NodeAddedEvent.NODE_NAMES); if (nodeNames.contains(newNode.getNodeName())) { stop.set(true); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeLostTriggerIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeLostTriggerIntegrationTest.java index 06f20df65418..ef52267bcaa4 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeLostTriggerIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeLostTriggerIntegrationTest.java @@ -129,6 +129,7 @@ private void deleteChildrenRecursively(String path) throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testNodeLostTriggerRestoreState() throws Exception { final String triggerName = "node_lost_restore_trigger"; @@ -276,6 +277,7 @@ public void testNodeLostTrigger() throws Exception { assertTrue(triggerFired.get()); NodeLostTrigger.NodeLostEvent nodeLostEvent = (NodeLostTrigger.NodeLostEvent) events.iterator().next(); assertNotNull(nodeLostEvent); + @SuppressWarnings({"unchecked"}) List nodeNames = (List) nodeLostEvent.getProperty(TriggerEvent.NODE_NAMES); assertTrue(nodeNames.contains(lostNodeName)); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeLostTriggerTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeLostTriggerTest.java index bf55a85acc2c..5d417f987e12 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeLostTriggerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeLostTriggerTest.java @@ -120,6 +120,7 @@ public void testTrigger() throws Exception { TriggerEvent nodeLostEvent = eventRef.get(); 
assertNotNull(nodeLostEvent); + @SuppressWarnings({"unchecked"}) List nodeNames = (List)nodeLostEvent.getProperty(TriggerEvent.NODE_NAMES); assertTrue(nodeNames + " doesn't contain " + lostNodeName1, nodeNames.contains(lostNodeName1)); assertTrue(nodeNames + " doesn't contain " + lostNodeName2, nodeNames.contains(lostNodeName2)); @@ -189,6 +190,7 @@ public void testTrigger() throws Exception { public void testActionLifecycle() throws Exception { CoreContainer container = cluster.getJettySolrRunners().get(0).getCoreContainer(); Map props = createTriggerProps(0); + @SuppressWarnings({"unchecked"}) List> actions = (List>) props.get("actions"); Map action = new HashMap<>(2); action.put("name", "testActionInit"); @@ -366,6 +368,7 @@ public void testRestoreState() throws Exception { TriggerEvent nodeLostEvent = eventRef.get(); assertNotNull(nodeLostEvent); + @SuppressWarnings({"unchecked"}) List nodeNames = (List)nodeLostEvent.getProperty(TriggerEvent.NODE_NAMES); assertTrue(nodeNames.contains(lostNodeName)); } diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeMarkersRegistrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeMarkersRegistrationTest.java index 849c5c81f9ba..5ed30c04c8de 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeMarkersRegistrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeMarkersRegistrationTest.java @@ -179,6 +179,7 @@ public void testNodeMarkersRegistration() throws Exception { "'enabled' : true," + "'actions' : [{'name':'test','class':'" + TestEventMarkerAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -274,6 +275,7 @@ public void testNodeMarkersRegistration() throws Exception { } assertEquals(1, events.size()); TriggerEvent ev = events.iterator().next(); + @SuppressWarnings({"unchecked"}) List nodeNames = (List) ev.getProperty(TriggerEvent.NODE_NAMES); assertTrue(nodeNames.contains(overseerLeader)); assertEquals(TriggerEventType.NODELOST, ev.getEventType()); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/RestoreTriggerStateTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/RestoreTriggerStateTest.java index 66fac4cac58e..7fa4dc747017 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/RestoreTriggerStateTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/RestoreTriggerStateTest.java @@ -85,6 +85,7 @@ public void testEventFromRestoredState() throws Exception { "'enabled' : true," + "'actions' : [{'name':'test','class':'" + TestTriggerAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -116,6 +117,7 @@ public void testEventFromRestoredState() throws Exception { triggerFiredLatch = new CountDownLatch(1); NodeAddedTrigger.NodeAddedEvent nodeAddedEvent = (NodeAddedTrigger.NodeAddedEvent) events.iterator().next(); assertNotNull(nodeAddedEvent); + @SuppressWarnings({"unchecked"}) List nodeNames = (List) nodeAddedEvent.getProperty(TriggerEvent.NODE_NAMES); assertTrue(nodeNames.contains(newNode.getNodeName())); // add a second node - state of the trigger will change but it won't fire for 
waitFor sec. diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/ScheduledMaintenanceTriggerTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/ScheduledMaintenanceTriggerTest.java index cb222a3f5b86..68808e19e47e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/ScheduledMaintenanceTriggerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/ScheduledMaintenanceTriggerTest.java @@ -91,6 +91,7 @@ public void initTest() throws Exception { String suspendTriggerCommand = "{" + "'suspend-trigger' : {'name' : '.scheduled_maintenance'}" + "}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, suspendTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -127,6 +128,7 @@ private void stopNode(String nodeName) throws Exception { @After public void restoreDefaults() throws Exception { + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, "{'set-trigger' : " + AutoScaling.SCHEDULED_MAINTENANCE_TRIGGER_DSL + "}"); NamedList response = solrClient.request(req); @@ -200,6 +202,7 @@ public void process(TriggerEvent event, ActionContext context) throws Exception @Test @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 17-Mar-2018 + @SuppressWarnings({"unchecked"}) public void testInactiveShardCleanup() throws Exception { String collection1 = getClass().getSimpleName() + "_collection1"; CollectionAdminRequest.Create create1 = CollectionAdminRequest.createCollection(collection1, @@ -231,6 +234,7 @@ public void testInactiveShardCleanup() throws Exception { "'class' : '" + CapturingTriggerListener.class.getName() + "'" + "}" + "}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setListenerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -337,6 +341,7 @@ public void testInactiveMarkersCleanup() throws Exception { "'actions' : [" + "{'name' : 'test', 'class' : '" + TestTriggerAction2.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/ScheduledTriggerIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/ScheduledTriggerIntegrationTest.java index af6a761d6de2..63c0e70d7c8c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/ScheduledTriggerIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/ScheduledTriggerIntegrationTest.java @@ -94,6 +94,7 @@ public void testScheduledTrigger() throws Exception { " {\"cores\" : \"<2\", \"node\" : \"#EACH\"}\n" + " ]\n" + "}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicy); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -122,10 +123,11 @@ public void testScheduledTrigger() throws Exception { Map actionContextProps = actionContextPropertiesRef.get(); assertNotNull(actionContextProps); TriggerEvent event = events.iterator().next(); + @SuppressWarnings({"unchecked", "rawtypes"}) List operations = (List) actionContextProps.get("operations"); 
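The matching rawtypes suppressions follow the same rule. AutoScalingRequest.create, the test helper used throughout these files, hands back a raw SolrRequest, so only its declaration is annotated. A hedged sketch of the call shape (the command string stands in for the set-trigger/set-listener JSON bodies built above; the helper is illustrative):

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.SolrRequest;
    import org.apache.solr.cloud.autoscaling.AutoScalingRequest;
    import org.apache.solr.common.util.NamedList;

    class AutoScalingCommand {
      // 'command' is whatever autoscaling JSON the calling test builds.
      static void send(SolrClient solrClient, String command) throws Exception {
        @SuppressWarnings({"rawtypes"})
        SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, command);
        NamedList<Object> response = solrClient.request(req);
        // The tests above assert the handler answered {"result":"success"}.
        if (!"success".equals(String.valueOf(response.get("result")))) {
          throw new AssertionError("autoscaling command rejected: " + response);
        }
      }
    }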
assertNotNull(operations); assertEquals(1, operations.size()); - for (SolrRequest operation : operations) { + for (@SuppressWarnings({"rawtypes"})SolrRequest operation : operations) { SolrParams params = operation.getParams(); assertEquals(newNode.getNodeName(), params.get("targetNode")); } diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/SearchRateTriggerIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/SearchRateTriggerIntegrationTest.java index 46e419b417ff..15ab70101131 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/SearchRateTriggerIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/SearchRateTriggerIntegrationTest.java @@ -126,6 +126,7 @@ private void deleteChildrenRecursively(String path) throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testAboveSearchRate() throws Exception { CloudSolrClient solrClient = cluster.getSolrClient(); String COLL1 = "aboveRate_collection"; @@ -278,6 +279,7 @@ public void testAboveSearchRate() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testBelowSearchRate() throws Exception { CloudSolrClient solrClient = cluster.getSolrClient(); String COLL1 = "belowRate_collection"; @@ -630,6 +632,7 @@ public void testDeleteNode() throws Exception { CapturedEvent ev = events.get(0); assertEquals(ev.toString(), "compute", ev.actionName); + @SuppressWarnings({"unchecked"}) List ops = (List)ev.event.getProperty(TriggerEvent.REQUESTED_OPS); assertNotNull("there should be some requestedOps: " + ev.toString(), ops); // 4 DELETEREPLICA, 4 DELETENODE (minReplicas==1 & leader should be protected) @@ -664,6 +667,7 @@ public void testDeleteNode() throws Exception { // check status ev = events.get(1); assertEquals(ev.toString(), "execute", ev.actionName); + @SuppressWarnings({"unchecked"}) List> responses = (List>)ev.context.get("properties.responses"); assertNotNull(ev.toString(), responses); assertEquals(responses.toString(), 8, responses.size()); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/SearchRateTriggerTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/SearchRateTriggerTest.java index d3b523d87a28..41bbd8bb79b9 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/SearchRateTriggerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/SearchRateTriggerTest.java @@ -89,6 +89,7 @@ public void after() throws Exception { @Test @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") + @SuppressWarnings({"unchecked"}) public void testTrigger() throws Exception { JettySolrRunner targetNode = cluster.getJettySolrRunner(0); SolrZkClient zkClient = cluster.getSolrClient().getZkStateReader().getZkClient(); @@ -214,6 +215,7 @@ public void testTrigger() throws Exception { private static final AtomicDouble mockRate = new AtomicDouble(); @Test + @SuppressWarnings({"unchecked"}) public void testWaitForElapsed() throws Exception { SolrResourceLoader loader = cluster.getJettySolrRunner(0).getCoreContainer().getResourceLoader(); CloudSolrClient solrClient = cluster.getSolrClient(); @@ -315,6 +317,7 @@ public void testDefaultsAndBackcompat() throws Exception { try (SearchRateTrigger trigger = new SearchRateTrigger("search_rate_trigger2")) { trigger.configure(loader, cloudManager, props); Map config = trigger.getConfig(); + @SuppressWarnings({"unchecked"}) Set collections = (Set)config.get(SearchRateTrigger.COLLECTIONS_PROP); assertEquals(collections.toString(), 1, 
collections.size()); assertEquals("test", collections.iterator().next()); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/SystemLogListenerTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/SystemLogListenerTest.java index ee9750e00daf..d2f7c238b6c9 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/SystemLogListenerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/SystemLogListenerTest.java @@ -61,6 +61,7 @@ public class SystemLogListenerTest extends SolrCloudTestCase { private static final AtomicBoolean fired = new AtomicBoolean(false); private static final int NODE_COUNT = 3; private static CountDownLatch triggerFiredLatch = new CountDownLatch(1); + @SuppressWarnings({"rawtypes"}) private static final AtomicReference actionContextPropsRef = new AtomicReference<>(); private static final AtomicReference eventRef = new AtomicReference<>(); @@ -111,6 +112,7 @@ public void test() throws Exception { "{'name':'test','class':'" + AssertingTriggerAction.class.getName() + "'}," + "{'name':'error','class':'" + ErrorTriggerAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -158,6 +160,7 @@ public void test() throws Exception { assertTrue("Trigger was not fired ", triggerFiredLatch.await(60, TimeUnit.SECONDS)); assertTrue(fired.get()); + @SuppressWarnings({"rawtypes"}) Map context = actionContextPropsRef.get(); assertNotNull(context); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/TestPolicyCloud.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/TestPolicyCloud.java index de7522ea2ba7..9899e946616c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/TestPolicyCloud.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/TestPolicyCloud.java @@ -151,6 +151,7 @@ public void testDataProviderPerReplicaDetails() throws Exception { " ]" + " }" + "}"; + @SuppressWarnings({"unchecked"}) AutoScalingConfig config = new AutoScalingConfig((Map) Utils.fromJSONString(autoScaleJson)); AtomicInteger count = new AtomicInteger(0); try (SolrCloudManager cloudManager = new SolrClientCloudManager(new ZkDistributedQueueFactory(cluster.getZkClient()), cluster.getSolrClient())) { @@ -351,6 +352,7 @@ public void testMetricsTag() throws Exception { " {'metrics:abc':'overseer', 'replica':0}" + " ]" + "}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand); try { solrClient.request(req); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/TriggerCooldownIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/TriggerCooldownIntegrationTest.java index 6f377568a22d..b7f40cf6e0b1 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/TriggerCooldownIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/TriggerCooldownIntegrationTest.java @@ -88,6 +88,7 @@ public void testCooldown() throws Exception { "{'name':'test','class':'" + TestTriggerAction.class.getName() + "'}" + "]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); diff --git 
a/solr/core/src/test/org/apache/solr/cloud/autoscaling/TriggerEventQueueTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/TriggerEventQueueTest.java index 6e83c6bb7789..9dc8169faf32 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/TriggerEventQueueTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/TriggerEventQueueTest.java @@ -57,6 +57,7 @@ public void init() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testSerialization() throws Exception { TriggerEventQueue queue = new TriggerEventQueue(cloudManager, "test", null); Map hotHosts = new HashMap<>(); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/TriggerIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/TriggerIntegrationTest.java index 0f9d009c6aa6..73cfcfa139d3 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/TriggerIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/TriggerIntegrationTest.java @@ -196,6 +196,7 @@ public void testTriggerThrottling() throws Exception { "'enabled' : true," + "'actions' : [{'name':'test','class':'" + ThrottlingTesterAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -352,6 +353,7 @@ public void testContinueTriggersOnOverseerRestart() throws Exception { "'enabled' : true," + "'actions' : [{'name':'test','class':'" + TestTriggerAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -369,6 +371,7 @@ public void testContinueTriggersOnOverseerRestart() throws Exception { assertTrue(triggerFired.get()); NodeAddedTrigger.NodeAddedEvent nodeAddedEvent = (NodeAddedTrigger.NodeAddedEvent) events.iterator().next(); assertNotNull(nodeAddedEvent); + @SuppressWarnings({"unchecked"}) List nodeNames = (List)nodeAddedEvent.getProperty(TriggerEvent.NODE_NAMES); assertTrue(nodeNames.contains(newNode.getNodeName())); } @@ -466,6 +469,7 @@ public void testEventQueue() throws Exception { break; } } + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -575,6 +579,7 @@ public void testListeners() throws Exception { "{'name':'test1','class':'" + TestDummyAction.class.getName() + "'}," + "]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimSolrCloudTestCase.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimSolrCloudTestCase.java index 607adc74cc2c..3d2e31489a12 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimSolrCloudTestCase.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimSolrCloudTestCase.java @@ -225,6 +225,7 @@ public static void assertReplicaEquals(Replica one, Replica two) { assertReplicaPropsEquals(one.getProperties(), two.getProperties()); } + 
@SuppressWarnings({"unchecked"}) public static void assertReplicaInfoEquals(ReplicaInfo one, ReplicaInfo two) { assertEquals(one.getName(), two.getName()); assertEquals(one.getNode(), two.getNode()); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimComputePlanAction.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimComputePlanAction.java index aea4b2ef41b0..9cc3e1a27abc 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimComputePlanAction.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimComputePlanAction.java @@ -69,6 +69,7 @@ public class TestSimComputePlanAction extends SimSolrCloudTestCase { private static final AtomicBoolean fired = new AtomicBoolean(false); private static final int NODE_COUNT = 1; private static CountDownLatch triggerFiredLatch = new CountDownLatch(1); + @SuppressWarnings({"rawtypes"}) private static final AtomicReference actionContextPropsRef = new AtomicReference<>(); private static final AtomicReference eventRef = new AtomicReference<>(); @@ -86,6 +87,7 @@ public void init() throws Exception { " {'nodeRole':'overseer', 'replica':0}" + " ]" + "}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand); SolrResponse rsp = cluster.request(req); NamedList response = rsp.getResponse(); @@ -144,6 +146,7 @@ public void testNodeLost() throws Exception { "'actions' : [{'name':'compute_plan', 'class' : 'solr.ComputePlanAction'}," + "{'name':'test','class':'" + TestSimComputePlanAction.AssertingTriggerAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -175,11 +178,14 @@ public void testNodeLost() throws Exception { assertTrue("Trigger was not fired even after 10 seconds", triggerFiredLatch.await(10, TimeUnit.SECONDS)); assertTrue(fired.get()); + @SuppressWarnings({"rawtypes"}) Map context = actionContextPropsRef.get(); assertNotNull(context); + @SuppressWarnings({"unchecked", "rawtypes"}) List operations = (List) context.get("operations"); assertNotNull("The operations computed by ComputePlanAction should not be null , " + eventRef.get(), operations); assertEquals("ComputePlanAction should have computed exactly 1 operation", 1, operations.size()); + @SuppressWarnings({"rawtypes"}) SolrRequest solrRequest = operations.get(0); SolrParams params = solrRequest.getParams(); assertEquals("Expected MOVEREPLICA action after adding node", MOVEREPLICA, CollectionParams.CollectionAction.get(params.get("action"))); @@ -209,6 +215,7 @@ public void testNodeWithMultipleReplicasLost() throws Exception { "'actions' : [{'name':'compute_plan', 'class' : 'solr.ComputePlanAction'}," + "{'name':'test','class':'" + AssertingTriggerAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -249,8 +256,10 @@ public void testNodeWithMultipleReplicasLost() throws Exception { assertEquals(TriggerEventType.NODELOST, triggerEvent.getEventType()); // TODO assertEquals(stoppedNodeName, triggerEvent.getProperty(TriggerEvent.NODE_NAME)); + @SuppressWarnings({"rawtypes"}) Map context = actionContextPropsRef.get(); 
assertNotNull(context); + @SuppressWarnings({"unchecked", "rawtypes"}) List operations = (List) context.get("operations"); assertNotNull("The operations computed by ComputePlanAction should not be null " + actionContextPropsRef.get() + "\nevent: " + eventRef.get(), operations); if (log.isInfoEnabled()) { @@ -260,7 +269,7 @@ public void testNodeWithMultipleReplicasLost() throws Exception { // TODO: this can be 3! // assertEquals("ComputePlanAction should have computed exactly 2 operation", 2, operations.size()); - for (SolrRequest solrRequest : operations) { + for (@SuppressWarnings({"rawtypes"})SolrRequest solrRequest : operations) { SolrParams params = solrRequest.getParams(); assertEquals("Expected MOVEREPLICA action after adding node", MOVEREPLICA, CollectionParams.CollectionAction.get(params.get("action"))); String moved = params.get("replica"); @@ -285,6 +294,7 @@ public void testNodeAdded() throws Exception { "'actions' : [{'name':'compute_plan', 'class' : 'solr.ComputePlanAction'}," + "{'name':'test','class':'" + TestSimComputePlanAction.AssertingTriggerAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); @@ -328,6 +338,7 @@ public void testNodeAdded() throws Exception { String newNode = cluster.simAddNode(); assertTrue("Trigger was not fired even after 5 seconds", triggerFiredLatch.await(5, TimeUnit.SECONDS)); assertTrue(fired.get()); + @SuppressWarnings({"rawtypes"}) Map context = actionContextPropsRef.get(); assertNotNull(context); if (log.isInfoEnabled()) { @@ -336,12 +347,14 @@ public void testNodeAdded() throws Exception { , cluster.getClusterStateProvider().getLiveNodes(), cluster.getClusterStateProvider().getClusterState().getCollection("testNodeAdded")); // logOk } + @SuppressWarnings({"unchecked", "rawtypes"}) List operations = (List) context.get("operations"); assertNotNull("The operations computed by ComputePlanAction should not be null" + context, operations); // TODO: can be 2! 
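When the raw type is the element of a loop rather than a plain local, the annotation moves onto the loop variable itself, which is what the for (@SuppressWarnings({"rawtypes"})SolrRequest solrRequest : operations) rewrite above does. A sketch under the same assumption the test makes, namely that ComputePlanAction stores its computed requests under the "operations" context key:

    import java.util.List;

    import org.apache.solr.client.solrj.SolrRequest;
    import org.apache.solr.common.params.CollectionParams;
    import org.apache.solr.common.params.SolrParams;

    class OperationChecks {
      static void assertAllMoveReplica(Object fromContext) {
        // The list's type argument is the raw SolrRequest, hence both keys.
        @SuppressWarnings({"unchecked", "rawtypes"})
        List<SolrRequest> operations = (List<SolrRequest>) fromContext;
        for (@SuppressWarnings({"rawtypes"}) SolrRequest op : operations) {
          SolrParams params = op.getParams();
          // In the scenario above every computed operation is a MOVEREPLICA.
          if (CollectionParams.CollectionAction.get(params.get("action"))
              != CollectionParams.CollectionAction.MOVEREPLICA) {
            throw new AssertionError("unexpected action: " + params.get("action"));
          }
        }
      }
    }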
// assertEquals("ComputePlanAction should have computed exactly 1 operation, but was: " + operations, 1, operations.size()); + @SuppressWarnings({"rawtypes"}) SolrRequest request = operations.get(0); SolrParams params = request.getParams(); assertEquals("Expected MOVEREPLICA action after adding node", MOVEREPLICA, CollectionParams.CollectionAction.get(params.get("action"))); @@ -370,6 +383,7 @@ public String getName() { @Override public void process(TriggerEvent event, ActionContext context) { if (expectedNode != null) { + @SuppressWarnings({"rawtypes"}) Collection nodes = (Collection) event.getProperty(TriggerEvent.NODE_NAMES); if (nodes == null || !nodes.contains(expectedNode)) return;//this is not the event we are looking for } diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimExecutePlanAction.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimExecutePlanAction.java index 5117a2c86fcb..5919c1cffaec 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimExecutePlanAction.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimExecutePlanAction.java @@ -132,6 +132,7 @@ public void setAsyncId(String asyncId) { if (!children.isEmpty()) { String child = children.get(0); VersionedData data = cluster.getDistribStateManager().getData(parentPath + "/" + child); + @SuppressWarnings({"rawtypes"}) Map m = (Map) Utils.fromJSON(data.getData()); if (m.containsKey("requestid")) { znodeCreated.set(m.get("requestid").equals(asyncId)); @@ -153,6 +154,7 @@ public void setAsyncId(String asyncId) { action.process(nodeLostEvent, actionContext); // assertTrue("ExecutePlanAction should have stored the requestid in ZK before executing the request", znodeCreated.get()); + @SuppressWarnings({"unchecked"}) List> responses = (List>) actionContext.getProperty("responses"); assertNotNull(responses); assertEquals(2, responses.size()); @@ -180,6 +182,7 @@ public void testIntegration() throws Exception { "'actions' : [{'name':'compute_plan', 'class' : 'solr.ComputePlanAction'}," + "{'name':'execute_plan','class':'solr.ExecutePlanAction'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimExtremeIndexing.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimExtremeIndexing.java index 5c03d59f3f22..13fb8b4766a2 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimExtremeIndexing.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimExtremeIndexing.java @@ -115,6 +115,7 @@ public void testScaleUp() throws Exception { "'actions' : [{'name' : 'compute_plan', 'class' : 'solr.ComputePlanAction'}," + "{'name' : 'execute_plan', 'class' : '" + ExecutePlanAction.class.getName() + "'}]" + "}}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); NamedList response = solrClient.request(req); assertEquals(response.get("result").toString(), "success"); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimLargeCluster.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimLargeCluster.java index 16dde7eb3cf5..01336ef40e15 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimLargeCluster.java +++ 
b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimLargeCluster.java @@ -695,6 +695,7 @@ private long doTestNodeLost(int waitFor, long killDelay, int minIgnored) throws } @Test + @SuppressWarnings({"unchecked"}) public void testSearchRate() throws Exception { SolrClient solrClient = cluster.simGetSolrClient(); String collectionName = "testSearchRate"; diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimNodeAddedTrigger.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimNodeAddedTrigger.java index 928046a8822a..759715425d65 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimNodeAddedTrigger.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimNodeAddedTrigger.java @@ -115,6 +115,7 @@ public void testTrigger() throws Exception { TriggerEvent nodeAddedEvent = eventRef.get(); assertNotNull(nodeAddedEvent); + @SuppressWarnings({"unchecked"}) List nodeNames = (List)nodeAddedEvent.getProperty(TriggerEvent.NODE_NAMES); assertTrue(nodeNames.contains(newNode1)); assertTrue(nodeNames.contains(newNode2)); @@ -163,6 +164,7 @@ public void testTrigger() throws Exception { public void testActionLifecycle() throws Exception { Map props = createTriggerProps(0); + @SuppressWarnings({"unchecked"}) List> actions = (List>) props.get("actions"); Map action = new HashMap<>(2); action.put("name", "testActionInit"); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimNodeLostTrigger.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimNodeLostTrigger.java index 8eb6156b936b..f1d38aab8e50 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimNodeLostTrigger.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimNodeLostTrigger.java @@ -117,6 +117,7 @@ public void testTrigger() throws Exception { TriggerEvent nodeLostEvent = eventRef.get(); assertNotNull(nodeLostEvent); + @SuppressWarnings({"unchecked"}) List nodeNames = (List)nodeLostEvent.getProperty(TriggerEvent.NODE_NAMES); assertTrue(nodeNames + " doesn't contain " + lostNodeName1, nodeNames.contains(lostNodeName1)); assertTrue(nodeNames + " doesn't contain " + lostNodeName2, nodeNames.contains(lostNodeName2)); @@ -176,6 +177,7 @@ public void testTrigger() throws Exception { public void testActionLifecycle() throws Exception { Map props = createTriggerProps(0); + @SuppressWarnings({"unchecked"}) List> actions = (List>) props.get("actions"); Map action = new HashMap<>(2); action.put("name", "testActionInit"); @@ -322,6 +324,7 @@ public void testRestoreState() throws Exception { TriggerEvent nodeLostEvent = eventRef.get(); assertNotNull(nodeLostEvent); + @SuppressWarnings({"unchecked"}) List nodeNames = (List)nodeLostEvent.getProperty(TriggerEvent.NODE_NAMES); assertTrue(nodeNames.contains(newNode)); } diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimPolicyCloud.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimPolicyCloud.java index c5af182561ec..915aa3fba7ee 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimPolicyCloud.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimPolicyCloud.java @@ -98,6 +98,7 @@ public void testDataProviderPerReplicaDetails() throws Exception { " ]" + " }" + "}"; + @SuppressWarnings({"unchecked"}) AutoScalingConfig config = new AutoScalingConfig((Map) Utils.fromJSONString(autoScaleJson)); Policy.Session session = 
config.getPolicy().createSession(cluster); @@ -201,6 +202,7 @@ public void testMetricsTag() throws Exception { " {'metrics:abc':'overseer', 'replica':0}" + " ]" + "}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand); try { solrClient.request(req); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimScenario.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimScenario.java index c87fccf6e935..f2460627100b 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimScenario.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimScenario.java @@ -93,15 +93,19 @@ public void testSuggestions() throws Exception { scenario.context.put("iterative", "0"); scenario.context.put("justCalc", "1"); scenario.run(); + @SuppressWarnings({"unchecked"}) List suggestions = (List)scenario.context.get(SimScenario.SUGGESTIONS_CTX_PROP); assertNotNull(suggestions); assertEquals(suggestions.toString(), 1, suggestions.size()); // reconstruct the snapshot from the dump + @SuppressWarnings({"unchecked"}) Map snapshot = (Map)Utils.fromJSON(baos.toByteArray()); + @SuppressWarnings({"unchecked"}) Map autoscalingState = (Map)snapshot.get(SnapshotCloudManager.AUTOSCALING_STATE_KEY); assertNotNull(autoscalingState); assertEquals(autoscalingState.toString(), 1, autoscalingState.size()); assertTrue(autoscalingState.toString(), autoscalingState.containsKey("suggestions")); + @SuppressWarnings({"unchecked"}) List> snapSuggestions = (List>)autoscalingState.get("suggestions"); assertEquals(snapSuggestions.toString(), 1, snapSuggestions.size()); // _loop_iter_ should be present and 0 (first iteration) diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimTriggerIntegration.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimTriggerIntegration.java index 65baf60b7c15..a49a7396e0d9 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimTriggerIntegration.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimTriggerIntegration.java @@ -286,6 +286,7 @@ public void process(TriggerEvent event, ActionContext actionContext) { } @Test + @SuppressWarnings({"unchecked"}) public void testNodeLostTriggerRestoreState() throws Exception { final String triggerName = "node_lost_restore_trigger"; @@ -396,6 +397,7 @@ public void testNodeLostTriggerRestoreState() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testNodeAddedTriggerRestoreState() throws Exception { final String triggerName = "node_added_restore_trigger"; @@ -522,6 +524,7 @@ public void testNodeAddedTrigger() throws Exception { assertTrue(triggerFired.get()); TriggerEvent nodeAddedEvent = events.iterator().next(); assertNotNull(nodeAddedEvent); + @SuppressWarnings({"unchecked"}) List nodeNames = (List)nodeAddedEvent.getProperty(TriggerEvent.NODE_NAMES); assertTrue(nodeAddedEvent.toString(), nodeNames.contains(newNode)); @@ -575,6 +578,7 @@ public void testNodeLostTrigger() throws Exception { assertTrue(triggerFired.get()); TriggerEvent nodeLostEvent = events.iterator().next(); assertNotNull(nodeLostEvent); + @SuppressWarnings({"unchecked"}) List nodeNames = (List)nodeLostEvent.getProperty(TriggerEvent.NODE_NAMES); assertTrue(nodeNames.contains(lostNodeName)); @@ -831,6 +835,7 @@ public void testEventFromRestoredState() throws Exception { triggerFiredLatch = new CountDownLatch(1); TriggerEvent nodeAddedEvent = 
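The TestSimScenario changes stack several of these localized suppressions because Utils.fromJSON returns a plain Object: each level of the parsed snapshot needs its own cast. A sketch of that cast chain, using the same AUTOSCALING_STATE_KEY constant the test reads; the helper itself is illustrative:

    import java.util.Map;

    import org.apache.solr.cloud.autoscaling.sim.SnapshotCloudManager;
    import org.apache.solr.common.util.Utils;

    class SnapshotRead {
      // Parse a serialized snapshot dump and pull out its autoscaling section.
      @SuppressWarnings({"unchecked"})
      static Map<String, Object> autoscalingState(byte[] dump) {
        Map<String, Object> snapshot = (Map<String, Object>) Utils.fromJSON(dump);
        return (Map<String, Object>) snapshot.get(SnapshotCloudManager.AUTOSCALING_STATE_KEY);
      }
    }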
events.iterator().next(); assertNotNull(nodeAddedEvent); + @SuppressWarnings({"unchecked"}) List nodeNames = (List)nodeAddedEvent.getProperty(TriggerEvent.NODE_NAMES); assertTrue(nodeNames.contains(newNode)); // add a second node - state of the trigger will change but it won't fire for waitFor sec. @@ -1066,6 +1071,7 @@ public void testNodeMarkersRegistration() throws Exception { } assertEquals(1, events.size()); TriggerEvent ev = events.iterator().next(); + @SuppressWarnings({"unchecked"}) List nodeNames = (List) ev.getProperty(TriggerEvent.NODE_NAMES); assertTrue(nodeNames.contains(overseerLeader)); assertEquals(TriggerEventType.NODELOST, ev.getEventType()); @@ -1385,6 +1391,7 @@ public void process(TriggerEvent event, ActionContext context) throws Exception @Test + @SuppressWarnings({"unchecked"}) public void testSearchRate() throws Exception { SolrClient solrClient = cluster.simGetSolrClient(); String COLL1 = "collection1"; diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java index adc41cccd8bd..81d2f8f4a63b 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java @@ -121,10 +121,11 @@ public void testPersistance() throws Exception { public void testRedaction() throws Exception { Path tmpPath = createTempDir(); File tmpDir = tmpPath.toFile(); - SnapshotCloudManager snapshotCloudManager = new SnapshotCloudManager(realManager, null); Set redacted = new HashSet<>(realManager.getClusterStateProvider().getLiveNodes()); - redacted.addAll(realManager.getClusterStateProvider().getClusterState().getCollectionStates().keySet()); - snapshotCloudManager.saveSnapshot(tmpDir, true, true); + try (SnapshotCloudManager snapshotCloudManager = new SnapshotCloudManager(realManager, null)) { + redacted.addAll(realManager.getClusterStateProvider().getClusterState().getCollectionStates().keySet()); + snapshotCloudManager.saveSnapshot(tmpDir, true, true); + } for (String key : SnapshotCloudManager.REQUIRED_KEYS) { File src = new File(tmpDir, key + ".json"); assertTrue(src.toString() + " doesn't exist", src.exists()); @@ -185,6 +186,7 @@ public void testSimulatorFromSnapshot() throws Exception { } } + @SuppressWarnings({"unchecked"}) private static void assertNodeStateProvider(SolrCloudManager oneMgr, SolrCloudManager twoMgr, String... 
ignorableNodeValues) throws Exception { NodeStateProvider one = oneMgr.getNodeStateProvider(); NodeStateProvider two = twoMgr.getNodeStateProvider(); @@ -228,8 +230,8 @@ private static void assertNodeStateProvider(SolrCloudManager oneMgr, SolrCloudMa Pattern.compile("/autoscaling/triggerState/.*"), // some triggers may have run after the snapshot was taken Pattern.compile("/autoscaling/events/.*"), - // we always use format 1 in SimClusterStateProvider Pattern.compile("/clusterstate\\.json"), + Pattern.compile("/collections/[^/]+?/state.json"), // depending on the startup sequence leaders may differ Pattern.compile("/collections/[^/]+?/leader_elect/.*"), Pattern.compile("/collections/[^/]+?/leaders/.*"), @@ -254,6 +256,14 @@ private static void assertDistribStateManager(DistribStateManager one, DistribSt .filter(STATE_FILTER_FUN).collect(Collectors.toList())); Collections.sort(treeOne); Collections.sort(treeTwo); + if (!treeOne.equals(treeTwo)) { + List t1 = new ArrayList<>(treeOne); + t1.removeAll(treeTwo); + log.warn("Only in tree one: {}", t1); + List t2 = new ArrayList<>(treeTwo); + t2.removeAll(treeOne); + log.warn("Only in tree two: {}", t2); + } assertEquals(treeOne, treeTwo); for (String path : treeOne) { VersionedData vd1 = one.getData(path); diff --git a/solr/core/src/test/org/apache/solr/cloud/cdcr/BaseCdcrDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/cdcr/BaseCdcrDistributedZkTest.java index a0013e78303e..a6256aea407f 100644 --- a/solr/core/src/test/org/apache/solr/cloud/cdcr/BaseCdcrDistributedZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/cdcr/BaseCdcrDistributedZkTest.java @@ -174,6 +174,7 @@ public static void afterClass() throws Exception { } @Before + @SuppressWarnings({"rawtypes"}) public void baseBefore() throws Exception { this.createSourceCollection(); if (this.createTargetCollection) this.createTargetCollection(); @@ -283,6 +284,7 @@ protected void assertNumDocs(int expectedNumDocs, String collection) /** * Invokes a CDCR action on a given node. 
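Beyond suppressions, testRedaction above gains real fixes: the SnapshotCloudManager is now closed via try-with-resources, and a failing tree comparison logs the symmetric difference before asserting. A hedged sketch of the resource handling, assuming SnapshotCloudManager is closeable through the SolrCloudManager hierarchy; the two boolean flags simply mirror the test's saveSnapshot(tmpDir, true, true) call:

    import java.io.File;

    import org.apache.solr.client.solrj.cloud.SolrCloudManager;
    import org.apache.solr.cloud.autoscaling.sim.SnapshotCloudManager;

    class SnapshotSave {
      static void saveRedacted(SolrCloudManager realManager, File dir) throws Exception {
        // try-with-resources guarantees the snapshot manager is closed even if
        // saveSnapshot throws, which the previous code did not.
        try (SnapshotCloudManager snapshot = new SnapshotCloudManager(realManager, null)) {
          snapshot.saveSnapshot(dir, true, true);
        }
      }
    }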
*/ + @SuppressWarnings({"rawtypes"}) protected NamedList invokeCdcrAction(CloudJettyRunner jetty, CdcrParams.CdcrAction action) throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(); params.set(CommonParams.ACTION, action.toString()); @@ -294,13 +296,15 @@ protected NamedList invokeCdcrAction(CloudJettyRunner jetty, CdcrParams.CdcrActi } protected void waitForCdcrStateReplication(String collection) throws Exception { - log.info("Wait for CDCR state to replicate - collection: " + collection); + log.info("Wait for CDCR state to replicate - collection: {}", collection); int cnt = 30; while (cnt > 0) { + @SuppressWarnings({"rawtypes"}) NamedList status = null; boolean allEquals = true; for (CloudJettyRunner jetty : cloudJettys.get(collection)) { // check all replicas + @SuppressWarnings({"rawtypes"}) NamedList rsp = invokeCdcrAction(jetty, CdcrParams.CdcrAction.STATUS); if (status == null) { status = (NamedList) rsp.get(CdcrParams.CdcrAction.STATUS.toLower()); @@ -321,7 +325,7 @@ protected void waitForCdcrStateReplication(String collection) throws Exception { } } - log.info("CDCR state is identical across nodes - collection: " + collection); + log.info("CDCR state is identical across nodes - collection: {}", collection); } /** @@ -331,7 +335,9 @@ protected void assertState(String collection, CdcrParams.ProcessState processSta throws Exception { this.waitForCdcrStateReplication(collection); // ensure that cdcr state is replicated and stable for (CloudJettyRunner jetty : cloudJettys.get(collection)) { // check all replicas + @SuppressWarnings({"rawtypes"}) NamedList rsp = invokeCdcrAction(jetty, CdcrParams.CdcrAction.STATUS); + @SuppressWarnings({"rawtypes"}) NamedList status = (NamedList) rsp.get(CdcrParams.CdcrAction.STATUS.toLower()); assertEquals(processState.toLower(), status.get(CdcrParams.ProcessState.getParam())); assertEquals(bufferState.toLower(), status.get(CdcrParams.BufferState.getParam())); @@ -460,6 +466,7 @@ private CollectionAdminResponse createCollection(Map> coll collectionInfos.put(collectionName, list); } params.set("name", collectionName); + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); @@ -485,7 +492,7 @@ protected CollectionAdminResponse deleteCollection(String collectionName) throws res = new CollectionAdminResponse(); res.setResponse(client.request(request)); } catch (Exception e) { - log.warn("Error while deleting the collection " + collectionName, e); + log.warn("Error while deleting the collection {}", collectionName, e); return new CollectionAdminResponse(); } finally { client.close(); @@ -785,6 +792,7 @@ protected static SolrClient createNewSolrServer(String baseUrl) { } protected void waitForBootstrapToComplete(String collectionName, String shardId) throws Exception { + @SuppressWarnings({"rawtypes"}) NamedList rsp;// we need to wait until bootstrap is complete otherwise the replicator thread will never start TimeOut timeOut = new TimeOut(60, TimeUnit.SECONDS, TimeSource.NANO_TIME); while (!timeOut.hasTimedOut()) { @@ -812,8 +820,11 @@ protected void waitForReplicationToComplete(String collectionName, String shardI } protected long getQueueSize(String collectionName, String shardId) throws Exception { + @SuppressWarnings({"rawtypes"}) NamedList rsp = this.invokeCdcrAction(shardToLeaderJetty.get(collectionName).get(shardId), CdcrParams.CdcrAction.QUEUES); + @SuppressWarnings({"rawtypes"}) NamedList host = (NamedList) ((NamedList) rsp.get(CdcrParams.QUEUES)).getVal(0); + 
@SuppressWarnings({"rawtypes"}) NamedList status = (NamedList) host.get(TARGET_COLLECTION); return (Long) status.get(CdcrParams.QUEUE_SIZE); } diff --git a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBidirectionalTest.java b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBidirectionalTest.java index 4c7f15f606a7..fdd5317edf61 100644 --- a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBidirectionalTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBidirectionalTest.java @@ -49,10 +49,14 @@ public void testBiDir() throws Exception { MiniSolrCloudCluster cluster2 = new MiniSolrCloudCluster(1, createTempDir("cdcr-cluster2"), buildJettyConfig("/solr")); MiniSolrCloudCluster cluster1 = new MiniSolrCloudCluster(1, createTempDir("cdcr-cluster1"), buildJettyConfig("/solr")); try { - log.info("cluster2 zkHost = " + cluster2.getZkServer().getZkAddress()); + if (log.isInfoEnabled()) { + log.info("cluster2 zkHost = {}", cluster2.getZkServer().getZkAddress()); + } System.setProperty("cdcr.cluster2.zkHost", cluster2.getZkServer().getZkAddress()); - log.info("cluster1 zkHost = " + cluster1.getZkServer().getZkAddress()); + if (log.isInfoEnabled()) { + log.info("cluster1 zkHost = {}", cluster1.getZkServer().getZkAddress()); + } System.setProperty("cdcr.cluster1.zkHost", cluster1.getZkServer().getZkAddress()); @@ -89,7 +93,7 @@ public void testBiDir() throws Exception { req.add(doc); } req.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); - log.info("Adding " + docs + " docs with commit=true, numDocs=" + numDocs_c1); + log.info("Adding {} docs with commit=true, numDocs={}", docs, numDocs_c1); req.process(cluster1SolrClient); } @@ -112,7 +116,7 @@ public void testBiDir() throws Exception { req.add(doc); } req.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); - log.info("Adding " + docs + " docs with commit=true, numDocs=" + numDocs_c2); + log.info("Adding {} docs with commit=true, numDocs= {}", docs, numDocs_c2); req.process(cluster2SolrClient); } @@ -125,9 +129,13 @@ public void testBiDir() throws Exception { // logging cdcr clusters queue response response = CdcrTestsUtil.getCdcrQueue(cluster1SolrClient); - log.info("Cdcr cluster1 queue response: " + response.getResponse()); + if (log.isInfoEnabled()) { + log.info("Cdcr cluster1 queue response: {}", response.getResponse()); + } response = CdcrTestsUtil.getCdcrQueue(cluster2SolrClient); - log.info("Cdcr cluster2 queue response: " + response.getResponse()); + if (log.isInfoEnabled()) { + log.info("Cdcr cluster2 queue response: {}", response.getResponse()); + } // lets find and keep the maximum version assigned by cluster1 & cluster2 across all our updates @@ -150,8 +158,8 @@ public void testBiDir() throws Exception { Long checkpoint_1 = (Long) response.getResponse().get(CdcrParams.CHECKPOINT); assertNotNull(checkpoint_1); - log.info("v1: " + maxVersion_c1 + "\t" + "v2: " + maxVersion_c2 + "\t" + - "checkpoint1: " + checkpoint_1 + "\t" + "checkpoint2: " + checkpoint_2); + log.info("v1: {}\tv2: {}\tcheckpoint1: {}\tcheckpoint2: {}" + , maxVersion_c1, maxVersion_c2, checkpoint_1, checkpoint_2); assertEquals("COLLECTIONCHECKPOINT from cluster2 should have returned the maximum " + "version across all updates made to cluster1", maxVersion_c1, checkpoint_2.longValue()); @@ -203,9 +211,13 @@ public void testBiDir() throws Exception { // logging cdcr clusters queue response response = CdcrTestsUtil.getCdcrQueue(cluster1SolrClient); - log.info("Cdcr cluster1 queue response at end of testcase: " + response.getResponse()); 
+ if (log.isInfoEnabled()) { + log.info("Cdcr cluster1 queue response at end of testcase: {}", response.getResponse()); + } response = CdcrTestsUtil.getCdcrQueue(cluster2SolrClient); - log.info("Cdcr cluster2 queue response at end of testcase: " + response.getResponse()); + if (log.isInfoEnabled()) { + log.info("Cdcr cluster2 queue response at end of testcase: {}", response.getResponse()); + } CdcrTestsUtil.cdcrStop(cluster1SolrClient); CdcrTestsUtil.cdcrStop(cluster2SolrClient); diff --git a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBootstrapTest.java b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBootstrapTest.java index 70c9f26c4df7..af4b0a618ce8 100644 --- a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBootstrapTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBootstrapTest.java @@ -63,7 +63,9 @@ public void testConvertClusterToCdcrAndBootstrap() throws Exception { // start the target first so that we know its zkhost MiniSolrCloudCluster target = new MiniSolrCloudCluster(1, createTempDir("cdcr-target"), buildJettyConfig("/solr")); try { - log.info("Target zkHost = " + target.getZkServer().getZkAddress()); + if (log.isInfoEnabled()) { + log.info("Target zkHost = {}", target.getZkServer().getZkAddress()); + } System.setProperty("cdcr.target.zkHost", target.getZkServer().getZkAddress()); // start a cluster with no cdcr @@ -120,7 +122,9 @@ public void testConvertClusterToCdcrAndBootstrap() throws Exception { CdcrTestsUtil.cdcrStart(sourceSolrClient); response = CdcrTestsUtil.getCdcrQueue(sourceSolrClient); - log.info("Cdcr queue response: " + response.getResponse()); + if (log.isInfoEnabled()) { + log.info("Cdcr queue response: {}", response.getResponse()); + } long foundDocs = CdcrTestsUtil.waitForClusterToSync(numDocs, targetSolrClient); assertEquals("Document mismatch on target after sync", numDocs, foundDocs); assertTrue(CdcrTestsUtil.assertShardInSync("cdcr-target", "shard1", targetSolrClient)); // with more than 1 replica @@ -155,7 +159,7 @@ private int indexDocs(CloudSolrClient sourceSolrClient, String collection, int b req.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); req.process(sourceSolrClient); } - log.info("Adding numDocs=" + numDocs); + log.info("Adding numDocs={}", numDocs); return numDocs; } /** @@ -197,7 +201,9 @@ public void testBootstrapWithSourceCluster() throws Exception { CdcrTestsUtil.cdcrStart(sourceSolrClient); response = CdcrTestsUtil.getCdcrQueue(sourceSolrClient); - log.info("Cdcr queue response: " + response.getResponse()); + if (log.isInfoEnabled()) { + log.info("Cdcr queue response: {}", response.getResponse()); + } long foundDocs = CdcrTestsUtil.waitForClusterToSync(numDocs, targetSolrClient); assertEquals("Document mismatch on target after sync", numDocs, foundDocs); @@ -220,7 +226,7 @@ public void testBootstrapWithSourceCluster() throws Exception { req.add(doc); } req.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); - log.info("Adding 100 docs with commit=true, numDocs=" + numDocs); + log.info("Adding 100 docs with commit=true, numDocs={}", numDocs); req.process(sourceSolrClient); } @@ -280,7 +286,9 @@ public void testBootstrapWithMultipleReplicas() throws Exception { CdcrTestsUtil.cdcrStart(sourceSolrClient); response = CdcrTestsUtil.getCdcrQueue(sourceSolrClient); - log.info("Cdcr queue response: " + response.getResponse()); + if (log.isInfoEnabled()) { + log.info("Cdcr queue response: {}", response.getResponse()); + } long foundDocs = CdcrTestsUtil.waitForClusterToSync(numDocs, 
targetSolrClient); assertEquals("Document mismatch on target after sync", numDocs, foundDocs); assertTrue("leader followers didnt' match", CdcrTestsUtil.assertShardInSync("cdcr-target", "shard1", targetSolrClient)); // with more than 1 replica @@ -301,7 +309,9 @@ public void testBootstrapWithContinousIndexingOnSourceCluster() throws Exception // start the target first so that we know its zkhost MiniSolrCloudCluster target = new MiniSolrCloudCluster(1, createTempDir("cdcr-target"), buildJettyConfig("/solr")); try { - log.info("Target zkHost = " + target.getZkServer().getZkAddress()); + if (log.isInfoEnabled()) { + log.info("Target zkHost = {}", target.getZkServer().getZkAddress()); + } System.setProperty("cdcr.target.zkHost", target.getZkServer().getZkAddress()); MiniSolrCloudCluster source = new MiniSolrCloudCluster(1, createTempDir("cdcr-source"), buildJettyConfig("/solr")); @@ -340,7 +350,7 @@ public void testBootstrapWithContinousIndexingOnSourceCluster() throws Exception req.add(doc); } req.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); - log.info("Adding " + docs + " docs with commit=true, numDocs=" + numDocs); + log.info("Adding {} docs with commit=true, numDocs={}", docs, numDocs); req.process(sourceSolrClient); } @@ -348,7 +358,9 @@ public void testBootstrapWithContinousIndexingOnSourceCluster() throws Exception assertEquals("", numDocs, response.getResults().getNumFound()); response = CdcrTestsUtil.getCdcrQueue(sourceSolrClient); - log.info("Cdcr queue response: " + response.getResponse()); + if (log.isInfoEnabled()) { + log.info("Cdcr queue response: {}", response.getResponse()); + } long foundDocs = CdcrTestsUtil.waitForClusterToSync(numDocs, targetSolrClient); assertEquals("Document mismatch on target after sync", numDocs, foundDocs); diff --git a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrOpsAndBoundariesTest.java b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrOpsAndBoundariesTest.java index 4827a126374d..2eb8d9f8fe25 100644 --- a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrOpsAndBoundariesTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrOpsAndBoundariesTest.java @@ -70,7 +70,7 @@ public void after() throws Exception { * Check the ops statistics. 
*/ @Test - // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 14-Oct-2018 + @SuppressWarnings({"rawtypes"}) public void testOps() throws Exception { createCollections(); @@ -150,9 +150,12 @@ public void testTargetCollectionNotAvailable() throws Exception { while (cnt > 0) { try { QueryResponse rsp = CdcrTestsUtil.invokeCdcrAction(sourceSolrClient, CdcrParams.CdcrAction.ERRORS); + @SuppressWarnings({"rawtypes"}) NamedList collections = (NamedList) ((NamedList) rsp.getResponse().get(CdcrParams.ERRORS)).getVal(0); + @SuppressWarnings({"rawtypes"}) NamedList errors = (NamedList) collections.get(TARGET_COLLECTION); assertTrue(0 < (Long) errors.get(CdcrParams.CONSECUTIVE_ERRORS)); + @SuppressWarnings({"rawtypes"}) NamedList lastErrors = (NamedList) errors.get(CdcrParams.LAST); assertNotNull(lastErrors); assertTrue(0 < lastErrors.size()); diff --git a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrReplicationHandlerTest.java b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrReplicationHandlerTest.java index 264b62465e21..7bd371f6bfc5 100644 --- a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrReplicationHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrReplicationHandlerTest.java @@ -276,7 +276,9 @@ public void run() { } index(SOURCE_COLLECTION, docs); numDocs.getAndAdd(10); - log.info("Sent batch of {} updates - numDocs:{}", docs.size(), numDocs); + if (log.isInfoEnabled()) { + log.info("Sent batch of {} updates - numDocs:{}", docs.size(), numDocs); + } } catch (Exception e) { throw new RuntimeException(e); diff --git a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrRequestHandlerTest.java index e12c693e6012..0944a610c063 100644 --- a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrRequestHandlerTest.java @@ -42,7 +42,9 @@ public void testLifeCycleActions() throws Exception { this.assertState(SOURCE_COLLECTION, CdcrParams.ProcessState.STOPPED, CdcrParams.BufferState.ENABLED); // send start action to first shard + @SuppressWarnings({"rawtypes"}) NamedList rsp = invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD1), CdcrParams.CdcrAction.START); + @SuppressWarnings({"rawtypes"}) NamedList status = (NamedList) rsp.get(CdcrParams.CdcrAction.STATUS.toLower()); assertEquals(CdcrParams.ProcessState.STARTED.toLower(), status.get(CdcrParams.ProcessState.getParam())); @@ -69,6 +71,7 @@ public void testLifeCycleActions() throws Exception { @ShardsFixed(num = 2) public void testCheckpointActions() throws Exception { // initial request on an empty index, must return -1 + @SuppressWarnings({"rawtypes"}) NamedList rsp = invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD1), CdcrParams.CdcrAction.COLLECTIONCHECKPOINT); assertEquals(-1l, rsp.get(CdcrParams.CHECKPOINT)); @@ -152,7 +155,9 @@ public void testBufferActions() throws Exception { this.assertState(SOURCE_COLLECTION, CdcrParams.ProcessState.STOPPED, CdcrParams.BufferState.ENABLED); // send disable buffer action to first shard + @SuppressWarnings({"rawtypes"}) NamedList rsp = invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD1), CdcrParams.CdcrAction.DISABLEBUFFER); + @SuppressWarnings({"rawtypes"}) NamedList status = (NamedList) rsp.get(CdcrParams.CdcrAction.STATUS.toLower()); assertEquals(CdcrParams.BufferState.DISABLED.toLower(), 
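The CdcrParams responses are untyped NamedList trees, so the ERRORS handling above needs a raw cast at every hop. A hedged sketch of that navigation; the key names are passed in rather than hard-coded because only the constants shown above (CdcrParams.ERRORS, TARGET_COLLECTION, CdcrParams.CONSECUTIVE_ERRORS) are authoritative:

    import org.apache.solr.common.util.NamedList;

    class CdcrErrorNav {
      @SuppressWarnings({"rawtypes"})
      static Long consecutiveErrors(NamedList rsp, String errorsKey,
                                    String targetCollection, String counterKey) {
        // Each get() returns Object; the handler exposes no generic structure.
        NamedList collections = (NamedList) ((NamedList) rsp.get(errorsKey)).getVal(0);
        NamedList errors = (NamedList) collections.get(targetCollection);
        return (Long) errors.get(counterKey);
      }
    }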
status.get(CdcrParams.BufferState.getParam())); diff --git a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrTestsUtil.java b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrTestsUtil.java index 5ea1aa9be074..869e5be32d41 100644 --- a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrTestsUtil.java +++ b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrTestsUtil.java @@ -105,7 +105,7 @@ protected static Object getFingerPrintMaxVersion(CloudSolrClient client, String } Thread.sleep(200); } - log.error("maxVersionEncountered not found for client : " + client + "in 20 attempts"); + log.error("maxVersionEncountered not found for client : {} in 20 attempts", client); return null; } @@ -236,7 +236,9 @@ public static int numberOfFiles(String dir) { if (!file.isDirectory()) { assertTrue("Path to tlog " + dir + " does not exist or it's not a directory.", false); } - log.debug("Update log dir {} contains: {}", dir, file.listFiles()); + if (log.isDebugEnabled()) { + log.debug("Update log dir {} contains: {}", dir, file.listFiles()); + } return file.listFiles().length; } diff --git a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrVersionReplicationTest.java b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrVersionReplicationTest.java index ff9afe2c6c40..6953a3240187 100644 --- a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrVersionReplicationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrVersionReplicationTest.java @@ -82,7 +82,7 @@ public void testCdcrDocVersions() throws Exception { private void doTestCdcrDocVersions(SolrClient solrClient) throws Exception { this.solrServer = solrClient; - log.info("### STARTING doCdcrTestDocVersions - Add commands, client: " + solrClient); + log.info("### STARTING doCdcrTestDocVersions - Add commands, client: {}", solrClient); vadd("doc1", 10, CdcrUpdateProcessor.CDCR_UPDATE, "", vfield, "10"); vadd("doc2", 11, CdcrUpdateProcessor.CDCR_UPDATE, "", vfield, "11"); diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java index 04f93875f9bf..2cf162423313 100644 --- a/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java @@ -193,7 +193,9 @@ private void createAndDeleteCollection() throws Exception { NamedList response = c.query( new SolrQuery().setRequestHandler("/admin/system")).getResponse(); + @SuppressWarnings({"unchecked"}) NamedList coreInfo = (NamedList) response.get("core"); + @SuppressWarnings({"unchecked"}) String dataDir = (String) ((NamedList) coreInfo.get("directory")).get("data"); dataDirs.add(dataDir); } diff --git a/solr/core/src/test/org/apache/solr/cloud/overseer/TestClusterStateMutator.java b/solr/core/src/test/org/apache/solr/cloud/overseer/TestClusterStateMutator.java index 67e32444852f..0be579cf4f30 100644 --- a/solr/core/src/test/org/apache/solr/cloud/overseer/TestClusterStateMutator.java +++ b/solr/core/src/test/org/apache/solr/cloud/overseer/TestClusterStateMutator.java @@ -39,7 +39,7 @@ public static void beforeClass() { } public void testCreateCollection() throws Exception { - ClusterState clusterState = new ClusterState(-1, Collections.emptySet(), Collections.emptyMap()); + ClusterState clusterState = new ClusterState(Collections.emptySet(), Collections.emptyMap()); DistribStateManager mockStateManager = mock(DistribStateManager.class); SolrCloudManager dataProvider = mock(SolrCloudManager.class);
when(dataProvider.getDistribStateManager()).thenReturn(mockStateManager); @@ -55,7 +55,7 @@ public void testCreateCollection() throws Exception { assertEquals(1, collection.getSlicesMap().size()); assertEquals(1, collection.getMaxShardsPerNode()); - ClusterState state = new ClusterState(-1, Collections.emptySet(), Collections.singletonMap("xyz", collection)); + ClusterState state = new ClusterState(Collections.emptySet(), Collections.singletonMap("xyz", collection)); message = new ZkNodeProps(Utils.makeMap( "name", "abc", "numShards", "2", diff --git a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkCollectionPropsCachingTest.java b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkCollectionPropsCachingTest.java index a765ada9594c..604aec578fb7 100644 --- a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkCollectionPropsCachingTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkCollectionPropsCachingTest.java @@ -46,11 +46,7 @@ public class ZkCollectionPropsCachingTest extends SolrCloudTestCase { @BeforeClass public static void setupClass() throws Exception { - Boolean useLegacyCloud = rarely(); - log.info("Using legacyCloud?: {}", useLegacyCloud); - configureCluster(4) - .withProperty(ZkStateReader.LEGACY_CLOUD, String.valueOf(useLegacyCloud)) .addConfig("conf", configset("cloud-minimal")) .configure(); } diff --git a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java index f4c5bb273273..2bf0971de44f 100644 --- a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java @@ -19,7 +19,6 @@ import java.nio.file.Path; import java.util.Collections; import java.util.HashMap; -import java.util.Map; import java.util.concurrent.TimeUnit; import org.apache.lucene.util.IOUtils; @@ -34,109 +33,12 @@ import org.apache.solr.common.cloud.SolrZkClient; import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.util.TimeSource; -import org.apache.solr.common.util.Utils; import org.apache.solr.util.TimeOut; public class ZkStateReaderTest extends SolrTestCaseJ4 { private static final long TIMEOUT = 30; - /** Uses explicit refresh to ensure latest changes are visible. */ - public void testStateFormatUpdateWithExplicitRefresh() throws Exception { - testStateFormatUpdate(true, true); - } - - /** Uses explicit refresh to ensure latest changes are visible. */ - public void testStateFormatUpdateWithExplicitRefreshLazy() throws Exception { - testStateFormatUpdate(true, false); - } - - /** ZkStateReader should automatically pick up changes based on ZK watches. */ - public void testStateFormatUpdateWithTimeDelay() throws Exception { - testStateFormatUpdate(false, true); - } - - /** ZkStateReader should automatically pick up changes based on ZK watches. 
*/ - public void testStateFormatUpdateWithTimeDelayLazy() throws Exception { - testStateFormatUpdate(false, false); - } - - public void testStateFormatUpdate(boolean explicitRefresh, boolean isInteresting) throws Exception { - Path zkDir = createTempDir("testStateFormatUpdate"); - - ZkTestServer server = new ZkTestServer(zkDir); - - SolrZkClient zkClient = null; - ZkStateReader reader = null; - - try { - server.run(); - - zkClient = new SolrZkClient(server.getZkAddress(), OverseerTest.DEFAULT_CONNECTION_TIMEOUT); - ZkController.createClusterZkNodes(zkClient); - - reader = new ZkStateReader(zkClient); - reader.createClusterStateWatchersAndUpdate(); - if (isInteresting) { - reader.registerCore("c1"); - } - - ZkStateWriter writer = new ZkStateWriter(reader, new Stats()); - - zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c1", true); - - { - // create new collection with stateFormat = 1 - DocCollection stateV1 = new DocCollection("c1", new HashMap<>(), new HashMap<>(), DocRouter.DEFAULT, 0, ZkStateReader.CLUSTER_STATE); - ZkWriteCommand c1 = new ZkWriteCommand("c1", stateV1); - writer.enqueueUpdate(reader.getClusterState(), Collections.singletonList(c1), null); - writer.writePendingUpdates(); - - Map map = (Map) Utils.fromJSON(zkClient.getData("/clusterstate.json", null, null, true)); - assertNotNull(map.get("c1")); - boolean exists = zkClient.exists(ZkStateReader.COLLECTIONS_ZKNODE + "/c1/state.json", true); - assertFalse(exists); - - if (explicitRefresh) { - reader.forceUpdateCollection("c1"); - } else { - reader.waitForState("c1", TIMEOUT, TimeUnit.SECONDS, (n, c) -> c != null); - } - - DocCollection collection = reader.getClusterState().getCollection("c1"); - assertEquals(1, collection.getStateFormat()); - } - - - { - // Now update the collection to stateFormat = 2 - DocCollection stateV2 = new DocCollection("c1", new HashMap<>(), new HashMap<>(), DocRouter.DEFAULT, 0, ZkStateReader.COLLECTIONS_ZKNODE + "/c1/state.json"); - ZkWriteCommand c2 = new ZkWriteCommand("c1", stateV2); - writer.enqueueUpdate(reader.getClusterState(), Collections.singletonList(c2), null); - writer.writePendingUpdates(); - - Map map = (Map) Utils.fromJSON(zkClient.getData("/clusterstate.json", null, null, true)); - assertNull(map.get("c1")); - boolean exists = zkClient.exists(ZkStateReader.COLLECTIONS_ZKNODE + "/c1/state.json", true); - assertTrue(exists); - - if (explicitRefresh) { - reader.forceUpdateCollection("c1"); - } else { - reader.waitForState("c1", TIMEOUT, TimeUnit.SECONDS, - (n, c) -> c != null && c.getStateFormat() == 2); - } - - DocCollection collection = reader.getClusterState().getCollection("c1"); - assertEquals(2, collection.getStateFormat()); - } - } finally { - IOUtils.close(reader, zkClient); - server.shutdown(); - - } - } - public void testExternalCollectionWatchedNotWatched() throws Exception{ Path zkDir = createTempDir("testExternalCollectionWatchedNotWatched"); ZkTestServer server = new ZkTestServer(zkDir); @@ -156,9 +58,9 @@ public void testExternalCollectionWatchedNotWatched() throws Exception{ zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c1", true); - // create new collection with stateFormat = 2 + // create new collection ZkWriteCommand c1 = new ZkWriteCommand("c1", - new DocCollection("c1", new HashMap<>(), new HashMap<>(), DocRouter.DEFAULT, 0, ZkStateReader.COLLECTIONS_ZKNODE + "/c1/state.json")); + new DocCollection("c1", new HashMap<>(), new HashMap<>(), DocRouter.DEFAULT, 0)); writer.enqueueUpdate(reader.getClusterState(), Collections.singletonList(c1), null); 
writer.writePendingUpdates(); reader.forceUpdateCollection("c1"); @@ -195,14 +97,14 @@ public void testCollectionStateWatcherCaching() throws Exception { zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c1", true); ZkStateWriter writer = new ZkStateWriter(reader, new Stats()); - DocCollection state = new DocCollection("c1", new HashMap<>(), new HashMap<>(), DocRouter.DEFAULT, 0, ZkStateReader.CLUSTER_STATE + "/c1/state.json"); + DocCollection state = new DocCollection("c1", new HashMap<>(), new HashMap<>(), DocRouter.DEFAULT, 0); ZkWriteCommand wc = new ZkWriteCommand("c1", state); writer.enqueueUpdate(reader.getClusterState(), Collections.singletonList(wc), null); writer.writePendingUpdates(); assertTrue(zkClient.exists(ZkStateReader.COLLECTIONS_ZKNODE + "/c1/state.json", true)); reader.waitForState("c1", 1, TimeUnit.SECONDS, (liveNodes, collectionState) -> collectionState != null); - state = new DocCollection("c1", new HashMap<>(), Collections.singletonMap("x", "y"), DocRouter.DEFAULT, 0, ZkStateReader.CLUSTER_STATE + "/c1/state.json"); + state = new DocCollection("c1", new HashMap<>(), Collections.singletonMap("x", "y"), DocRouter.DEFAULT, 0); wc = new ZkWriteCommand("c1", state); writer.enqueueUpdate(reader.getClusterState(), Collections.singletonList(wc), null); writer.writePendingUpdates(); @@ -253,8 +155,8 @@ public void testWatchedCollectionCreation() throws Exception { ZkStateWriter writer = new ZkStateWriter(reader, new Stats()); - // create new collection with stateFormat = 2 - DocCollection state = new DocCollection("c1", new HashMap<>(), new HashMap<>(), DocRouter.DEFAULT, 0, ZkStateReader.CLUSTER_STATE + "/c1/state.json"); + // create new collection + DocCollection state = new DocCollection("c1", new HashMap<>(), new HashMap<>(), DocRouter.DEFAULT, 0); ZkWriteCommand wc = new ZkWriteCommand("c1", state); writer.enqueueUpdate(reader.getClusterState(), Collections.singletonList(wc), null); writer.writePendingUpdates(); @@ -266,7 +168,6 @@ public void testWatchedCollectionCreation() throws Exception { ClusterState.CollectionRef ref = reader.getClusterState().getCollectionRef("c1"); assertNotNull(ref); assertFalse(ref.isLazilyLoaded()); - assertEquals(2, ref.get().getStateFormat()); } finally { IOUtils.close(reader, zkClient); server.shutdown(); diff --git a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java index bc2b42d5e82d..8bc1d79b188e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java @@ -80,12 +80,9 @@ public void testZkStateWriterBatching() throws Exception { zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c2", true); zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c3", true); - ZkWriteCommand c1 = new ZkWriteCommand("c1", - new DocCollection("c1", new HashMap<>(), new HashMap<>(), DocRouter.DEFAULT, 0, ZkStateReader.COLLECTIONS_ZKNODE + "/c1")); - ZkWriteCommand c2 = new ZkWriteCommand("c2", - new DocCollection("c2", new HashMap<>(), new HashMap<>(), DocRouter.DEFAULT, 0, ZkStateReader.COLLECTIONS_ZKNODE + "/c2")); - ZkWriteCommand c3 = new ZkWriteCommand("c3", - new DocCollection("c3", new HashMap<>(), new HashMap<>(), DocRouter.DEFAULT, 0, ZkStateReader.COLLECTIONS_ZKNODE + "/c3")); + ZkWriteCommand c1 = new ZkWriteCommand("c1", new DocCollection("c1", new HashMap<>(), new HashMap<>(), DocRouter.DEFAULT, 0)); + ZkWriteCommand c2 = new 
ZkWriteCommand("c2", new DocCollection("c2", new HashMap<>(), new HashMap<>(), DocRouter.DEFAULT, 0)); + ZkWriteCommand c3 = new ZkWriteCommand("c3", new DocCollection("c3", new HashMap<>(), new HashMap<>(), DocRouter.DEFAULT, 0)); ZkStateWriter writer = new ZkStateWriter(reader, new Stats()); // First write is flushed immediately @@ -110,46 +107,6 @@ public void testZkStateWriterBatching() throws Exception { } } - public void testSingleLegacyCollection() throws Exception { - Path zkDir = createTempDir("testSingleLegacyCollection"); - - ZkTestServer server = new ZkTestServer(zkDir); - - SolrZkClient zkClient = null; - - try { - server.run(); - - zkClient = new SolrZkClient(server.getZkAddress(), OverseerTest.DEFAULT_CONNECTION_TIMEOUT); - ZkController.createClusterZkNodes(zkClient); - - try (ZkStateReader reader = new ZkStateReader(zkClient)) { - reader.createClusterStateWatchersAndUpdate(); - - ZkStateWriter writer = new ZkStateWriter(reader, new Stats()); - - zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c1", true); - - // create new collection with stateFormat = 1 - ZkWriteCommand c1 = new ZkWriteCommand("c1", - new DocCollection("c1", new HashMap(), new HashMap(), DocRouter.DEFAULT, 0, ZkStateReader.CLUSTER_STATE)); - - writer.enqueueUpdate(reader.getClusterState(), Collections.singletonList(c1), null); - writer.writePendingUpdates(); - - Map map = (Map) Utils.fromJSON(zkClient.getData("/clusterstate.json", null, null, true)); - assertNotNull(map.get("c1")); - boolean exists = zkClient.exists(ZkStateReader.COLLECTIONS_ZKNODE + "/c1/state.json", true); - assertFalse(exists); - } - - } finally { - IOUtils.close(zkClient); - server.shutdown(); - - } - } - public void testSingleExternalCollection() throws Exception { Path zkDir = createTempDir("testSingleExternalCollection"); @@ -170,109 +127,30 @@ public void testSingleExternalCollection() throws Exception { zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c1", true); - // create new collection with stateFormat = 2 + // create new collection ZkWriteCommand c1 = new ZkWriteCommand("c1", - new DocCollection("c1", new HashMap(), new HashMap(), DocRouter.DEFAULT, 0, ZkStateReader.COLLECTIONS_ZKNODE + "/c1/state.json")); + new DocCollection("c1", new HashMap(), new HashMap(), DocRouter.DEFAULT, 0)); writer.enqueueUpdate(reader.getClusterState(), Collections.singletonList(c1), null); writer.writePendingUpdates(); - Map map = (Map) Utils.fromJSON(zkClient.getData("/clusterstate.json", null, null, true)); - assertNull(map.get("c1")); - map = (Map) Utils.fromJSON(zkClient.getData(ZkStateReader.COLLECTIONS_ZKNODE + "/c1/state.json", null, null, true)); + @SuppressWarnings({"rawtypes"}) + Map map = (Map) Utils.fromJSON(zkClient.getData(ZkStateReader.COLLECTIONS_ZKNODE + "/c1/state.json", null, null, true)); assertNotNull(map.get("c1")); } - } finally { IOUtils.close(zkClient); server.shutdown(); - } - - } - public void testExternalModificationToSharedClusterState() throws Exception { + public void testExternalModification() throws Exception { Path zkDir = createTempDir("testExternalModification"); ZkTestServer server = new ZkTestServer(zkDir); SolrZkClient zkClient = null; - try { - server.run(); - - zkClient = new SolrZkClient(server.getZkAddress(), OverseerTest.DEFAULT_CONNECTION_TIMEOUT); - ZkController.createClusterZkNodes(zkClient); - - try (ZkStateReader reader = new ZkStateReader(zkClient)) { - reader.createClusterStateWatchersAndUpdate(); - - ZkStateWriter writer = new ZkStateWriter(reader, new Stats()); - - 
zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c1", true); - zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c2", true); - - // create collection 1 with stateFormat = 1 - ZkWriteCommand c1 = new ZkWriteCommand("c1", - new DocCollection("c1", new HashMap(), new HashMap(), DocRouter.DEFAULT, 0, ZkStateReader.CLUSTER_STATE)); - writer.enqueueUpdate(reader.getClusterState(), Collections.singletonList(c1), null); - writer.writePendingUpdates(); - - reader.forceUpdateCollection("c1"); - reader.forceUpdateCollection("c2"); - ClusterState clusterState = reader.getClusterState(); // keep a reference to the current cluster state object - assertTrue(clusterState.hasCollection("c1")); - assertFalse(clusterState.hasCollection("c2")); - - // Simulate an external modification to /clusterstate.json - byte[] data = zkClient.getData("/clusterstate.json", null, null, true); - zkClient.setData("/clusterstate.json", data, true); - - // enqueue another c1 so that ZkStateWriter has pending updates - writer.enqueueUpdate(clusterState, Collections.singletonList(c1), null); - assertTrue(writer.hasPendingUpdates()); - - // Will trigger flush - Thread.sleep(Overseer.STATE_UPDATE_DELAY + 100); - ZkWriteCommand c2 = new ZkWriteCommand("c2", - new DocCollection("c2", new HashMap(), new HashMap(), DocRouter.DEFAULT, 0, ZkStateReader.getCollectionPath("c2"))); - - try { - writer.enqueueUpdate(clusterState, Collections.singletonList(c2), null); // we are sending in the old cluster state object - fail("Enqueue should not have succeeded"); - } catch (KeeperException.BadVersionException bve) { - // expected - } - - try { - writer.enqueueUpdate(reader.getClusterState(), Collections.singletonList(c2), null); - fail("enqueueUpdate after BadVersionException should not have succeeded"); - } catch (IllegalStateException e) { - // expected - } - - try { - writer.writePendingUpdates(); - fail("writePendingUpdates after BadVersionException should not have succeeded"); - } catch (IllegalStateException e) { - // expected - } - } - } finally { - IOUtils.close(zkClient); - server.shutdown(); - } - - } - - public void testExternalModificationToStateFormat2() throws Exception { - Path zkDir = createTempDir("testExternalModificationToStateFormat2"); - - ZkTestServer server = new ZkTestServer(zkDir); - - SolrZkClient zkClient = null; - try { server.run(); @@ -289,14 +167,13 @@ public void testExternalModificationToStateFormat2() throws Exception { ClusterState state = reader.getClusterState(); - // create collection 2 with stateFormat = 2 + // create collection 2 ZkWriteCommand c2 = new ZkWriteCommand("c2", - new DocCollection("c2", new HashMap(), new HashMap(), DocRouter.DEFAULT, 0, ZkStateReader.getCollectionPath("c2"))); + new DocCollection("c2", new HashMap(), new HashMap(), DocRouter.DEFAULT, 0)); state = writer.enqueueUpdate(state, Collections.singletonList(c2), null); assertFalse(writer.hasPendingUpdates()); // first write is flushed immediately - int sharedClusterStateVersion = state.getZkClusterStateVersion(); - int stateFormat2Version = state.getCollection("c2").getZNodeVersion(); + int c2Version = state.getCollection("c2").getZNodeVersion(); // Simulate an external modification to /collections/c2/state.json byte[] data = zkClient.getData(ZkStateReader.getCollectionPath("c2"), null, null, true); @@ -307,8 +184,7 @@ public void testExternalModificationToStateFormat2() throws Exception { state = reader.getClusterState(); log.info("Cluster state: {}", state); assertTrue(state.hasCollection("c2")); - 
assertEquals(sharedClusterStateVersion, (int) state.getZkClusterStateVersion()); - assertEquals(stateFormat2Version + 1, state.getCollection("c2").getZNodeVersion()); + assertEquals(c2Version + 1, state.getCollection("c2").getZNodeVersion()); writer.enqueueUpdate(state, Collections.singletonList(c2), null); assertTrue(writer.hasPendingUpdates()); @@ -320,7 +196,7 @@ public void testExternalModificationToStateFormat2() throws Exception { // Will trigger flush Thread.sleep(Overseer.STATE_UPDATE_DELAY+100); ZkWriteCommand c1 = new ZkWriteCommand("c1", - new DocCollection("c1", new HashMap(), new HashMap(), DocRouter.DEFAULT, 0, ZkStateReader.CLUSTER_STATE)); + new DocCollection("c1", new HashMap(), new HashMap(), DocRouter.DEFAULT, 0)); try { writer.enqueueUpdate(state, Collections.singletonList(c1), null); diff --git a/solr/core/src/test/org/apache/solr/cloud/rule/ImplicitSnitchTest.java b/solr/core/src/test/org/apache/solr/cloud/rule/ImplicitSnitchTest.java index b312e969a10f..175c373da146 100644 --- a/solr/core/src/test/org/apache/solr/cloud/rule/ImplicitSnitchTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/rule/ImplicitSnitchTest.java @@ -204,11 +204,13 @@ public void testExceptions() throws Exception { ImplicitSnitch implicitSnitch = new ImplicitSnitch(); ServerSnitchContext noNodeExceptionSnitch = new ServerSnitchContext(null, null, new HashMap<>(), null) { @Override + @SuppressWarnings({"rawtypes"}) public Map getZkJson(String path) throws KeeperException, InterruptedException { throw new KeeperException.NoNodeException(); } }; implicitSnitch.getTags("", Collections.singleton(ImplicitSnitch.ROLE), noNodeExceptionSnitch); + @SuppressWarnings({"rawtypes"}) Map map = (Map) noNodeExceptionSnitch.retrieve(ZkStateReader.ROLES); // todo is the key really supposed to be /roles.json?
assertNotNull(map); assertEquals(0, map.size()); @@ -220,6 +222,7 @@ public Map getZkJson(String path) throws KeeperException, InterruptedException { ServerSnitchContext keeperExceptionSnitch = new ServerSnitchContext(null, null, new HashMap<>(), null) { @Override + @SuppressWarnings({"rawtypes"}) public Map getZkJson(String path) throws KeeperException, InterruptedException { throw new KeeperException.ConnectionLossException(); } diff --git a/solr/core/src/test/org/apache/solr/cloud/rule/RuleEngineTest.java b/solr/core/src/test/org/apache/solr/cloud/rule/RuleEngineTest.java index f04895f8b5f8..02cc477f2780 100644 --- a/solr/core/src/test/org/apache/solr/cloud/rule/RuleEngineTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/rule/RuleEngineTest.java @@ -46,6 +46,7 @@ public class RuleEngineTest extends SolrTestCaseJ4{ @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void testPlacement2(){ @@ -101,6 +102,7 @@ shardVsReplicaCount, singletonList(MockSnitch.class.getName()), protected SnitchContext getSnitchCtx(String node, SnitchInfoImpl info, SolrCloudManager cloudManager) { return new ServerSnitchContext(info, node, snitchSession,cloudManager){ @Override + @SuppressWarnings({"rawtypes"}) public Map getZkJson(String path) { if(ZkStateReader.ROLES.equals(path)){ return Collections.singletonMap("overseer", preferredOverseerNodes); @@ -121,6 +123,7 @@ public Map getZkJson(String path) { } + @SuppressWarnings({"unchecked", "rawtypes"}) public void testPlacement3(){ String s = "{" + " '127.0.0.1:49961_':{" + @@ -244,6 +247,7 @@ shardVsReplicaCount, singletonList(MockSnitch.class.getName()), } + @SuppressWarnings({"rawtypes"}) private List parseRules(String s) { List maps = (List) Utils.fromJSON(s.getBytes(StandardCharsets.UTF_8)); @@ -254,6 +258,7 @@ private List parseRules(String s) { } @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void testPlacement() throws Exception { String rulesStr = "rack:*,replica:<2"; List rules = parse(Arrays.asList(rulesStr)); @@ -289,15 +294,18 @@ shardVsReplicaCount, singletonList(MockSnitch.class.getName()), } public static class MockSnitch extends Snitch { + @SuppressWarnings({"rawtypes"}) static Map nodeVsTags = Collections.emptyMap(); @Override + @SuppressWarnings({"unchecked"}) public void getTags(String solrNode, Set requestedTags, SnitchContext ctx) { ctx.getTags().putAll((Map) nodeVsTags.get(solrNode)); } @Override public boolean isKnownTag(String tag) { + @SuppressWarnings({"rawtypes"}) Map next = (Map) nodeVsTags.values().iterator().next(); return next.containsKey(tag); } diff --git a/solr/core/src/test/org/apache/solr/cloud/rule/RulesTest.java b/solr/core/src/test/org/apache/solr/cloud/rule/RulesTest.java index 17dd0199260a..d96779325752 100644 --- a/solr/core/src/test/org/apache/solr/cloud/rule/RulesTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/rule/RulesTest.java @@ -97,6 +97,7 @@ public void doIntegrationTest() throws Exception { DocCollection rulesCollection = getCollectionState(rulesColl); + @SuppressWarnings({"rawtypes"}) List list = (List) rulesCollection.get("rule"); assertEquals(3, list.size()); assertEquals ( "<4", ((Map)list.get(0)).get("cores")); @@ -169,6 +170,7 @@ public void testPortRuleInPresenceOfClusterPolicy() throws Exception { " {'replica': 0, 'port':'" + port + "'}" + " ]" + "}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand); cluster.getSolrClient().request(req); @@ -186,6 +188,7 @@ public void 
testPortRuleInPresenceOfClusterPolicy() throws Exception { if (null == rulesCollection) { return false; } else { + @SuppressWarnings({"rawtypes"}) List list = (List) rulesCollection.get("rule"); if (null == list || 1 != list.size()) { return false; @@ -230,6 +233,7 @@ public void testPortRule() throws Exception { if (null == rulesCollection) { return false; } else { + @SuppressWarnings({"rawtypes"}) List list = (List) rulesCollection.get("rule"); if (null == list || 1 != list.size()) { return false; @@ -256,6 +260,7 @@ public void testPortRule() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testHostFragmentRule() throws Exception { String rulesColl = "hostFragment"; @@ -274,6 +279,7 @@ public void testHostFragmentRule() throws Exception { cluster.waitForActiveCollection(rulesColl, 1, 2); DocCollection rulesCollection = getCollectionState(rulesColl); + @SuppressWarnings({"rawtypes"}) List list = (List) rulesCollection.get("rule"); assertEquals(2, list.size()); assertEquals(ip_2, list.get(0).get("ip_2")); @@ -358,6 +364,7 @@ public void testModifyColl() throws Exception { if (null == rulesCollection) { return false; } + @SuppressWarnings({"rawtypes"}) List list = (List) rulesCollection.get("rule"); if (null == list || 3 != list.size()) { return false; diff --git a/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java b/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java index 4a0f1ba7eb63..16dad9752581 100644 --- a/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java +++ b/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java @@ -50,7 +50,7 @@ public class BlobRepositoryMockingTest { private static final String[][] PARSED = new String[][]{{"foo", "bar", "baz"}, {"bang", "boom", "bash"}}; private static final String BLOBSTR = "foo,bar,baz\nbang,boom,bash"; private CoreContainer mockContainer = mock(CoreContainer.class); - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) private ConcurrentHashMap mapMock = mock(ConcurrentHashMap.class); private Object[] mocks = new Object[] { @@ -92,6 +92,7 @@ ByteBuffer fetchFromUrl(String key, String url) { } @Override + @SuppressWarnings({"rawtypes"}) ConcurrentHashMap createMap() { return mapMock; } @@ -103,6 +104,7 @@ ConcurrentHashMap createMap() { public void testCloudOnly() { when(mockContainer.isZooKeeperAware()).thenReturn(false); try { + @SuppressWarnings({"rawtypes"}) BlobRepository.BlobContentRef ref = repository.getBlobIncRef("foo!"); } catch (SolrException e) { verify(mockContainer).isZooKeeperAware(); @@ -114,6 +116,7 @@ public void testCloudOnly() { @Test public void testGetBlobIncrRefString() { when(mockContainer.isZooKeeperAware()).thenReturn(true); + @SuppressWarnings({"rawtypes"}) BlobRepository.BlobContentRef ref = repository.getBlobIncRef("foo!"); assertTrue("foo!".equals(blobKey)); assertTrue(blobFetched); @@ -130,6 +133,7 @@ public void testGetBlobIncrRefByUrl() throws Exception{ when(mockContainer.isZooKeeperAware()).thenReturn(true); filecontent = TestDynamicLoading.getFileContent("runtimecode/runtimelibs_v2.jar.bin"); url = "http://localhost:8080/myjar/location.jar"; + @SuppressWarnings({"rawtypes"}) BlobRepository.BlobContentRef ref = repository.getBlobIncRef( "filefoo",null,url, "bc5ce45ad281b6a08fb7e529b1eb475040076834816570902acb6ebdd809410e31006efdeaa7f78a6c35574f3504963f5f7e4d92247d0eb4db3fc9abdda5d417"); assertTrue("filefoo".equals(blobKey)); @@ -154,6 +158,7 @@ public void testGetBlobIncrRefByUrl() 
throws Exception{ public void testCachedAlready() { when(mockContainer.isZooKeeperAware()).thenReturn(true); when(mapMock.get("foo!")).thenReturn(new BlobRepository.BlobContent("foo!", blobData)); + @SuppressWarnings({"rawtypes"}) BlobRepository.BlobContentRef ref = repository.getBlobIncRef("foo!"); assertEquals("",blobKey); assertFalse(blobFetched); @@ -167,6 +172,7 @@ public void testCachedAlready() { @Test public void testGetBlobIncrRefStringDecoder() { when(mockContainer.isZooKeeperAware()).thenReturn(true); + @SuppressWarnings({"rawtypes"}) BlobRepository.BlobContentRef ref = repository.getBlobIncRef("foo!", new BlobRepository.Decoder() { @Override public Object decode(InputStream inputStream) { diff --git a/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java b/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java index 697d47b358da..f584604863f6 100644 --- a/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java +++ b/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java @@ -80,6 +80,7 @@ public void testComparator() { } @Test + @SuppressWarnings({"unchecked"}) public void integrationTest() { assumeWorkingMockito(); @@ -124,6 +125,7 @@ public void integrationTest() { Map replicaMap = replicas.stream().collect(Collectors.toMap(Replica::getName, Function.identity())); sliceMap.put(slice, new Slice(slice, replicaMap, map(), collection)); } + @SuppressWarnings({"unchecked"}) DocCollection col = new DocCollection(collection, sliceMap, map(), DocRouter.DEFAULT); collToState.put(collection, col); } @@ -181,6 +183,7 @@ public void integrationTest() { } private CoreDescriptor newCoreDescriptor(Replica r) { + @SuppressWarnings({"unchecked"}) Map props = map( CoreDescriptor.CORE_SHARD, r.getSlice(), CoreDescriptor.CORE_COLLECTION, r.getCollection(), @@ -192,6 +195,7 @@ private CoreDescriptor newCoreDescriptor(Replica r) { protected Replica addNewReplica(List replicaList, String collection, String slice, List possibleNodes) { String replica = "r" + replicaList.size(); String node = possibleNodes.get(random().nextInt(possibleNodes.size())); // place on a random node + @SuppressWarnings({"unchecked"}) Replica r = new Replica(replica, map("core", replica, "node_name", node), collection, slice); replicaList.add(r); return r; diff --git a/solr/core/src/test/org/apache/solr/core/CountUsageValueSourceParser.java b/solr/core/src/test/org/apache/solr/core/CountUsageValueSourceParser.java index e6380393e573..605018726a80 100644 --- a/solr/core/src/test/org/apache/solr/core/CountUsageValueSourceParser.java +++ b/solr/core/src/test/org/apache/solr/core/CountUsageValueSourceParser.java @@ -72,7 +72,8 @@ public CountDocsValueSource(AtomicInteger counter, double value) { this.counter = counter; } @Override - public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { + public FunctionValues getValues(@SuppressWarnings({"rawtypes"})Map context, + LeafReaderContext readerContext) throws IOException { return new DoubleDocValues(this) { @Override public double doubleVal(int doc) { diff --git a/solr/core/src/test/org/apache/solr/core/DirectoryFactoryTest.java b/solr/core/src/test/org/apache/solr/core/DirectoryFactoryTest.java index 71bb73b0720e..38754a610a3e 100755 --- a/solr/core/src/test/org/apache/solr/core/DirectoryFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/core/DirectoryFactoryTest.java @@ -76,6 +76,7 @@ public void testGetDataHomeByteBuffersDirectory() throws Exception { doTestGetDataHome(ByteBuffersDirectoryFactory.class); } + 
@SuppressWarnings({"unchecked", "rawtypes"}) private void doTestGetDataHome(Class directoryFactoryClass) throws Exception { NodeConfig config = loadNodeConfig("/solr/solr-solrDataHome.xml"); CoreContainer cc = new CoreContainer(config); diff --git a/solr/core/src/test/org/apache/solr/core/DummyValueSourceParser.java b/solr/core/src/test/org/apache/solr/core/DummyValueSourceParser.java index 2d83438d9d9d..14f1a1ee85de 100644 --- a/solr/core/src/test/org/apache/solr/core/DummyValueSourceParser.java +++ b/solr/core/src/test/org/apache/solr/core/DummyValueSourceParser.java @@ -29,10 +29,11 @@ * **/ public class DummyValueSourceParser extends ValueSourceParser { + @SuppressWarnings({"rawtypes"}) private NamedList args; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { this.args = args; } diff --git a/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java b/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java index 06faa05e29a1..d4fd38eb4acd 100644 --- a/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java +++ b/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java @@ -99,10 +99,13 @@ public void testCacheAssumptions() throws Exception { // This gets 0 docs back. Use 10000 instead of 1 for timeAllowed and it gets 100 back and the for loop below // succeeds. String response = JQ(req("q", "*:*", "fq", fq, "indent", "true", "timeAllowed", "1", "sleep", sleep)); + @SuppressWarnings({"rawtypes"}) Map res = (Map) fromJSONString(response); + @SuppressWarnings({"rawtypes"}) Map body = (Map) (res.get("response")); assertTrue("Should have fewer docs than " + NUM_DOCS, (long) (body.get("numFound")) < NUM_DOCS); + @SuppressWarnings({"rawtypes"}) Map header = (Map) (res.get("responseHeader")); assertTrue("Should have partial results", (Boolean) (header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))); @@ -142,8 +145,11 @@ public void testQueryResults() throws Exception { nl = queryCacheStats.getValue(); assertEquals("Should NOT have inserted partial results!", inserts, (long) nl.get("inserts")); + @SuppressWarnings({"rawtypes"}) Map res = (Map) fromJSONString(response); + @SuppressWarnings({"rawtypes"}) Map body = (Map) (res.get("response")); + @SuppressWarnings({"rawtypes"}) Map header = (Map) (res.get("responseHeader")); assertTrue("Should have fewer docs than " + NUM_DOCS, (long) (body.get("numFound")) < NUM_DOCS); diff --git a/solr/core/src/test/org/apache/solr/core/FakeDeletionPolicy.java b/solr/core/src/test/org/apache/solr/core/FakeDeletionPolicy.java index 3f463a11c695..ae20e0fb137e 100644 --- a/solr/core/src/test/org/apache/solr/core/FakeDeletionPolicy.java +++ b/solr/core/src/test/org/apache/solr/core/FakeDeletionPolicy.java @@ -33,7 +33,7 @@ public class FakeDeletionPolicy extends IndexDeletionPolicy implements NamedList //@Override @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { var1 = (String) args.get("var1"); var2 = (String) args.get("var2"); } @@ -48,13 +48,13 @@ public String getVar2() { // @Override @Override - public void onCommit(List arg0) throws IOException { + public void onCommit(@SuppressWarnings({"rawtypes"})List arg0) throws IOException { System.setProperty("onCommit", "test.org.apache.solr.core.FakeDeletionPolicy.onCommit"); } // @Override @Override - public void onInit(List arg0) throws IOException { + public void onInit(@SuppressWarnings({"rawtypes"})List arg0) 
throws IOException { System.setProperty("onInit", "test.org.apache.solr.core.FakeDeletionPolicy.onInit"); } } diff --git a/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java b/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java index 632fcb68b825..bd940c24af7e 100644 --- a/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java @@ -78,6 +78,7 @@ public static void teardownClass() throws Exception { } @Test + @SuppressWarnings({"try"}) public void testInitArgsOrSysPropConfig() throws Exception { try(HdfsDirectoryFactory hdfsFactory = new HdfsDirectoryFactory()) { // test sys prop config diff --git a/solr/core/src/test/org/apache/solr/core/HelloStream.java b/solr/core/src/test/org/apache/solr/core/HelloStream.java index be285e5886d4..370200504dc3 100644 --- a/solr/core/src/test/org/apache/solr/core/HelloStream.java +++ b/solr/core/src/test/org/apache/solr/core/HelloStream.java @@ -18,9 +18,7 @@ package org.apache.solr.core; import java.io.IOException; -import java.util.HashMap; import java.util.List; -import java.util.Map; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.comp.StreamComparator; @@ -67,14 +65,10 @@ public void close() throws IOException { @Override public Tuple read() throws IOException { if (isSentHelloWorld) { - Map m = new HashMap(); - m.put("EOF", true); - return new Tuple(m); + return Tuple.EOF(); } else { isSentHelloWorld = true; - Map m = new HashMap<>(); - m.put("msg", "Hello World!"); - return new Tuple(m); + return new Tuple("msg", "Hello World!"); } } diff --git a/solr/core/src/test/org/apache/solr/core/MockEventListener.java b/solr/core/src/test/org/apache/solr/core/MockEventListener.java index d2babd550d76..5b0422e6f200 100644 --- a/solr/core/src/test/org/apache/solr/core/MockEventListener.java +++ b/solr/core/src/test/org/apache/solr/core/MockEventListener.java @@ -34,7 +34,7 @@ public MockEventListener() { } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { /* NOOP */ } diff --git a/solr/core/src/test/org/apache/solr/core/MockQuerySenderListenerReqHandler.java b/solr/core/src/test/org/apache/solr/core/MockQuerySenderListenerReqHandler.java index 23bccdf2cdda..3dcf68f0bbd8 100644 --- a/solr/core/src/test/org/apache/solr/core/MockQuerySenderListenerReqHandler.java +++ b/solr/core/src/test/org/apache/solr/core/MockQuerySenderListenerReqHandler.java @@ -36,7 +36,7 @@ public class MockQuerySenderListenerReqHandler extends RequestHandlerBase { AtomicInteger initCounter = new AtomicInteger(0); @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { initCounter.incrementAndGet(); super.init(args); } diff --git a/solr/core/src/test/org/apache/solr/core/MockShardHandlerFactory.java b/solr/core/src/test/org/apache/solr/core/MockShardHandlerFactory.java index 98266b2ae843..f7eecedbd55f 100644 --- a/solr/core/src/test/org/apache/solr/core/MockShardHandlerFactory.java +++ b/solr/core/src/test/org/apache/solr/core/MockShardHandlerFactory.java @@ -27,6 +27,7 @@ /** a fake shardhandler factory that does nothing. 
*/ public class MockShardHandlerFactory extends ShardHandlerFactory implements PluginInfoInitialized { + @SuppressWarnings({"rawtypes"}) NamedList args; @Override diff --git a/solr/core/src/test/org/apache/solr/core/QueryResultKeyTest.java b/solr/core/src/test/org/apache/solr/core/QueryResultKeyTest.java index dc1e48b17b3e..b7147ee32516 100644 --- a/solr/core/src/test/org/apache/solr/core/QueryResultKeyTest.java +++ b/solr/core/src/test/org/apache/solr/core/QueryResultKeyTest.java @@ -136,7 +136,21 @@ public void testRandomQueryKeyEquality() { } assert minIters <= iter; } - + + public void testMinExactCount() { + int[] nums = smallArrayOfRandomNumbers(); + final Query base = new FlatHashTermQuery("base"); + assertKeyEquals(new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0, 10), + new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0, 10)); + assertKeyNotEquals(new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0, 10), + new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0, 20)); + assertKeyNotEquals(new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0, 10), + new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0)); // no minExactCount given: defaults to Integer.MAX_VALUE + assertKeyEquals(new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0, Integer.MAX_VALUE), + new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0)); + + } + /** * does bi-directional equality check as well as verifying hashCode */ diff --git a/solr/core/src/test/org/apache/solr/core/RequestHandlersTest.java b/solr/core/src/test/org/apache/solr/core/RequestHandlersTest.java index c40929d417a7..c12957b63476 100644 --- a/solr/core/src/test/org/apache/solr/core/RequestHandlersTest.java +++ b/solr/core/src/test/org/apache/solr/core/RequestHandlersTest.java @@ -35,6 +35,7 @@ public static void beforeClass() throws Exception { public void testInitCount() { String registry = h.getCore().getCoreMetricManager().getRegistryName(); SolrMetricManager manager = h.getCoreContainer().getMetricManager(); + @SuppressWarnings({"unchecked"}) Gauge g = (Gauge)manager.registry(registry).getMetrics().get("QUERY./mock.initCount"); assertEquals("Incorrect init count", 1, g.getValue().intValue()); diff --git a/solr/core/src/test/org/apache/solr/core/ResourceLoaderTest.java b/solr/core/src/test/org/apache/solr/core/ResourceLoaderTest.java index 5cca2293b30c..b09b33d12d23 100644 --- a/solr/core/src/test/org/apache/solr/core/ResourceLoaderTest.java +++ b/solr/core/src/test/org/apache/solr/core/ResourceLoaderTest.java @@ -71,6 +71,7 @@ public void testEscapeInstanceDir() throws Exception { } + @SuppressWarnings({"unchecked"}) public void testAwareCompatibility() throws Exception { final Class clazz1 = ResourceLoaderAware.class; @@ -201,7 +202,7 @@ public TokenStream create(TokenStream input) { } - @SuppressWarnings("deprecation") + @SuppressWarnings({"rawtypes", "deprecation"}) public void testLoadDeprecatedFactory() throws Exception { SolrResourceLoader loader = new SolrResourceLoader(Paths.get("solr/collection1")); // ensure we get our exception @@ -211,10 +212,11 @@ public void testLoadDeprecatedFactory() throws Exception { loader.close(); } - public void testCacheWrongType() { + public void testCacheWrongType() throws Exception { clearCache(); SolrResourceLoader loader = new SolrResourceLoader(); + @SuppressWarnings({"rawtypes"}) Class[] params = { Map.class }; Map args = Map.of("minGramSize", "1", "maxGramSize", "2"); final String className = "solr.NGramTokenizerFactory"; @@ -226,5 +228,6 @@
public void testCacheWrongType() { // This should work, but won't if the earlier call succeeded and corrupted the cache TokenizerFactory tf = loader.newInstance(className, TokenizerFactory.class, new String[0], params, new Object[]{new HashMap<>(args)}); assertNotNull("Did not load Tokenizer after bad call earlier", tf); + loader.close(); } } diff --git a/solr/core/src/test/org/apache/solr/core/SolrCoreCheckLockOnStartupTest.java b/solr/core/src/test/org/apache/solr/core/SolrCoreCheckLockOnStartupTest.java index 1156aad67256..314af7c59e2b 100644 --- a/solr/core/src/test/org/apache/solr/core/SolrCoreCheckLockOnStartupTest.java +++ b/solr/core/src/test/org/apache/solr/core/SolrCoreCheckLockOnStartupTest.java @@ -77,7 +77,9 @@ public void testSimpleLockErrorOnStartup() throws Exception { public void testNativeLockErrorOnStartup() throws Exception { File indexDir = new File(initAndGetDataDir(), "index"); - log.info("Acquiring lock on {}", indexDir.getAbsolutePath()); + if (log.isInfoEnabled()) { + log.info("Acquiring lock on {}", indexDir.getAbsolutePath()); + } Directory directory = newFSDirectory(indexDir.toPath(), NativeFSLockFactory.INSTANCE); //creates a new IndexWriter without releasing the lock yet IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(null)); diff --git a/solr/core/src/test/org/apache/solr/core/TestConfigOverlay.java b/solr/core/src/test/org/apache/solr/core/TestConfigOverlay.java index e98f5100fbfb..1c39fa98683f 100644 --- a/solr/core/src/test/org/apache/solr/core/TestConfigOverlay.java +++ b/solr/core/src/test/org/apache/solr/core/TestConfigOverlay.java @@ -65,6 +65,7 @@ public void testPaths() { } public void testSetProperty(){ + @SuppressWarnings({"unchecked"}) ConfigOverlay overlay = new ConfigOverlay(Collections.EMPTY_MAP,0); overlay = overlay.setProperty("query.filterCache.initialSize",100); assertEquals(100, overlay.getXPathProperty("query/filterCache/@initialSize")); diff --git a/solr/core/src/test/org/apache/solr/core/TestConfigSetImmutable.java b/solr/core/src/test/org/apache/solr/core/TestConfigSetImmutable.java index f8caaf6bddf3..1d0c73300910 100644 --- a/solr/core/src/test/org/apache/solr/core/TestConfigSetImmutable.java +++ b/solr/core/src/test/org/apache/solr/core/TestConfigSetImmutable.java @@ -75,6 +75,7 @@ public void testSolrConfigHandlerImmutable() throws Exception { "}"; String uri = "/config"; String response = restTestHarness.post(uri, SolrTestCaseJ4.json(payload)); + @SuppressWarnings({"rawtypes"}) Map map = (Map) Utils.fromJSONString(response); assertNotNull(map.get("error")); assertTrue(map.get("error").toString().contains("immutable")); @@ -92,7 +93,9 @@ public void testSchemaHandlerImmutable() throws Exception { " }"; String response = restTestHarness.post("/schema", json(payload)); + @SuppressWarnings({"rawtypes"}) Map map = (Map) Utils.fromJSONString(response); + @SuppressWarnings({"rawtypes"}) Map error = (Map)map.get("error"); assertNotNull("No errors", error); String msg = (String)error.get("msg"); diff --git a/solr/core/src/test/org/apache/solr/core/TestConfigSetProperties.java b/solr/core/src/test/org/apache/solr/core/TestConfigSetProperties.java index 75f4e662679b..093bed6065fc 100644 --- a/solr/core/src/test/org/apache/solr/core/TestConfigSetProperties.java +++ b/solr/core/src/test/org/apache/solr/core/TestConfigSetProperties.java @@ -62,19 +62,23 @@ public void testConfigSetPropertiesNotMap() throws Exception { @Test public void testEmptyMap() throws Exception { + @SuppressWarnings({"rawtypes"}) NamedList list =
createConfigSetProps(Utils.toJSONString(ImmutableMap.of())); assertEquals(0, list.size()); } @Test public void testMultipleProps() throws Exception { + @SuppressWarnings({"rawtypes"}) Map map = ImmutableMap.of("immutable", "true", "someOtherProp", "true"); + @SuppressWarnings({"rawtypes"}) NamedList list = createConfigSetProps(Utils.toJSONString(map)); assertEquals(2, list.size()); assertEquals("true", list.get("immutable")); assertEquals("true", list.get("someOtherProp")); } + @SuppressWarnings({"rawtypes"}) private NamedList createConfigSetProps(String props) throws Exception { Path testDirectory = createTempDir(); String filename = "configsetprops.json"; diff --git a/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java b/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java index 6d1f0885bce4..2ebeb797caaf 100644 --- a/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java +++ b/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java @@ -19,16 +19,19 @@ import java.io.File; import java.io.FileOutputStream; import java.nio.file.Path; +import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.jar.JarEntry; import java.util.jar.JarOutputStream; import java.util.regex.Pattern; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; +import org.apache.commons.exec.OS; import org.apache.commons.io.FileUtils; import org.apache.lucene.util.IOUtils; import org.apache.solr.SolrTestCaseJ4; @@ -38,6 +41,7 @@ import org.apache.solr.handler.admin.CoreAdminHandler; import org.apache.solr.handler.admin.InfoHandler; import org.junit.AfterClass; +import org.junit.Assume; import org.junit.BeforeClass; import org.junit.Test; import org.xml.sax.SAXParseException; @@ -310,6 +314,11 @@ public void testSharedLib() throws Exception { "<str name=\"shareSchema\">${shareSchema:false}</str>\n" + "</solr>"; + private static final String ALLOW_PATHS_SOLR_XML ="<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n" + + "<solr>\n" + + "<str name=\"allowPaths\">${solr.allowPaths:}</str>\n" + + "</solr>"; + private static final String CUSTOM_HANDLERS_SOLR_XML = "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n" + "<solr>" + " <str name=\"collectionsHandler\">" + CustomCollectionsHandler.class.getName() + "</str>" + @@ -342,6 +351,82 @@ public CustomConfigSetsHandler(CoreContainer cc) { } } + @Test + public void assertAllowPathFromSolrXml() throws Exception { + Assume.assumeFalse(OS.isFamilyWindows()); + System.setProperty("solr.allowPaths", "/var/solr"); + CoreContainer cc = init(ALLOW_PATHS_SOLR_XML); + cc.assertPathAllowed(Paths.get("/var/solr/foo")); + try { + cc.assertPathAllowed(Paths.get("/tmp")); + fail("Path /tmp should not be allowed"); + } catch(SolrException e) { + /* Ignore */ + } finally { + cc.shutdown(); + System.clearProperty("solr.allowPaths"); + } + } + + @Test + public void assertAllowPathFromSolrXmlWin() throws Exception { + Assume.assumeTrue(OS.isFamilyWindows()); + System.setProperty("solr.allowPaths", "C:\\solr"); + CoreContainer cc = init(ALLOW_PATHS_SOLR_XML); + cc.assertPathAllowed(Paths.get("C:\\solr\\foo")); + try { + cc.assertPathAllowed(Paths.get("C:\\tmp")); + fail("Path C:\\tmp should not be allowed"); + } catch(SolrException e) { + /* Ignore */ + } finally { + cc.shutdown(); + System.clearProperty("solr.allowPaths"); + } + } + + @Test + public void assertAllowPath() { + Assume.assumeFalse(OS.isFamilyWindows()); + assertPathAllowed("/var/solr/foo"); + assertPathAllowed("/var/log/../solr/foo"); + assertPathAllowed("relative"); + + assertPathBlocked("../../false"); + assertPathBlocked("./../../false"); +
assertPathBlocked("/var/solr/../../etc"); + } + + @Test + public void assertAllowPathWindows() { + Assume.assumeTrue(OS.isFamilyWindows()); + assertPathAllowed("C:\\var\\solr\\foo"); + assertPathAllowed("C:\\var\\log\\..\\solr\\foo"); + assertPathAllowed("relative"); + + assertPathBlocked("..\\..\\false"); + assertPathBlocked(".\\../\\..\\false"); + assertPathBlocked("C:\\var\\solr\\..\\..\\etc"); + + // UNC paths are always blocked + assertPathBlocked("\\\\unc-server\\share\\path"); + } + + private static Set ALLOWED_PATHS = Set.of(Path.of("/var/solr")); + private static Set ALLOWED_PATHS_WIN = Set.of(Path.of("C:\\var\\solr")); + + private void assertPathBlocked(String path) { + try { + + SolrPaths.assertPathAllowed(Path.of(path), OS.isFamilyWindows() ? ALLOWED_PATHS_WIN : ALLOWED_PATHS); + fail("Path " + path + " sould have been blocked."); + } catch (SolrException e) { /* Expected */ } + } + + private void assertPathAllowed(String path) { + SolrPaths.assertPathAllowed(Path.of(path), OS.isFamilyWindows() ? ALLOWED_PATHS_WIN : ALLOWED_PATHS); + } + @Test public void testCustomHandlers() throws Exception { diff --git a/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java b/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java index a9bd969144a9..5c5ba8fe7c26 100644 --- a/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java +++ b/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java @@ -141,6 +141,7 @@ public void after() throws Exception { // Test the basic setup, create some dirs with core.properties files in them, but solr.xml has discoverCores // set and insure that we find all the cores and can load them. @Test + @SuppressWarnings({"try"}) public void testDiscovery() throws Exception { setMeUp(); @@ -397,7 +398,7 @@ public void testAlternateRelativeCoreDir() throws Exception { assertNull(cc.getCore("core0")); SolrCore core3 = cc.create("core3", ImmutableMap.of("configSet", "minimal")); - assertThat(core3.getCoreDescriptor().getInstanceDir().toAbsolutePath().toString(), containsString("relative")); + assertThat(core3.getCoreDescriptor().getInstanceDir().toString(), containsString("relative")); } finally { cc.shutdown(); diff --git a/solr/core/src/test/org/apache/solr/core/TestDirectoryFactory.java b/solr/core/src/test/org/apache/solr/core/TestDirectoryFactory.java index 6496b24247e9..e37591aa8e0d 100644 --- a/solr/core/src/test/org/apache/solr/core/TestDirectoryFactory.java +++ b/solr/core/src/test/org/apache/solr/core/TestDirectoryFactory.java @@ -57,6 +57,7 @@ public void testExistsEquivilence() throws Exception { } } + @SuppressWarnings({"rawtypes"}) private void testExistsBehavior(Class clazz) throws Exception { final String path = createTempDir().toString() + "/" + clazz + "_somedir"; DirectoryFactory dirFac = null; diff --git a/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java b/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java index 22ee299dac6a..d9a3bf47fbb6 100644 --- a/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java +++ b/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java @@ -84,6 +84,7 @@ public void testDynamicLoading() throws Exception { Arrays.asList("overlay", "requestHandler", "/test1", "class"), "org.apache.solr.core.BlobStoreTestRequestHandler",10); + @SuppressWarnings({"rawtypes"}) Map map = TestSolrConfigHandler.getRespMap("/test1", client); assertNotNull(map.toString(), map = (Map) map.get("error")); @@ -142,6 +143,7 @@ public void testDynamicLoading() throws Exception { 
client = randomRestTestHarness(); TestSolrConfigHandler.runConfigCommand(client, "/config", payload); + @SuppressWarnings({"rawtypes"}) Map result = TestSolrConfigHandler.testForResponseElement(client, null, "/config/overlay", @@ -258,7 +260,8 @@ public static ByteBuffer getFileContent(String f, boolean loadFromClassPath) thr return jar; } - public static ByteBuffer persistZip(String loc, Class... classes) throws IOException { + public static ByteBuffer persistZip(String loc, + @SuppressWarnings({"rawtypes"})Class... classes) throws IOException { ByteBuffer jar = generateZip(classes); try (FileOutputStream fos = new FileOutputStream(loc)){ fos.write(jar.array(), 0, jar.limit()); @@ -268,11 +271,11 @@ public static ByteBuffer persistZip(String loc, Class... classes) throws IOExce } - public static ByteBuffer generateZip(Class... classes) throws IOException { + public static ByteBuffer generateZip(@SuppressWarnings({"rawtypes"})Class... classes) throws IOException { SimplePostTool.BAOS bos = new SimplePostTool.BAOS(); try (ZipOutputStream zipOut = new ZipOutputStream(bos)) { zipOut.setLevel(ZipOutputStream.DEFLATED); - for (Class c : classes) { + for (@SuppressWarnings({"rawtypes"})Class c : classes) { String path = c.getName().replace('.', '/').concat(".class"); ZipEntry entry = new ZipEntry(path); ByteBuffer b = SimplePostTool.inputStreamToByteArray(c.getClassLoader().getResourceAsStream(path)); diff --git a/solr/core/src/test/org/apache/solr/core/TestDynamicLoadingUrl.java b/solr/core/src/test/org/apache/solr/core/TestDynamicLoadingUrl.java index 575cf9e2da88..b172d524430e 100644 --- a/solr/core/src/test/org/apache/solr/core/TestDynamicLoadingUrl.java +++ b/solr/core/src/test/org/apache/solr/core/TestDynamicLoadingUrl.java @@ -109,6 +109,7 @@ public void testDynamicLoadingUrl() throws Exception { Arrays.asList("overlay", "requestHandler", "/runtime", "class"), "org.apache.solr.core.RuntimeLibReqHandler", 120); + @SuppressWarnings({"rawtypes"}) Map result = TestSolrConfigHandler.testForResponseElement(client, null, "/runtime", diff --git a/solr/core/src/test/org/apache/solr/core/TestInitParams.java b/solr/core/src/test/org/apache/solr/core/TestInitParams.java index 465c16027903..d3a0eef9ef80 100644 --- a/solr/core/src/test/org/apache/solr/core/TestInitParams.java +++ b/solr/core/src/test/org/apache/solr/core/TestInitParams.java @@ -41,7 +41,9 @@ public void testComponentWithInitParams(){ SolrRequestHandler handler = h.getCore().getRequestHandler(s); SolrQueryResponse rsp = new SolrQueryResponse(); handler.handleRequest(req("initArgs", "true"), rsp); + @SuppressWarnings({"rawtypes"}) NamedList nl = (NamedList) rsp.getValues().get("initArgs"); + @SuppressWarnings({"rawtypes"}) NamedList def = (NamedList) nl.get(PluginInfo.DEFAULTS); assertEquals("A", def.get("a")); def = (NamedList) nl.get(PluginInfo.INVARIANTS); @@ -52,8 +54,9 @@ public void testComponentWithInitParams(){ InitParams initParams = h.getCore().getSolrConfig().getInitParams().get("a"); + @SuppressWarnings({"unchecked", "rawtypes"}) PluginInfo pluginInfo = new PluginInfo("requestHandler", - new HashMap(), + new HashMap<>(), new NamedList<>(singletonMap("defaults", new NamedList(Utils.makeMap("a", "A1")))), null); initParams.apply(pluginInfo); assertEquals( "A",initParams.defaults.get("a")); @@ -64,7 +67,9 @@ public void testMultiInitParams(){ SolrRequestHandler handler = h.getCore().getRequestHandler("/dump6"); SolrQueryResponse rsp = new SolrQueryResponse(); handler.handleRequest(req("initArgs", "true"), rsp); + 
@SuppressWarnings({"rawtypes"}) NamedList nl = (NamedList) rsp.getValues().get("initArgs"); + @SuppressWarnings({"rawtypes"}) NamedList def = (NamedList) nl.get(PluginInfo.DEFAULTS); assertEquals("A", def.get("a")); assertEquals("P", def.get("p")); @@ -81,7 +86,9 @@ public void testComponentWithConflictingInitParams(){ SolrRequestHandler handler = h.getCore().getRequestHandler("/dump2"); SolrQueryResponse rsp = new SolrQueryResponse(); handler.handleRequest(req("initArgs", "true"), rsp); + @SuppressWarnings({"rawtypes"}) NamedList nl = (NamedList) rsp.getValues().get("initArgs"); + @SuppressWarnings({"rawtypes"}) NamedList def = (NamedList) nl.get(PluginInfo.DEFAULTS); assertEquals("A1" ,def.get("a")); def = (NamedList) nl.get(PluginInfo.INVARIANTS); @@ -101,7 +108,9 @@ public void testElevateExample(){ SolrRequestHandler handler = h.getCore().getRequestHandler("/elevate"); SolrQueryResponse rsp = new SolrQueryResponse(); handler.handleRequest(req("initArgs", "true"), rsp); + @SuppressWarnings({"rawtypes"}) NamedList nl = (NamedList) rsp.getValues().get("initArgs"); + @SuppressWarnings({"rawtypes"}) NamedList def = (NamedList) nl.get(PluginInfo.DEFAULTS); assertEquals("text" ,def.get("df")); @@ -111,6 +120,7 @@ public void testArbitraryAttributes() { SolrRequestHandler handler = h.getCore().getRequestHandler("/dump7"); SolrQueryResponse rsp = new SolrQueryResponse(); handler.handleRequest(req("initArgs", "true"), rsp); + @SuppressWarnings({"rawtypes"}) NamedList nl = (NamedList) rsp.getValues().get("initArgs"); assertEquals("server-enabled.txt", nl.get("healthcheckFile")); } diff --git a/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java b/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java index 90ce4f948a04..bc5924192239 100644 --- a/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java +++ b/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java @@ -156,7 +156,9 @@ public void testJmxUpdate() throws Exception { if (bean==null) throw new RuntimeException("searcher was never registered"); ObjectName searcher = nameFactory.createName("gauge", registryName, "SEARCHER.searcher.*"); - log.info("Mbeans in server: " + mbeanServer.queryNames(null, null)); + if (log.isInfoEnabled()) { + log.info("Mbeans in server: {}", mbeanServer.queryNames(null, null)); + } Set objects = mbeanServer.queryMBeans(searcher, null); assertFalse("No mbean found for SolrIndexSearcher", mbeanServer.queryMBeans(searcher, null).isEmpty()); @@ -171,6 +173,7 @@ public void testJmxUpdate() throws Exception { } @Test + @SuppressWarnings({"try"}) public void testJmxOnCoreReload() throws Exception { // make sure searcher beans are registered assertQ(req("q", "*:*"), "//result[@numFound='0']"); @@ -204,7 +207,7 @@ public void testJmxOnCoreReload() throws Exception { } int totalCoreMetrics = mgr.registry(registryName).getMetrics().size(); - log.info("Before Reload: size of all core metrics: " + totalCoreMetrics + " MBeans: " + oldNumberOfObjects); + log.info("Before Reload: size of all core metrics: {} MBeans: {}", totalCoreMetrics, oldNumberOfObjects); assertEquals("Number of registered MBeans is not the same as the number of core metrics", totalCoreMetrics, oldNumberOfObjects); h.getCoreContainer().reload(coreName); assertQ(req("q", "*:*"), "//result[@numFound='0']"); @@ -250,14 +253,14 @@ public void testJmxOnCoreReload() throws Exception { } } - log.info("After Reload: size of all core metrics: " + totalCoreMetrics + " MBeans: " + newNumberOfObjects); + log.info("After Reload: size 
of all core metrics: {} MBeans: {}", totalCoreMetrics, newNumberOfObjects); if (totalCoreMetrics != newNumberOfObjects) { Set errors = new TreeSet<>(beanNames); errors.removeAll(metricNames); - log.error("Unexpected bean names: " + errors); + log.error("Unexpected bean names: {}", errors); errors = new TreeSet<>(metricNames); errors.removeAll(beanNames); - log.error("Unexpected metric names: " + errors); + log.error("Unexpected metric names: {}", errors); fail("Number of registered MBeans is not the same as the number of core metrics: " + totalCoreMetrics + " != " + newNumberOfObjects); } } diff --git a/solr/core/src/test/org/apache/solr/core/TestLazyCores.java b/solr/core/src/test/org/apache/solr/core/TestLazyCores.java index 4120f3e124af..a41b750e115e 100644 --- a/solr/core/src/test/org/apache/solr/core/TestLazyCores.java +++ b/solr/core/src/test/org/apache/solr/core/TestLazyCores.java @@ -241,17 +241,20 @@ public void testCachingLimit() throws Exception { checkNotInCores(cc, Arrays.asList("collection2", "collection3")); // verify that getting metrics from an unloaded core doesn't cause exceptions (SOLR-12541) - MetricsHandler handler = new MetricsHandler(h.getCoreContainer()); - - SolrQueryResponse resp = new SolrQueryResponse(); - handler.handleRequest(makeReq(core1, CommonParams.QT, "/admin/metrics"), resp); - NamedList values = resp.getValues(); - assertNotNull(values.get("metrics")); - values = (NamedList) values.get("metrics"); - NamedList nl = (NamedList) values.get("solr.core.collection2"); - assertNotNull(nl); - Object o = nl.get("REPLICATION./replication.indexPath"); - assertNotNull(o); + try (MetricsHandler handler = new MetricsHandler(h.getCoreContainer())) { + + SolrQueryResponse resp = new SolrQueryResponse(); + handler.handleRequest(makeReq(core1, CommonParams.QT, "/admin/metrics"), resp); + @SuppressWarnings({"rawtypes"}) + NamedList values = resp.getValues(); + assertNotNull(values.get("metrics")); + values = (NamedList) values.get("metrics"); + @SuppressWarnings({"rawtypes"}) + NamedList nl = (NamedList) values.get("solr.core.collection2"); + assertNotNull(nl); + Object o = nl.get("REPLICATION./replication.indexPath"); + assertNotNull(o); + } // Note decrementing the count when the core is removed from the lazyCores list is appropriate, since the @@ -361,28 +364,29 @@ public void testCreateSame() throws Exception { private void createViaAdmin(CoreContainer cc, String name, boolean isTransient, boolean loadOnStartup) throws Exception { - final CoreAdminHandler admin = new CoreAdminHandler(cc); - SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, - CoreAdminParams.CoreAdminAction.CREATE.toString(), - CoreAdminParams.NAME, name, - CoreAdminParams.TRANSIENT, Boolean.toString(isTransient), - CoreAdminParams.LOAD_ON_STARTUP, Boolean.toString(loadOnStartup)), - resp); + try (final CoreAdminHandler admin = new CoreAdminHandler(cc)) { + SolrQueryResponse resp = new SolrQueryResponse(); + admin.handleRequestBody + (req(CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.CREATE.toString(), + CoreAdminParams.NAME, name, + CoreAdminParams.TRANSIENT, Boolean.toString(isTransient), + CoreAdminParams.LOAD_ON_STARTUP, Boolean.toString(loadOnStartup)), + resp); + } } private void unloadViaAdmin(CoreContainer cc, String name) throws Exception { - final CoreAdminHandler admin = new CoreAdminHandler(cc); - SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, - 
CoreAdminParams.CoreAdminAction.UNLOAD.toString(), - CoreAdminParams.CORE, name), - resp); - + try (final CoreAdminHandler admin = new CoreAdminHandler(cc)) { + SolrQueryResponse resp = new SolrQueryResponse(); + admin.handleRequestBody + (req(CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.UNLOAD.toString(), + CoreAdminParams.CORE, name), + resp); + } } // Make sure that creating a transient core from the admin handler correctly respects the transient limits etc. @@ -634,13 +638,15 @@ private void copyGoodConf(String coreName, String srcName, String dstName) throw // if ok==false, the core being examined should have a failure in the list. private void checkStatus(CoreContainer cc, Boolean ok, String core) throws Exception { SolrQueryResponse resp = new SolrQueryResponse(); - final CoreAdminHandler admin = new CoreAdminHandler(cc); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, - CoreAdminParams.CoreAdminAction.STATUS.toString(), - CoreAdminParams.CORE, core), - resp); + try (final CoreAdminHandler admin = new CoreAdminHandler(cc)) { + admin.handleRequestBody + (req(CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.STATUS.toString(), + CoreAdminParams.CORE, core), + resp); + } + @SuppressWarnings({"unchecked"}) Map failures = (Map) resp.getValues().get("initFailures"); @@ -717,6 +723,7 @@ private void addLazy(SolrCore core, String... fieldValues) throws IOException { updater.addDoc(cmd); } + @SuppressWarnings({"unchecked", "rawtypes"}) private LocalSolrQueryRequest makeReq(SolrCore core, String... q) { if (q.length == 1) { return new LocalSolrQueryRequest(core, diff --git a/solr/core/src/test/org/apache/solr/core/TestShardHandlerFactory.java b/solr/core/src/test/org/apache/solr/core/TestShardHandlerFactory.java index e6e629563724..9b1fabec97cc 100644 --- a/solr/core/src/test/org/apache/solr/core/TestShardHandlerFactory.java +++ b/solr/core/src/test/org/apache/solr/core/TestShardHandlerFactory.java @@ -33,6 +33,7 @@ public void testXML() throws Exception { CoreContainer cc = CoreContainer.createAndLoad(home, home.resolve("solr-shardhandler.xml")); ShardHandlerFactory factory = cc.getShardHandlerFactory(); assertTrue(factory instanceof MockShardHandlerFactory); + @SuppressWarnings({"rawtypes"}) NamedList args = ((MockShardHandlerFactory)factory).args; assertEquals("myMagicRequiredValue", args.get("myMagicRequiredParameter")); factory.close(); diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java index c66eb197b0ea..6aa48a5467f6 100644 --- a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java +++ b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java @@ -171,6 +171,7 @@ public void testReqHandlerAPIs() throws Exception { reqhandlertests(restTestHarness, null, null); } + @SuppressWarnings({"rawtypes"}) public static Map runConfigCommand(RestTestHarness harness, String uri, String payload) throws IOException { String json = SolrTestCaseJ4.json(payload); log.info("going to send config command. path {} , payload: {}", uri, payload); @@ -185,11 +186,14 @@ public static void runConfigCommandExpectFailure(RestTestHarness harness, String String json = SolrTestCaseJ4.json(payload); log.info("going to send config command. 
path {} , payload: {}", uri, payload); String response = harness.post(uri, json); + @SuppressWarnings({"rawtypes"}) Map map = (Map)Utils.fromJSONString(response); assertNotNull(response, map.get("errorMessages")); assertNotNull(response, map.get("error")); assertTrue("Expected status != 0: " + response, 0L != (Long)((Map)map.get("responseHeader")).get("status")); + @SuppressWarnings({"rawtypes"}) List errorDetails = (List)((Map)map.get("error")).get("details"); + @SuppressWarnings({"rawtypes"}) List errorMessages = (List)((Map)errorDetails.get(0)).get("errorMessages"); assertTrue("Expected '" + expectedErrorMessage + "': " + response, errorMessages.get(0).toString().contains(expectedErrorMessage)); @@ -264,6 +268,7 @@ public static void reqhandlertests(RestTestHarness writeHarness, String testServ int maxTimeoutSeconds = 10; while (TimeUnit.SECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutSeconds) { String uri = "/config/overlay"; + @SuppressWarnings({"rawtypes"}) Map m = testServerBaseUrl == null ? getRespMap(uri, writeHarness) : TestSolrConfigHandlerConcurrent.getAsMap(testServerBaseUrl + uri, cloudSolrClient); if (null == Utils.getObjectByPath(m, true, asList("overlay", "requestHandler", "/x", "a"))) { success = true; @@ -413,6 +418,7 @@ public static void reqhandlertests(RestTestHarness writeHarness, String testServ "'create-initparams' : { 'name' : 'hello', 'key':'val'}\n" + "}"; runConfigCommand(writeHarness, "/config", payload); + @SuppressWarnings({"rawtypes"}) Map map = testForResponseElement(writeHarness, testServerBaseUrl, "/config", @@ -421,6 +427,7 @@ public static void reqhandlertests(RestTestHarness writeHarness, String testServ null, TIMEOUT_S); + @SuppressWarnings({"rawtypes"}) List l = (List) Utils.getObjectByPath(map, false, asList("config", "initParams")); assertNotNull("no object /config/initParams : "+ map , l); assertEquals( 2, l.size()); @@ -467,6 +474,7 @@ public static void reqhandlertests(RestTestHarness writeHarness, String testServ TIMEOUT_S); map = getRespMap("/dump100?json.nl=arrmap&initArgs=true", writeHarness); + @SuppressWarnings({"rawtypes"}) List initArgs = (List) map.get("initArgs"); assertNotNull(initArgs); assertTrue(initArgs.size() >= 2); @@ -536,13 +544,16 @@ public void testFailures() throws Exception { public static class CacheTest extends DumpRequestHandler { @Override + @SuppressWarnings({"unchecked"}) public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException { super.handleRequestBody(req, rsp); String[] caches = req.getParams().getParams("cacheNames"); if(caches != null && caches.length>0){ + @SuppressWarnings({"rawtypes"}) HashMap m = new HashMap(); rsp.add("caches", m); for (String c : caches) { + @SuppressWarnings({"rawtypes"}) SolrCache cache = req.getSearcher().getCache(c); if(cache != null) m.put(c, cache.getClass().getName()); } @@ -550,6 +561,7 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw } } + @SuppressWarnings({"unchecked", "rawtypes"}) public static LinkedHashMapWriter testForResponseElement(RestTestHarness harness, String testServerBaseUrl, String uri, @@ -830,6 +842,7 @@ public void testReqParams() throws Exception { RESTfulServerProvider oldProvider = restTestHarness.getServerProvider(); restTestHarness.setServerProvider(() -> jetty.getBaseUrl().toString() + "/____v2/cores/" + DEFAULT_TEST_CORENAME); + @SuppressWarnings({"rawtypes"}) Map rsp = TestSolrConfigHandler.testForResponseElement( harness, null, @@ -858,6 +871,7 @@ 
public String toString() { } + @SuppressWarnings({"rawtypes"}) public static LinkedHashMapWriter getRespMap(String path, RestTestHarness restHarness) throws Exception { String response = restHarness.query(path); try { diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrXml.java b/solr/core/src/test/org/apache/solr/core/TestSolrXml.java index 2fcf97158147..f45ae2717657 100644 --- a/solr/core/src/test/org/apache/solr/core/TestSolrXml.java +++ b/solr/core/src/test/org/apache/solr/core/TestSolrXml.java @@ -22,8 +22,11 @@ import java.nio.file.Path; import java.util.Locale; import java.util.Properties; +import java.util.Set; +import java.util.stream.Collectors; import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; +import org.apache.commons.exec.OS; import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; @@ -56,6 +59,7 @@ public void testAllInfoPresent() throws IOException { Path testSrcRoot = TEST_PATH(); Files.copy(testSrcRoot.resolve("solr-50-all.xml"), solrHome.resolve("solr.xml")); + System.setProperty("solr.allowPaths", OS.isFamilyWindows() ? "C:\\tmp,C:\\home\\john" : "/tmp,/home/john"); NodeConfig cfg = SolrXmlConfig.fromSolrHome(solrHome, new Properties()); CloudConfig ccfg = cfg.getCloudConfig(); UpdateShardHandlerConfig ucfg = cfg.getUpdateShardHandlerConfig(); @@ -98,6 +102,12 @@ public void testAllInfoPresent() throws IOException { assertEquals("a.b.C", backupRepoConfigs[0].className); assertEquals("true", backupRepoConfigs[0].attributes.get("default")); assertEquals(0, backupRepoConfigs[0].initArgs.size()); + assertTrue("allowPaths", cfg.getAllowPaths().containsAll(OS.isFamilyWindows() ? + Set.of("C:\\tmp", "C:\\home\\john").stream().map(s -> Path.of(s)).collect(Collectors.toSet()) : + Set.of("/tmp", "/home/john").stream().map(s -> Path.of(s)).collect(Collectors.toSet()) + ) + ); + System.clearProperty("solr.allowPaths"); } // Test a few property substitutions that happen to be in solr-50-all.xml. 
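(For orientation, the solr.allowPaths assertions above boil down to an allow-list check over normalized paths. The following is a minimal illustrative sketch only: the class name AllowPathSketch is hypothetical, the real SolrPaths.assertPathAllowed in solr/core throws SolrException rather than IllegalArgumentException, and only the assertPathAllowed(Path, Set<Path>) shape is taken from the tests in this patch.)

    import java.nio.file.Path;
    import java.util.Set;

    // Illustrative-only sketch of the allow-list behavior the tests assert:
    // plain relative paths pass, Windows UNC paths are always rejected, and
    // any path that normalizes to an absolute (or parent-escaping) location
    // must land under one of the allow-listed roots from solr.allowPaths.
    final class AllowPathSketch {
      static void assertPathAllowed(Path path, Set<Path> allowPaths) {
        if (path == null) {
          return;
        }
        if (path.toString().startsWith("\\\\")) { // UNC share, e.g. \\host\share
          throw new IllegalArgumentException("UNC paths are not allowed: " + path);
        }
        Path normalized = path.normalize(); // resolve "." and ".." segments first
        if (!normalized.isAbsolute() && !normalized.startsWith("..")) {
          return; // a plain relative path cannot escape the instance dir
        }
        for (Path allowed : allowPaths) {
          if (normalized.startsWith(allowed.normalize())) {
            return;
          }
        }
        throw new IllegalArgumentException(path + " is outside the allowed paths " + allowPaths);
      }
    }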
diff --git a/solr/core/src/test/org/apache/solr/core/backup/repository/HdfsBackupRepositoryTest.java b/solr/core/src/test/org/apache/solr/core/backup/repository/HdfsBackupRepositoryTest.java index 0da02b02903e..398fb3b79b3b 100644 --- a/solr/core/src/test/org/apache/solr/core/backup/repository/HdfsBackupRepositoryTest.java +++ b/solr/core/src/test/org/apache/solr/core/backup/repository/HdfsBackupRepositoryTest.java @@ -17,6 +17,8 @@ package org.apache.solr.core.backup.repository; +import java.io.IOException; + import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SimpleOrderedMap; import org.apache.solr.core.HdfsDirectoryFactory; @@ -28,68 +30,76 @@ public class HdfsBackupRepositoryTest { @Test(expected = NullPointerException.class) - public void testHdfsHomePropertyMissing() { - HdfsBackupRepository hdfsBackupRepository = new HdfsBackupRepository(); - NamedList namedList = new SimpleOrderedMap<>(); - hdfsBackupRepository.init(namedList); + public void testHdfsHomePropertyMissing() throws IOException { + try (HdfsBackupRepository hdfsBackupRepository = new HdfsBackupRepository()) { + NamedList namedList = new SimpleOrderedMap<>(); + hdfsBackupRepository.init(namedList); + } } @Test - public void testHdfsHomePropertySet() { - HdfsBackupRepository hdfsBackupRepository = new HdfsBackupRepository(); - NamedList namedList = new SimpleOrderedMap<>(); - namedList.add(HdfsDirectoryFactory.HDFS_HOME, "hdfs://localhost"); - hdfsBackupRepository.init(namedList); + public void testHdfsHomePropertySet() throws IOException { + try (HdfsBackupRepository hdfsBackupRepository = new HdfsBackupRepository()) { + NamedList namedList = new SimpleOrderedMap<>(); + namedList.add(HdfsDirectoryFactory.HDFS_HOME, "hdfs://localhost"); + hdfsBackupRepository.init(namedList); + } } @Test(expected = ClassCastException.class) - public void testCopyBufferSizeNonNumeric() { - HdfsBackupRepository hdfsBackupRepository = new HdfsBackupRepository(); - NamedList namedList = new SimpleOrderedMap<>(); - namedList.add("solr.hdfs.buffer.size", "xyz"); - hdfsBackupRepository.init(namedList); + public void testCopyBufferSizeNonNumeric() throws IOException { + try (HdfsBackupRepository hdfsBackupRepository = new HdfsBackupRepository()) { + NamedList namedList = new SimpleOrderedMap<>(); + namedList.add("solr.hdfs.buffer.size", "xyz"); + hdfsBackupRepository.init(namedList); + } } @Test(expected = ClassCastException.class) - public void testCopyBufferSizeWrongType() { - HdfsBackupRepository hdfsBackupRepository = new HdfsBackupRepository(); - NamedList namedList = new SimpleOrderedMap<>(); - namedList.add("solr.hdfs.buffer.size", "8192"); - hdfsBackupRepository.init(namedList); + public void testCopyBufferSizeWrongType() throws IOException { + try (HdfsBackupRepository hdfsBackupRepository = new HdfsBackupRepository()) { + NamedList namedList = new SimpleOrderedMap<>(); + namedList.add("solr.hdfs.buffer.size", "8192"); + hdfsBackupRepository.init(namedList); + } } @Test(expected = IllegalArgumentException.class) - public void testCopyBufferSizeNegative() { - HdfsBackupRepository hdfsBackupRepository = new HdfsBackupRepository(); - NamedList namedList = new SimpleOrderedMap<>(); - namedList.add("solr.hdfs.buffer.size", -1); - hdfsBackupRepository.init(namedList); + public void testCopyBufferSizeNegative() throws IOException { + try (HdfsBackupRepository hdfsBackupRepository = new HdfsBackupRepository()) { + NamedList namedList = new SimpleOrderedMap<>(); + namedList.add("solr.hdfs.buffer.size", -1); + 
hdfsBackupRepository.init(namedList); + } } @Test(expected = IllegalArgumentException.class) - public void testCopyBufferSizeZero() { - HdfsBackupRepository hdfsBackupRepository = new HdfsBackupRepository(); - NamedList namedList = new SimpleOrderedMap<>(); - namedList.add("solr.hdfs.buffer.size", 0); - hdfsBackupRepository.init(namedList); + public void testCopyBufferSizeZero() throws IOException { + try (HdfsBackupRepository hdfsBackupRepository = new HdfsBackupRepository()) { + NamedList namedList = new SimpleOrderedMap<>(); + namedList.add("solr.hdfs.buffer.size", 0); + hdfsBackupRepository.init(namedList); + } } @Test - public void testCopyBufferSet() { - HdfsBackupRepository hdfsBackupRepository = new HdfsBackupRepository(); - NamedList namedList = new SimpleOrderedMap<>(); - namedList.add(HdfsDirectoryFactory.HDFS_HOME, "hdfs://localhost"); - namedList.add("solr.hdfs.buffer.size", 32768); - hdfsBackupRepository.init(namedList); - assertEquals(hdfsBackupRepository.copyBufferSize, 32768); + public void testCopyBufferSet() throws IOException { + try (HdfsBackupRepository hdfsBackupRepository = new HdfsBackupRepository()) { + NamedList namedList = new SimpleOrderedMap<>(); + namedList.add(HdfsDirectoryFactory.HDFS_HOME, "hdfs://localhost"); + namedList.add("solr.hdfs.buffer.size", 32768); + hdfsBackupRepository.init(namedList); + assertEquals(hdfsBackupRepository.copyBufferSize, 32768); + } } @Test - public void testCopyBufferDefaultSize() { - HdfsBackupRepository hdfsBackupRepository = new HdfsBackupRepository(); - NamedList namedList = new SimpleOrderedMap<>(); - namedList.add(HdfsDirectoryFactory.HDFS_HOME, "hdfs://localhost"); - hdfsBackupRepository.init(namedList); - assertEquals(hdfsBackupRepository.copyBufferSize, HdfsDirectory.DEFAULT_BUFFER_SIZE); + public void testCopyBufferDefaultSize() throws IOException { + try (HdfsBackupRepository hdfsBackupRepository = new HdfsBackupRepository()) { + NamedList namedList = new SimpleOrderedMap<>(); + namedList.add(HdfsDirectoryFactory.HDFS_HOME, "hdfs://localhost"); + hdfsBackupRepository.init(namedList); + assertEquals(hdfsBackupRepository.copyBufferSize, HdfsDirectory.DEFAULT_BUFFER_SIZE); + } } } diff --git a/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCloudSnapshots.java b/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCloudSnapshots.java index f0bae3b8db42..21e7322c4252 100644 --- a/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCloudSnapshots.java +++ b/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCloudSnapshots.java @@ -105,7 +105,7 @@ public void testSnapshots() throws Exception { // Here the assumption is that Solr will spread the replicas uniformly across nodes. // If this is not true for some reason, then we will need to add some logic to find a // node with a single replica. - this.cluster.getRandomJetty(random()).stop(); + cluster.getRandomJetty(random()).stop(); // Sleep a bit for allowing ZK watch to fire. Thread.sleep(5000); @@ -244,7 +244,7 @@ public void testSnapshots() throws Exception { CollectionAdminRequest.DeleteSnapshot deleteSnap = new CollectionAdminRequest.DeleteSnapshot(collectionName, commitName); deleteSnap.process(solrClient); - // Wait for a while so that the clusterstate.json updates are propagated to the client side. + // Wait for a while so that the cluster state updates are propagated to the client side. 
Thread.sleep(2000); collectionState = solrClient.getZkStateReader().getClusterState().getCollection(collectionName); @@ -290,11 +290,13 @@ public void testSnapshots() throws Exception { } + @SuppressWarnings({"unchecked"}) private Collection listCollectionSnapshots(SolrClient adminClient, String collectionName) throws Exception { CollectionAdminRequest.ListSnapshots listSnapshots = new CollectionAdminRequest.ListSnapshots(collectionName); CollectionAdminResponse resp = listSnapshots.process(adminClient); assertTrue( resp.getResponse().get(SolrSnapshotManager.SNAPSHOTS_INFO) instanceof NamedList ); + @SuppressWarnings({"rawtypes"}) NamedList apiResult = (NamedList) resp.getResponse().get(SolrSnapshotManager.SNAPSHOTS_INFO); Collection result = new ArrayList<>(); @@ -308,8 +310,10 @@ private Collection listCollectionSnapshots(SolrClien private Collection listCoreSnapshots(SolrClient adminClient, String coreName) throws Exception { ListSnapshots req = new ListSnapshots(); req.setCoreName(coreName); + @SuppressWarnings({"rawtypes"}) NamedList resp = adminClient.request(req); assertTrue( resp.get(SolrSnapshotManager.SNAPSHOTS_INFO) instanceof NamedList ); + @SuppressWarnings({"rawtypes"}) NamedList apiResult = (NamedList) resp.get(SolrSnapshotManager.SNAPSHOTS_INFO); List result = new ArrayList<>(apiResult.size()); diff --git a/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCoreSnapshots.java b/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCoreSnapshots.java index e2be8f9da5bc..bcd1c303a810 100644 --- a/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCoreSnapshots.java +++ b/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCoreSnapshots.java @@ -285,8 +285,10 @@ private void deleteSnapshot(SolrClient adminClient, String coreName, String comm private Collection listSnapshots(SolrClient adminClient, String coreName) throws Exception { ListSnapshots req = new ListSnapshots(); req.setCoreName(coreName); + @SuppressWarnings({"rawtypes"}) NamedList resp = adminClient.request(req); assertTrue( resp.get("snapshots") instanceof NamedList ); + @SuppressWarnings({"rawtypes"}) NamedList apiResult = (NamedList) resp.get("snapshots"); List result = new ArrayList<>(apiResult.size()); diff --git a/solr/core/src/test/org/apache/solr/filestore/TestDistribPackageStore.java b/solr/core/src/test/org/apache/solr/filestore/TestDistribPackageStore.java index e7f7ab093e60..aaf938a43649 100644 --- a/solr/core/src/test/org/apache/solr/filestore/TestDistribPackageStore.java +++ b/solr/core/src/test/org/apache/solr/filestore/TestDistribPackageStore.java @@ -68,6 +68,7 @@ public void teardown() { System.clearProperty("enable.packages"); } + @SuppressWarnings({"unchecked"}) public void testPackageStoreManagement() throws Exception { MiniSolrCloudCluster cluster = configureCluster(4) @@ -119,6 +120,7 @@ public void testPackageStoreManagement() throws Exception { ) ); + @SuppressWarnings({"rawtypes"}) Map expected = Utils.makeMap( ":files:/package/mypkg/v1.0/runtimelibs.jar:name", "runtimelibs.jar", ":files:/package/mypkg/v1.0[0]:sha512", "d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420" @@ -142,8 +144,10 @@ public void testPackageStoreManagement() throws Exception { expected = Utils.makeMap( ":files:/package/mypkg/v1.0", (Predicate) o -> { + @SuppressWarnings({"rawtypes"}) List l = (List) o; assertEquals(2, l.size()); + @SuppressWarnings({"rawtypes"}) Set expectedKeys = 
ImmutableSet.of("runtimelibs_v2.jar", "runtimelibs.jar"); for (Object file : l) { if(! expectedKeys.contains(Utils.getObjectByPath(file, true, "name"))) return false; @@ -167,6 +171,7 @@ public void testPackageStoreManagement() throws Exception { } } + @SuppressWarnings({"unchecked", "rawtypes"}) public static void waitForAllNodesHaveFile(MiniSolrCloudCluster cluster, String path, Map expected , boolean verifyContent) throws Exception { for (JettySolrRunner jettySolrRunner : cluster.getJettySolrRunners()) { String baseUrl = jettySolrRunner.getBaseUrl().toString().replace("/solr", "/api"); @@ -188,6 +193,7 @@ public static void waitForAllNodesHaveFile(MiniSolrCloudCluster cluster, String } } + @SuppressWarnings({"rawtypes"}) static class Fetcher implements Callable { String url; JettySolrRunner jetty; @@ -209,13 +215,17 @@ public String toString() { } - public static NavigableObject assertResponseValues(int repeats, SolrClient client, SolrRequest req, Map vals) throws Exception { + public static NavigableObject assertResponseValues(int repeats, SolrClient client, + @SuppressWarnings({"rawtypes"})SolrRequest req, + @SuppressWarnings({"rawtypes"})Map vals) throws Exception { Callable callable = () -> req.process(client); return assertResponseValues(repeats, callable,vals); } - public static NavigableObject assertResponseValues(int repeats, Callable callable,Map vals) throws Exception { + @SuppressWarnings({"unchecked"}) + public static NavigableObject assertResponseValues(int repeats, Callable callable, + @SuppressWarnings({"rawtypes"})Map vals) throws Exception { NavigableObject rsp = null; for (int i = 0; i < repeats; i++) { @@ -229,11 +239,13 @@ public static NavigableObject assertResponseValues(int repeats, Callable key = StrUtils.split(k, '/'); Object val = entry.getValue(); + @SuppressWarnings({"rawtypes"}) Predicate p = val instanceof Predicate ? (Predicate) val : o -> { String v = o == null ? null : String.valueOf(o); return Objects.equals(val, o); diff --git a/solr/core/src/test/org/apache/solr/handler/AnalysisRequestHandlerTestBase.java b/solr/core/src/test/org/apache/solr/handler/AnalysisRequestHandlerTestBase.java index 14036ede07fe..0012c1168654 100644 --- a/solr/core/src/test/org/apache/solr/handler/AnalysisRequestHandlerTestBase.java +++ b/solr/core/src/test/org/apache/solr/handler/AnalysisRequestHandlerTestBase.java @@ -28,7 +28,7 @@ */ public abstract class AnalysisRequestHandlerTestBase extends SolrTestCaseJ4 { - protected void assertToken(NamedList token, TokenInfo info) { + protected void assertToken(@SuppressWarnings({"rawtypes"})NamedList token, TokenInfo info) { assertEquals(info.getText(), token.get("text")); if (info.getRawText() != null) { assertEquals(info.getRawText(), token.get("raw_text")); diff --git a/solr/core/src/test/org/apache/solr/handler/BackupStatusChecker.java b/solr/core/src/test/org/apache/solr/handler/BackupStatusChecker.java index 3a461b7ad83e..089ed59fa974 100644 --- a/solr/core/src/test/org/apache/solr/handler/BackupStatusChecker.java +++ b/solr/core/src/test/org/apache/solr/handler/BackupStatusChecker.java @@ -194,8 +194,10 @@ private String _checkBackupSuccess(final String backupName) throws Exception { final String label = (null == backupName ? 
"latest backup" : backupName); final SimpleSolrResponse rsp = new GenericSolrRequest(GenericSolrRequest.METHOD.GET, path, params("command", "details")).process(client); + @SuppressWarnings({"rawtypes"}) final NamedList data = rsp.getResponse(); log.info("Checking Status of {}: {}", label, data); + @SuppressWarnings({"unchecked"}) final NamedList backupData = (NamedList) data.findRecursive("details","backup"); if (null == backupData) { // no backup has finished yet @@ -266,8 +268,10 @@ public boolean checkBackupDeletionSuccess(final String backupName) throws Except assertNotNull("backumpName must not be null", backupName); final SimpleSolrResponse rsp = new GenericSolrRequest(GenericSolrRequest.METHOD.GET, path, params("command", "details")).process(client); + @SuppressWarnings({"rawtypes"}) final NamedList data = rsp.getResponse(); log.info("Checking Deletion Status of {}: {}", backupName, data); + @SuppressWarnings({"unchecked"}) final NamedList backupData = (NamedList) data.findRecursive("details","backup"); if (null == backupData || null == backupData.get("status") diff --git a/solr/core/src/test/org/apache/solr/handler/BinaryUpdateRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/BinaryUpdateRequestHandlerTest.java index 0c02109f5683..f1910542d0e0 100644 --- a/solr/core/src/test/org/apache/solr/handler/BinaryUpdateRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/BinaryUpdateRequestHandlerTest.java @@ -42,6 +42,7 @@ public static void beforeTests() throws Exception { } @Test + @SuppressWarnings({"rawtypes"}) public void testRequestParams() throws Exception { SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", "1"); @@ -53,21 +54,18 @@ public void testRequestParams() throws Exception { BinaryRequestWriter brw = new BinaryRequestWriter(); BufferingRequestProcessor p = new BufferingRequestProcessor(null); SolrQueryResponse rsp = new SolrQueryResponse(); - UpdateRequestHandler handler = new UpdateRequestHandler(); - handler.init(new NamedList()); - SolrQueryRequest req = req(); - ContentStreamLoader csl = handler.newLoader(req, p); - - RequestWriter.ContentWriter cw = brw.getContentWriter(ureq); - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - cw.write(baos); - ContentStreamBase.ByteArrayStream cs = new ContentStreamBase.ByteArrayStream(baos.toByteArray(), null, "application/javabin"); - csl.load(req, rsp, cs, p); - AddUpdateCommand add = p.addCommands.get(0); - System.out.println(add.solrDoc); - assertEquals(false, add.overwrite); - assertEquals(100, add.commitWithin); - - req.close(); + try (SolrQueryRequest req = req(); UpdateRequestHandler handler = new UpdateRequestHandler()) { + handler.init(new NamedList()); + ContentStreamLoader csl = handler.newLoader(req, p); + RequestWriter.ContentWriter cw = brw.getContentWriter(ureq); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + cw.write(baos); + ContentStreamBase.ByteArrayStream cs = new ContentStreamBase.ByteArrayStream(baos.toByteArray(), null, "application/javabin"); + csl.load(req, rsp, cs, p); + AddUpdateCommand add = p.addCommands.get(0); + System.out.println(add.solrDoc); + assertEquals(false, add.overwrite); + assertEquals(100, add.commitWithin); + } } } diff --git a/solr/core/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java index 34fb186c802b..6d98bb4cdc7c 100644 --- 
a/solr/core/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java @@ -55,6 +55,7 @@ public static void beforeClass() throws Exception { @Override @Before + @SuppressWarnings({"rawtypes"}) public void setUp() throws Exception { super.setUp(); handler = new DocumentAnalysisRequestHandler(); @@ -204,6 +205,7 @@ public Iterable getContentStreams() { * org.apache.solr.schema.IndexSchema)} */ @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void testHandleAnalysisRequest() throws Exception { SolrInputDocument document = new SolrInputDocument(); diff --git a/solr/core/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java index e3c9d07efa40..4d905e2c5b68 100644 --- a/solr/core/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java @@ -76,9 +76,13 @@ public void testPointField() throws Exception { request.addFieldType("pint"); request.setFieldValue("5"); + @SuppressWarnings({"rawtypes"}) NamedList nl = handler.handleAnalysisRequest(request, h.getCore().getLatestSchema()); + @SuppressWarnings({"rawtypes"}) NamedList pintNL = (NamedList)nl.get("field_types").get("pint"); + @SuppressWarnings({"rawtypes"}) NamedList indexNL = (NamedList)pintNL.get("index"); + @SuppressWarnings({"rawtypes"}) ArrayList analyzerNL = (ArrayList)indexNL.get("org.apache.solr.schema.FieldType$DefaultAnalyzer$1"); String text = (String)((NamedList)analyzerNL.get(0)).get("text"); assertEquals("5", text); @@ -169,6 +173,7 @@ public void testResolveAnalysisRequest() throws Exception { * org.apache.solr.schema.IndexSchema)} */ @Test + @SuppressWarnings({"unchecked"}) public void testHandleAnalysisRequest() throws Exception { FieldAnalysisRequest request = new FieldAnalysisRequest(); @@ -180,17 +185,22 @@ public void testHandleAnalysisRequest() throws Exception { request.setQuery("fox brown"); request.setShowMatch(true); + @SuppressWarnings({"rawtypes"}) NamedList result = handler.handleAnalysisRequest(request, h.getCore().getLatestSchema()); assertTrue("result is null and it shouldn't be", result != null); + @SuppressWarnings({"rawtypes"}) NamedList fieldTypes = result.get("field_types"); assertNotNull("field_types should never be null", fieldTypes); + @SuppressWarnings({"rawtypes"}) NamedList textType = fieldTypes.get("text"); assertNotNull("expecting result for field type 'text'", textType); + @SuppressWarnings({"rawtypes"}) NamedList<List<NamedList>> indexPart = textType.get("index"); assertNotNull("expecting an index token analysis for field type 'text'", indexPart); + @SuppressWarnings({"rawtypes"}) List tokenList = indexPart.get("org.apache.lucene.analysis.standard.StandardTokenizer"); assertNotNull("Expecting StandardTokenizer analysis breakdown", tokenList); assertEquals(tokenList.size(), 10); @@ -240,6 +250,7 @@ public void testHandleAnalysisRequest() throws Exception { assertToken(tokenList.get(6), new TokenInfo("brown", null, "", 39, 44, 9, new int[]{9,9,9,9}, null, true)); assertToken(tokenList.get(7), new TokenInfo("dog", null, "", 45, 49, 10, new int[]{10,10,10,10}, null, false)); + @SuppressWarnings({"rawtypes"}) NamedList<List<NamedList>> queryPart = textType.get("query"); assertNotNull("expecting a query token analysis for field type 'text'", queryPart); @@ -264,6 +275,7 @@
assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 0, 3, 1, new int[]{1,1,1,1}, null, false)); assertToken(tokenList.get(1), new TokenInfo("brown", null, "", 4, 9, 2, new int[]{2,2,2,2}, null, false)); + @SuppressWarnings({"rawtypes"}) NamedList nameTextType = fieldTypes.get("nametext"); assertNotNull("expecting result for field type 'nametext'", nameTextType); @@ -290,9 +302,11 @@ public void testHandleAnalysisRequest() throws Exception { assertToken(tokenList.get(0), new TokenInfo("fox", null, "word", 0, 3, 1, new int[]{1}, null, false)); assertToken(tokenList.get(1), new TokenInfo("brown", null, "word", 4, 9, 2, new int[]{2}, null, false)); + @SuppressWarnings({"rawtypes"}) NamedList fieldNames = result.get("field_names"); assertNotNull("field_names should never be null", fieldNames); + @SuppressWarnings({"rawtypes"}) NamedList whitetok = fieldNames.get("whitetok"); assertNotNull("expecting result for field 'whitetok'", whitetok); @@ -322,6 +336,7 @@ public void testHandleAnalysisRequest() throws Exception { assertToken(tokenList.get(0), new TokenInfo("fox", null, "word", 0, 3, 1, new int[]{1}, null, false)); assertToken(tokenList.get(1), new TokenInfo("brown", null, "word", 4, 9, 2, new int[]{2}, null, false)); + @SuppressWarnings({"rawtypes"}) NamedList keywordtok = fieldNames.get("keywordtok"); assertNotNull("expecting result for field 'keywordtok'", keywordtok); @@ -351,20 +366,25 @@ public void testCharFilterAnalysis() throws Exception { request.setFieldValue("whátëvêr"); request.setShowMatch(false); + @SuppressWarnings({"rawtypes"}) NamedList result = handler.handleAnalysisRequest(request, h.getCore().getLatestSchema()); assertTrue("result is null and it shouldn't be", result != null); + @SuppressWarnings({"unchecked", "rawtypes"}) NamedList fieldTypes = result.get("field_types"); assertNotNull("field_types should never be null", fieldTypes); + @SuppressWarnings({"unchecked", "rawtypes"}) NamedList textType = fieldTypes.get("charfilthtmlmap"); assertNotNull("expecting result for field type 'charfilthtmlmap'", textType); + @SuppressWarnings({"rawtypes"}) NamedList indexPart = textType.get("index"); assertNotNull("expecting an index token analysis for field type 'charfilthtmlmap'", indexPart); assertEquals("\n\nwhátëvêr\n\n", indexPart.get("org.apache.lucene.analysis.charfilter.HTMLStripCharFilter")); assertEquals("\n\nwhatever\n\n", indexPart.get("org.apache.lucene.analysis.charfilter.MappingCharFilter")); + @SuppressWarnings({"unchecked", "rawtypes"}) List tokenList = (List)indexPart.get(MockTokenizer.class.getName()); assertNotNull("Expecting MockTokenizer analysis breakdown", tokenList); assertEquals(tokenList.size(), 1); @@ -379,17 +399,22 @@ public void testPositionHistoryWithWDGF() throws Exception { request.setFieldValue("hi, 3456-12 a Test"); request.setShowMatch(false); + @SuppressWarnings({"rawtypes"}) NamedList result = handler.handleAnalysisRequest(request, h.getCore().getLatestSchema()); assertTrue("result is null and it shouldn't be", result != null); + @SuppressWarnings({"unchecked", "rawtypes"}) NamedList fieldTypes = result.get("field_types"); assertNotNull("field_types should never be null", fieldTypes); + @SuppressWarnings({"unchecked", "rawtypes"}) NamedList textType = fieldTypes.get("skutype1"); assertNotNull("expecting result for field type 'skutype1'", textType); + @SuppressWarnings({"unchecked", "rawtypes"}) NamedList<List<NamedList>> indexPart = textType.get("index"); assertNotNull("expecting an index token analysis for field type 'skutype1'", indexPart); +
@SuppressWarnings({"rawtypes"}) List tokenList = indexPart.get(MockTokenizer.class.getName()); assertNotNull("Expecting MockTokenizer analysis breakdown", tokenList); assertEquals(4, tokenList.size()); @@ -423,14 +448,17 @@ public void testSpatial() throws Exception { request.addFieldType("location_rpt"); request.setFieldValue("MULTIPOINT ((10 40), (40 30), (20 20), (30 10))"); + @SuppressWarnings({"rawtypes"}) NamedList result = handler.handleAnalysisRequest(request, h.getCore().getLatestSchema()); + @SuppressWarnings({"unchecked", "rawtypes"}) NamedList<List<NamedList>> tokens = (NamedList<List<NamedList>>) ((NamedList)result.get("field_types").get("location_rpt")).get("index"); + @SuppressWarnings({"rawtypes"}) List tokenList = tokens.get("org.apache.lucene.spatial.prefix.PrefixTreeStrategy$ShapeTokenStream"); List vals = new ArrayList<>(tokenList.size()); - for(NamedList v : tokenList) { + for(@SuppressWarnings({"rawtypes"})NamedList v : tokenList) { vals.add( (String)v.get("text") ); } Collections.sort(vals); @@ -462,8 +490,10 @@ public TokenStream create(TokenStream input) { ); fieldType.setIndexAnalyzer(analyzer); + @SuppressWarnings({"rawtypes"}) NamedList result = handler.analyzeValues(request, fieldType, "fieldNameUnused"); // just test that we see "900" in the flags attribute here + @SuppressWarnings({"unchecked", "rawtypes"}) List tokenInfoList = (List) result.findRecursive("index", CustomTokenFilter.class.getName()); // '1' from CustomTokenFilter plus 900 from CustomFlagsAttributeImpl. assertEquals(901, tokenInfoList.get(0).get("org.apache.lucene.analysis.tokenattributes.FlagsAttribute#flags")); diff --git a/solr/core/src/test/org/apache/solr/handler/JsonLoaderTest.java b/solr/core/src/test/org/apache/solr/handler/JsonLoaderTest.java index 6c3a23a7d3d8..cc917765f156 100644 --- a/solr/core/src/test/org/apache/solr/handler/JsonLoaderTest.java +++ b/solr/core/src/test/org/apache/solr/handler/JsonLoaderTest.java @@ -294,6 +294,7 @@ public void testJsonDocFormat() throws Exception{ String content = (String) p.addCommands.get(0).solrDoc.getFieldValue("_src_"); assertNotNull(content); + @SuppressWarnings({"rawtypes"}) Map obj = (Map) Utils.fromJSONString(content); assertEquals(Boolean.TRUE, obj.get("bool")); assertEquals("v0", obj.get("f0")); @@ -418,6 +419,7 @@ public void testFewParentsJsonDoc() throws Exception { assertOnlyValue("i am the parent", parent, "name"); assertOnlyValue("parent", parent, "cat"); + @SuppressWarnings({"unchecked"}) List childDocs1 = (List) ((parent.getField("children")).getValue()); assertEquals(2, childDocs1.size()); @@ -434,6 +436,7 @@ public void testFewParentsJsonDoc() throws Exception { assertOnlyValue("test-new-label", child2, "test_s"); assertOnlyValue("child", child2, "cat"); + @SuppressWarnings({"unchecked"}) List childDocs2 = (List) ((child2.getField("grandchildren")).getValue()); assertEquals(1, childDocs2.size()); @@ -944,6 +947,7 @@ public void testChildDocs() throws Exception { SolrInputDocument one = add.solrDoc; assertEquals("1", one.getFieldValue("id")); + @SuppressWarnings({"unchecked"}) List children = (List) one.getFieldValues("children"); SolrInputDocument two = children.get(0); assertEquals("2", two.getFieldValue("id")); diff --git a/solr/core/src/test/org/apache/solr/handler/MoreLikeThisHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/MoreLikeThisHandlerTest.java index 6b80014c0fb8..05f83d819895 100644 --- a/solr/core/src/test/org/apache/solr/handler/MoreLikeThisHandlerTest.java +++ 
b/solr/core/src/test/org/apache/solr/handler/MoreLikeThisHandlerTest.java @@ -43,8 +43,7 @@ public static void moreLikeThisBeforeClass() throws Exception { public void testInterface() throws Exception { SolrCore core = h.getCore(); - MoreLikeThisHandler mlt = new MoreLikeThisHandler(); - + ModifiableSolrParams params = new ModifiableSolrParams(); assertU(adoc("id","42","name","Tom Cruise","subword","Top Gun","subword","Risky Business","subword","The Color of Money","subword","Minority Report","subword", "Days of Thunder","subword", "Eyes Wide Shut","subword", "Far and Away", "foo_ti","10")); @@ -63,7 +62,8 @@ public void testInterface() throws Exception // requires 'q' or a single content stream SolrException ex = expectThrows(SolrException.class, () -> { - try (SolrQueryRequestBase req = new SolrQueryRequestBase(core, params) {}) { + try (MoreLikeThisHandler mlt = new MoreLikeThisHandler(); + SolrQueryRequestBase req = new SolrQueryRequestBase(core, params) {}) { mlt.handleRequestBody(req, new SolrQueryResponse()); } }); @@ -72,7 +72,8 @@ public void testInterface() throws Exception // requires a single content stream (more than one is not supported). ex = expectThrows(SolrException.class, () -> { - try (SolrQueryRequestBase req = new SolrQueryRequestBase(core, params) {}) { + try (MoreLikeThisHandler mlt = new MoreLikeThisHandler(); + SolrQueryRequestBase req = new SolrQueryRequestBase(core, params) {}) { ArrayList streams = new ArrayList<>(2); streams.add(new ContentStreamBase.StringStream("hello")); streams.add(new ContentStreamBase.StringStream("there")); @@ -153,8 +154,6 @@ public void testInterface() throws Exception public void testMultifieldSimilarity() throws Exception { SolrCore core = h.getCore(); - MoreLikeThisHandler mlt = new MoreLikeThisHandler(); - ModifiableSolrParams params = new ModifiableSolrParams(); assertU(adoc("id", "1", "name", "aaa bbb ccc", "subword", " zzz")); diff --git a/solr/core/src/test/org/apache/solr/handler/PingRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/PingRequestHandlerTest.java index 3b3edb3ba826..dd8aead99e14 100644 --- a/solr/core/src/test/org/apache/solr/handler/PingRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/PingRequestHandlerTest.java @@ -50,6 +50,7 @@ public static void beforeClass() throws Exception { } @Before + @SuppressWarnings({"unchecked"}) public void before() throws IOException { // by default, use relative file in dataDir healthcheckFile = new File(initAndGetDataDir(), fileName); @@ -63,6 +64,7 @@ public void before() throws IOException { if (healthcheckFile.exists()) FileUtils.forceDelete(healthcheckFile); handler = new PingRequestHandler(); + @SuppressWarnings({"rawtypes"}) NamedList initParams = new NamedList(); initParams.add(PingRequestHandler.HEALTHCHECK_FILE_PARAM, fileNameParam); @@ -70,6 +72,7 @@ public void before() throws IOException { handler.inform(h.getCore()); } + @SuppressWarnings({"rawtypes"}) public void testPingWithNoHealthCheck() throws Exception { // for this test, we don't want any healthcheck file configured at all diff --git a/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java b/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java index c1cfecbdd037..c6c1dfe23764 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java +++ b/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java @@ -177,6 +177,7 @@ public static void postData(CloudSolrClient cloudClient, String baseUrl, String entity = 
cloudClient.getLbClient().getHttpClient().execute(httpPost).getEntity(); try { response = EntityUtils.toString(entity, StandardCharsets.UTF_8); + @SuppressWarnings({"rawtypes"}) Map m = (Map) fromJSONString(response); assertFalse("Error in posting blob " + m.toString(), m.containsKey("error")); } catch (JSONParser.ParseException e) { diff --git a/solr/core/src/test/org/apache/solr/handler/TestConfigReload.java b/solr/core/src/test/org/apache/solr/handler/TestConfigReload.java index f95d14e7dee1..49ec80b2bb02 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestConfigReload.java +++ b/solr/core/src/test/org/apache/solr/handler/TestConfigReload.java @@ -64,7 +64,9 @@ public void test() throws Exception { private void reloadTest() throws Exception { SolrZkClient client = cloudClient.getZkStateReader().getZkClient(); - log.info("live_nodes_count : " + cloudClient.getZkStateReader().getClusterState().getLiveNodes()); + if (log.isInfoEnabled()) { + log.info("live_nodes_count : {}", cloudClient.getZkStateReader().getClusterState().getLiveNodes()); + } String confPath = ZkConfigManager.CONFIGS_ZKNODE+"/conf1/"; // checkConfReload(client, confPath + ConfigOverlay.RESOURCE_NAME, "overlay"); checkConfReload(client, confPath + SolrConfig.DEFAULT_CONF_FILE,"config", "/config"); @@ -85,7 +87,9 @@ private void checkConfReload(SolrZkClient client, String resPath, String name, S Stat newStat = client.setData(resPath, data, true); client.setData("/configs/conf1", new byte[]{1}, true); assertTrue(newStat.getVersion() > stat.getVersion()); - log.info("new_version "+ newStat.getVersion()); + if (log.isInfoEnabled()) { + log.info("new_version {}", newStat.getVersion()); + } Integer newVersion = newStat.getVersion(); long maxTimeoutSeconds = 60; DocCollection coll = cloudClient.getZkStateReader().getClusterState().getCollection("collection1"); @@ -110,6 +114,7 @@ private void checkConfReload(SolrZkClient client, String resPath, String name, S assertEquals(StrUtils.formatString("tried these servers {0} succeeded only in {1} ", urls, succeeded) , urls.size(), succeeded.size()); } + @SuppressWarnings({"rawtypes"}) private LinkedHashMapWriter getAsMap(String uri) throws Exception { HttpGet get = new HttpGet(uri) ; HttpEntity entity = null; diff --git a/solr/core/src/test/org/apache/solr/handler/TestCoreBackup.java b/solr/core/src/test/org/apache/solr/handler/TestCoreBackup.java index 407ef4645368..ee872539dbdb 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestCoreBackup.java +++ b/solr/core/src/test/org/apache/solr/handler/TestCoreBackup.java @@ -63,14 +63,15 @@ public void testBackupWithDocsNotSearchable() throws Exception { String snapshotName = TestUtil.randomSimpleString(random(), 1, 5); final CoreContainer cores = h.getCoreContainer(); - final CoreAdminHandler admin = new CoreAdminHandler(cores); - SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, "name", snapshotName, "location", location) - , resp); - assertNull("Backup should have succeeded", resp.getException()); - simpleBackupCheck(new File(location, "snapshot." 
+ snapshotName), 2); + try (final CoreAdminHandler admin = new CoreAdminHandler(cores)) { + SolrQueryResponse resp = new SolrQueryResponse(); + admin.handleRequestBody + (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), + "core", DEFAULT_TEST_COLLECTION_NAME, "name", snapshotName, "location", location) + , resp); + assertNull("Backup should have succeeded", resp.getException()); + simpleBackupCheck(new File(location, "snapshot." + snapshotName), 2); + } } public void testBackupBeforeFirstCommit() throws Exception { @@ -169,6 +170,7 @@ public void testBackupBeforeFirstCommit() throws Exception { 0, initialEmptyIndexSegmentFileName); } + admin.close(); } /** @@ -291,7 +293,7 @@ public void testBackupAfterSoftCommit() throws Exception { 1, oneDocSegmentFile); } - + admin.close(); } /** @@ -317,7 +319,7 @@ public void testDemoWhyBackupCodeShouldNeverUseIndexCommitFromSearcher() throws // sanity check this is the initial commit.. final IndexCommit commit = s.getIndexReader().getIndexCommit(); - assertEquals(EXPECTED_GEN_OF_EMPTY_INDEX, (long) commit.getGeneration()); + assertEquals(EXPECTED_GEN_OF_EMPTY_INDEX, commit.getGeneration()); return commit; }); @@ -336,7 +338,7 @@ public void testDemoWhyBackupCodeShouldNeverUseIndexCommitFromSearcher() throws final IndexCommit commit = s.getIndexReader().getIndexCommit(); // WTF: how/why does this reader still have the same commit generation as before ? ? ? ? ? assertEquals("WTF: This Reader (claims) the same generation as our previous pre-softCommif (empty) reader", - EXPECTED_GEN_OF_EMPTY_INDEX, (long) commit.getGeneration()); + EXPECTED_GEN_OF_EMPTY_INDEX, commit.getGeneration()); return commit; }); diff --git a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java index 8cc19d055238..b64df5d26ed8 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java +++ b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java @@ -205,6 +205,7 @@ static int index(SolrClient s, Object... 
fields) throws Exception { return s.add(doc).getStatus(); } + @SuppressWarnings({"rawtypes"}) NamedList query(String query, SolrClient s) throws SolrServerException, IOException { ModifiableSolrParams params = new ModifiableSolrParams(); @@ -216,21 +217,24 @@ NamedList query(String query, SolrClient s) throws SolrServerException, IOExcept } /** will sleep up to 30 seconds, looking for expectedDocCount */ + @SuppressWarnings({"rawtypes"}) private NamedList rQuery(int expectedDocCount, String query, SolrClient client) throws Exception { int timeSlept = 0; NamedList res = query(query, client); while (expectedDocCount != numFound(res) && timeSlept < 30000) { - log.info("Waiting for " + expectedDocCount + " docs"); + log.info("Waiting for {} docs", expectedDocCount); timeSlept += 100; Thread.sleep(100); res = query(query, client); } - log.info("Waited for {}ms and found {} docs", timeSlept, numFound(res)); + if (log.isInfoEnabled()) { + log.info("Waited for {}ms and found {} docs", timeSlept, numFound(res)); + } return res; } - private long numFound(NamedList res) { + private long numFound(@SuppressWarnings({"rawtypes"})NamedList res) { return ((SolrDocumentList) res.get("response")).getNumFound(); } @@ -327,10 +331,11 @@ public void doTestDetails() throws Exception { if (i > 0) { rQuery(i, "*:*", slaveClient); + @SuppressWarnings({"rawtypes"}) List replicatedAtCount = (List) ((NamedList) details.get("slave")).get("indexReplicatedAtList"); int tries = 0; while ((replicatedAtCount == null || replicatedAtCount.size() < i) && tries++ < 5) { - Thread.currentThread().sleep(1000); + Thread.sleep(1000); details = getDetails(slaveClient); replicatedAtCount = (List) ((NamedList) details.get("slave")).get("indexReplicatedAtList"); } @@ -512,11 +517,13 @@ public void doTestIndexAndConfigReplication() throws Exception { masterClient.commit(); + @SuppressWarnings({"rawtypes"}) NamedList masterQueryRsp = rQuery(nDocs, "*:*", masterClient); SolrDocumentList masterQueryResult = (SolrDocumentList) masterQueryRsp.get("response"); assertEquals(nDocs, numFound(masterQueryRsp)); //get docs from slave and check if number is equal to master + @SuppressWarnings({"rawtypes"}) NamedList slaveQueryRsp = rQuery(nDocs, "*:*", slaveClient); SolrDocumentList slaveQueryResult = (SolrDocumentList) slaveQueryRsp.get("response"); assertEquals(nDocs, numFound(slaveQueryRsp)); @@ -589,11 +596,13 @@ public void doTestStopPoll() throws Exception { masterClient.commit(); + @SuppressWarnings({"rawtypes"}) NamedList masterQueryRsp = rQuery(nDocs, "*:*", masterClient); SolrDocumentList masterQueryResult = (SolrDocumentList) masterQueryRsp.get("response"); assertEquals(nDocs, numFound(masterQueryRsp)); //get docs from slave and check if number is equal to master + @SuppressWarnings({"rawtypes"}) NamedList slaveQueryRsp = rQuery(nDocs, "*:*", slaveClient); SolrDocumentList slaveQueryResult = (SolrDocumentList) slaveQueryRsp.get("response"); assertEquals(nDocs, numFound(slaveQueryRsp)); @@ -651,11 +660,13 @@ public void doTestIndexFetchOnMasterRestart() throws Exception { masterClient.commit(); + @SuppressWarnings({"rawtypes"}) NamedList masterQueryRsp = rQuery(nDocs, "*:*", masterClient); SolrDocumentList masterQueryResult = (SolrDocumentList) masterQueryRsp.get("response"); assertEquals(nDocs, numFound(masterQueryRsp)); //get docs from slave and check if number is equal to master + @SuppressWarnings({"rawtypes"}) NamedList slaveQueryRsp = rQuery(nDocs, "*:*", slaveClient); SolrDocumentList slaveQueryResult = (SolrDocumentList) 
slaveQueryRsp.get("response"); assertEquals(nDocs, numFound(slaveQueryRsp)); @@ -705,7 +716,9 @@ public void doTestIndexFetchOnMasterRestart() throws Exception { assertEquals(1, Integer.parseInt(getStringOrNull(slaveDetails,"timesIndexReplicated")) - failed); break; } catch (NumberFormatException | AssertionError notYet) { - log.info((retries+1)+"th attempt failure on " + notYet+" details are "+slaveDetails); + if (log.isInfoEnabled()) { + log.info("{}th attempt failure on {} details are {}", retries + 1, notYet, slaveDetails); // logOk + } if (retries>9) { log.error("giving up: ", notYet); throw notYet; @@ -744,7 +757,9 @@ private NamedList getSlaveDetails() throws SolrServerException, IOExcept QueryResponse response = slaveClient.query(params); // details/slave/timesIndexReplicated + @SuppressWarnings({"unchecked"}) NamedList details = (NamedList) response.getResponse().get("details"); + @SuppressWarnings({"unchecked"}) NamedList slave = (NamedList) details.get("slave"); return slave; } @@ -774,6 +789,7 @@ public void doTestIndexFetchWithMasterUrl() throws Exception { masterClient.commit(); + @SuppressWarnings({"rawtypes"}) NamedList masterQueryRsp = rQuery(nDocs, "*:*", masterClient); SolrDocumentList masterQueryResult = (SolrDocumentList) masterQueryRsp.get("response"); assertEquals(nDocs, masterQueryResult.getNumFound()); @@ -786,6 +802,7 @@ public void doTestIndexFetchWithMasterUrl() throws Exception { stream.close(); //get docs from slave and check if number is equal to master + @SuppressWarnings({"rawtypes"}) NamedList slaveQueryRsp = rQuery(nDocs, "*:*", slaveClient); SolrDocumentList slaveQueryResult = (SolrDocumentList) slaveQueryRsp.get("response"); assertEquals(nDocs, slaveQueryResult.getNumFound()); @@ -932,6 +949,7 @@ public void doTestStressReplication() throws Exception { totalDocs += docs; masterClient.commit(); + @SuppressWarnings({"rawtypes"}) NamedList masterQueryRsp = rQuery(totalDocs, "*:*", masterClient); SolrDocumentList masterQueryResult = (SolrDocumentList) masterQueryRsp .get("response"); @@ -945,6 +963,7 @@ public void doTestStressReplication() throws Exception { } // get docs from slave and check if number is equal to master + @SuppressWarnings({"rawtypes"}) NamedList slaveQueryRsp = rQuery(totalDocs, "*:*", slaveClient); SolrDocumentList slaveQueryResult = (SolrDocumentList) slaveQueryRsp .get("response"); @@ -1112,6 +1131,7 @@ public void doTestRepeater() throws Exception { private void assertVersions(SolrClient client1, SolrClient client2) throws Exception { NamedList details = getDetails(client1); + @SuppressWarnings({"unchecked"}) ArrayList<NamedList<Object>> commits = (ArrayList<NamedList<Object>>) details.get("commits"); Long maxVersionClient1 = getVersion(client1); Long maxVersionClient2 = getVersion(client2); @@ -1138,6 +1158,7 @@ private void assertVersions(SolrClient client1, SolrClient client2) throws Excep assertEquals(maxVersionClient2, version); } + @SuppressWarnings({"unchecked"}) private Long getVersion(SolrClient client) throws Exception { NamedList details; ArrayList<NamedList<Object>> commits; @@ -1197,6 +1218,7 @@ public void doTestReplicateAfterStartup() throws Exception { masterClient.commit(); + @SuppressWarnings({"rawtypes"}) NamedList masterQueryRsp = rQuery(nDocs, "*:*", masterClient); SolrDocumentList masterQueryResult = (SolrDocumentList) masterQueryRsp.get("response"); assertEquals(nDocs, masterQueryResult.getNumFound()); @@ -1211,6 +1233,7 @@ public void doTestReplicateAfterStartup() throws Exception { slaveClient = createNewSolrClient(slaveJetty.getLocalPort()); //get docs from
slave and check if number is equal to master + @SuppressWarnings({"rawtypes"}) NamedList slaveQueryRsp = rQuery(nDocs, "*:*", slaveClient); SolrDocumentList slaveQueryResult = (SolrDocumentList) slaveQueryRsp.get("response"); assertEquals(nDocs, slaveQueryResult.getNumFound()); @@ -1257,6 +1280,7 @@ public void doTestReplicateAfterStartupWithNoActivity() throws Exception { // masterClient = createNewSolrClient(masterJetty.getLocalPort()); + @SuppressWarnings({"rawtypes"}) NamedList masterQueryRsp = rQuery(nDocs, "*:*", masterClient); SolrDocumentList masterQueryResult = (SolrDocumentList) masterQueryRsp .get("response"); @@ -1271,6 +1295,7 @@ public void doTestReplicateAfterStartupWithNoActivity() throws Exception { slaveClient = createNewSolrClient(slaveJetty.getLocalPort()); // get docs from slave and check if number is equal to master + @SuppressWarnings({"rawtypes"}) NamedList slaveQueryRsp = rQuery(nDocs, "*:*", slaveClient); SolrDocumentList slaveQueryResult = (SolrDocumentList) slaveQueryRsp .get("response"); @@ -1310,6 +1335,7 @@ public void doTestReplicateAfterCoreReload() throws Exception { masterClient.commit(); + @SuppressWarnings({"rawtypes"}) NamedList masterQueryRsp = rQuery(docs, "*:*", masterClient); SolrDocumentList masterQueryResult = (SolrDocumentList) masterQueryRsp.get("response"); assertEquals(docs, masterQueryResult.getNumFound()); @@ -1323,6 +1349,7 @@ public void doTestReplicateAfterCoreReload() throws Exception { slaveClient = createNewSolrClient(slaveJetty.getLocalPort()); //get docs from slave and check if number is equal to master + @SuppressWarnings({"rawtypes"}) NamedList slaveQueryRsp = rQuery(docs, "*:*", slaveClient); SolrDocumentList slaveQueryResult = (SolrDocumentList) slaveQueryRsp.get("response"); assertEquals(docs, slaveQueryResult.getNumFound()); @@ -1342,6 +1369,7 @@ public void doTestReplicateAfterCoreReload() throws Exception { masterClient.commit(); + @SuppressWarnings({"rawtypes"}) NamedList resp = rQuery(docs + 2, "*:*", masterClient); masterQueryResult = (SolrDocumentList) resp.get("response"); assertEquals(docs + 2, masterQueryResult.getNumFound()); @@ -1364,11 +1392,13 @@ public void doTestIndexAndConfigAliasReplication() throws Exception { masterClient.commit(); + @SuppressWarnings({"rawtypes"}) NamedList masterQueryRsp = rQuery(nDocs, "*:*", masterClient); SolrDocumentList masterQueryResult = (SolrDocumentList) masterQueryRsp.get("response"); assertEquals(nDocs, masterQueryResult.getNumFound()); //get docs from slave and check if number is equal to master + @SuppressWarnings({"rawtypes"}) NamedList slaveQueryRsp = rQuery(nDocs, "*:*", slaveClient); SolrDocumentList slaveQueryResult = (SolrDocumentList) slaveQueryRsp.get("response"); @@ -1425,10 +1455,12 @@ public void doTestIndexAndConfigAliasReplication() throws Exception { // wait for slave to reload core by watching updated startTime watchCoreStartAt(slaveClient, 30*1000, slaveStartTime); + @SuppressWarnings({"rawtypes"}) NamedList masterQueryRsp2 = rQuery(1, "id:2000", masterClient); SolrDocumentList masterQueryResult2 = (SolrDocumentList) masterQueryRsp2.get("response"); assertEquals(1, masterQueryResult2.getNumFound()); + @SuppressWarnings({"rawtypes"}) NamedList slaveQueryRsp2 = rQuery(1, "id:2000", slaveClient); SolrDocumentList slaveQueryResult2 = (SolrDocumentList) slaveQueryRsp2.get("response"); assertEquals(1, slaveQueryResult2.getNumFound()); @@ -1498,6 +1530,7 @@ public void testRateLimitedReplication() throws Exception { new Thread(new AddExtraDocs(masterClient, 
totalDocs)).start(); //Wait and make sure that it actually replicated correctly. + @SuppressWarnings({"rawtypes"}) NamedList slaveQueryRsp = rQuery(totalDocs, "*:*", slaveClient); SolrDocumentList slaveQueryResult = (SolrDocumentList) slaveQueryRsp.get("response"); assertEquals(totalDocs, slaveQueryResult.getNumFound()); @@ -1507,7 +1540,8 @@ public void testRateLimitedReplication() throws Exception { long timeTakenInSeconds = TimeUnit.SECONDS.convert(timeTaken, TimeUnit.NANOSECONDS); //Let's make sure it took more than approximateTimeInSeconds to make sure that it was throttled - log.info("approximateTimeInSeconds = " + approximateTimeInSeconds + " timeTakenInSeconds = " + timeTakenInSeconds); + log.info("approximateTimeInSeconds = {} timeTakenInSeconds = {}" + , approximateTimeInSeconds, timeTakenInSeconds); assertTrue(timeTakenInSeconds - approximateTimeInSeconds > 0); } @@ -1705,6 +1739,7 @@ private Date watchCoreStartAt(SolrClient client, final long timeout, QueryRequest req = new QueryRequest(p); req.setPath("/admin/cores"); try { + @SuppressWarnings({"rawtypes"}) NamedList data = adminClient.request(req); for (String k : new String[]{"status", "collection1"}) { Object o = data.get(k); @@ -1731,7 +1766,7 @@ private Date watchCoreStartAt(SolrClient client, final long timeout, } } - private void assertReplicationResponseSucceeded(NamedList response) { + private void assertReplicationResponseSucceeded(@SuppressWarnings({"rawtypes"})NamedList response) { assertNotNull("null response from server", response); assertNotNull("Expected replication response to have 'status' field", response.get("status")); assertEquals("OK", response.get("status")); diff --git a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerDiskOverFlow.java b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerDiskOverFlow.java index 9b997d50e60c..ba4e4522f471 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerDiskOverFlow.java +++ b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerDiskOverFlow.java @@ -205,11 +205,14 @@ public boolean getAsBoolean() { .add("qt", "/replication") .add("command", ReplicationHandler.CMD_DETAILS) ); - log.info("DETAILS" + Utils.writeJson(response, new StringWriter(), true).toString()); + if (log.isInfoEnabled()) { + log.info("DETAILS {}", Utils.writeJson(response, new StringWriter(), true).toString()); + } assertEquals("slave's clearedLocalIndexFirst (from rep details)", "true", response._getStr("details/slave/clearedLocalIndexFirst", null)); } + @SuppressWarnings({"unchecked", "rawtypes"}) private long indexDocs(SolrClient client, int totalDocs, int start) throws Exception { for (int i = 0; i < totalDocs; i++) TestReplicationHandler.index(client, "id", i + start, "name", TestUtil.randomSimpleString(random(), 1000, 5000)); diff --git a/solr/core/src/test/org/apache/solr/handler/TestReqParamsAPI.java b/solr/core/src/test/org/apache/solr/handler/TestReqParamsAPI.java index da990da26c85..5c52bf419f9b 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestReqParamsAPI.java +++ b/solr/core/src/test/org/apache/solr/handler/TestReqParamsAPI.java @@ -78,6 +78,7 @@ public void test() throws Exception { } } + @SuppressWarnings({"rawtypes"}) private void testReqParams() throws Exception { CloudSolrClient cloudClient = cluster.getSolrClient(); DocCollection coll = cloudClient.getZkStateReader().getClusterState().getCollection(COLL_NAME); @@ -282,6 +283,7 @@ private void testReqParams() throws Exception { compareValues(result, new 
Predicate() { @Override public boolean test(Object o) { + @SuppressWarnings({"rawtypes"}) List l = (List) o; return l.contains("first") && l.contains("second"); } diff --git a/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java b/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java index e858c2fd50aa..96555ca5053b 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java +++ b/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java @@ -1439,8 +1439,7 @@ public void testAggregatesWithoutGrouping() throws Exception { // Test zero hits sParams = mapParams(CommonParams.QT, "/sql", "stmt", "select count(*), sum(a_i), min(a_i), max(a_i), cast(avg(1.0 * a_i) as float), sum(a_f), " + - "min(a_f), max(a_f), avg(a_f) from collection1 where a_s = 'blah'"); - + "min(a_f), max(a_f), avg(a_f) from collection1 where a_s = 'blah'"); tuples = getTuples(sParams, baseUrl); @@ -1873,13 +1872,14 @@ public boolean assertDouble(Tuple tuple, String fieldName, double d) throws Exce return true; } - protected boolean assertMaps(List maps, int... ids) throws Exception { + protected boolean assertMaps(@SuppressWarnings({"rawtypes"})List maps, int... ids) throws Exception { if(maps.size() != ids.length) { throw new Exception("Expected id count != actual map count:"+ids.length+":"+maps.size()); } int i=0; for(int val : ids) { + @SuppressWarnings({"rawtypes"}) Map t = maps.get(i); String tip = (String)t.get("id"); if(!tip.equals(Integer.toString(val))) { diff --git a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java index 5b6c4f1b0234..662354a9c6bc 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java +++ b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java @@ -109,6 +109,7 @@ private void testReqParams() throws Exception{ TestSolrConfigHandler.runConfigCommand(writeHarness,"/config/params", payload); + @SuppressWarnings({"rawtypes"}) Map result = TestSolrConfigHandler.testForResponseElement(null, urls.get(random().nextInt(urls.size())), "/config/params", @@ -274,7 +275,8 @@ private void testReqParams() throws Exception{ } - public static void compareValues(Map result, Object expected, List jsonPath) { + @SuppressWarnings({"unchecked"}) + public static void compareValues(@SuppressWarnings({"rawtypes"})Map result, Object expected, List jsonPath) { Object val = Utils.getObjectByPath(result, false, jsonPath); assertTrue(StrUtils.formatString("Could not get expected value {0} for path {1} full output {2}", expected, jsonPath, result.toString()), expected instanceof Predicate ? 
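The replication and config-handler hunks above repeatedly swap string-concatenated log calls for SLF4J placeholders, adding a log.isInfoEnabled() guard where an argument is itself a method call (numFound(res), Utils.writeJson(...), client.query(...)). A minimal sketch of that pattern, assuming a hypothetical expensiveSummary() helper standing in for such calls:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class LoggingPatternSketch {
  private static final Logger log = LoggerFactory.getLogger(LoggingPatternSketch.class);

  void report(int expectedDocCount, long found) {
    // Placeholders defer building the message string until INFO is enabled.
    log.info("Waiting for {} docs", expectedDocCount);

    // Placeholder arguments are still evaluated eagerly, so an expensive
    // argument gets an explicit level check around the whole call.
    if (log.isInfoEnabled()) {
      log.info("Waited and found {} docs", expensiveSummary(found));
    }
  }

  // Hypothetical stand-in for an argument that is costly to compute.
  private String expensiveSummary(long found) {
    return Long.toString(found);
  }
}
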
diff --git a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java index 17d42bf90f07..1b4380851063 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java +++ b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java @@ -55,18 +55,23 @@ public class TestSolrConfigHandlerConcurrent extends AbstractFullDistribZkTestBa @Test public void test() throws Exception { + @SuppressWarnings({"rawtypes"}) Map editable_prop_map = (Map) Utils.fromJSONResource("EditableSolrConfigAttributes.json"); + @SuppressWarnings({"rawtypes"}) Map caches = (Map) editable_prop_map.get("query"); setupRestTestHarnesses(); List threads = new ArrayList<>(caches.size()); + @SuppressWarnings({"rawtypes"}) final List collectErrors = new ArrayList<>(); for (Object o : caches.entrySet()) { + @SuppressWarnings({"rawtypes"}) final Map.Entry e = (Map.Entry) o; if (e.getValue() instanceof Map) { List errs = new ArrayList<>(); collectErrors.add(errs); + @SuppressWarnings({"rawtypes"}) Map value = (Map) e.getValue(); Thread t = new Thread(() -> { try { @@ -85,10 +90,10 @@ public void test() throws Exception { boolean success = true; - for (List e : collectErrors) { + for (@SuppressWarnings({"rawtypes"})List e : collectErrors) { if(!e.isEmpty()){ success = false; - log.error(e.toString()); + log.error("{}", e); } } @@ -99,7 +104,8 @@ public void test() throws Exception { } - private void invokeBulkCall(String cacheName, List errs, Map val) throws Exception { + private void invokeBulkCall(String cacheName, List errs, + @SuppressWarnings({"rawtypes"})Map val) throws Exception { String payload = "{" + "'set-property' : {'query.CACHENAME.size':'CACHEVAL1'," + @@ -128,6 +134,7 @@ private void invokeBulkCall(String cacheName, List errs, Map val) throw publisher.close(); } + @SuppressWarnings({"rawtypes"}) Map map = (Map) Utils.fromJSONString(response); Object errors = map.get("errors"); if(errors!= null){ @@ -177,6 +184,7 @@ private void invokeBulkCall(String cacheName, List errs, Map val) throw } + @SuppressWarnings({"rawtypes"}) public static LinkedHashMapWriter getAsMap(String uri, CloudSolrClient cloudClient) throws Exception { HttpGet get = new HttpGet(uri) ; HttpEntity entity = null; diff --git a/solr/core/src/test/org/apache/solr/handler/ThrowErrorOnInitRequestHandler.java b/solr/core/src/test/org/apache/solr/handler/ThrowErrorOnInitRequestHandler.java index 6409e1758651..804453ffb533 100644 --- a/solr/core/src/test/org/apache/solr/handler/ThrowErrorOnInitRequestHandler.java +++ b/solr/core/src/test/org/apache/solr/handler/ThrowErrorOnInitRequestHandler.java @@ -41,7 +41,7 @@ public String getDescription() { } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { String errorMessage = (String) args.get("error"); if (errorMessage != null) { throw new Error(errorMessage); diff --git a/solr/core/src/test/org/apache/solr/handler/V2ApiIntegrationTest.java b/solr/core/src/test/org/apache/solr/handler/V2ApiIntegrationTest.java index 9df56e84a65e..883e94d5dc53 100644 --- a/solr/core/src/test/org/apache/solr/handler/V2ApiIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/handler/V2ApiIntegrationTest.java @@ -105,6 +105,7 @@ public void testIntrospect() throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("command","XXXX"); params.set("method", "POST"); + 
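// The suppression pattern running through these test hunks: @SuppressWarnings
// is attached to the narrowest element that needs it -- a single local
// declaration or a single parameter -- rather than the enclosing method or
// class, so rawtypes/unchecked warnings still fire for the surrounding code.
// A sketch of the two placements (illustrative names, not lines from this
// patch):
//
//   @SuppressWarnings({"rawtypes"})
//   NamedList res = client.request(req);   // only this declaration is exempt
//
//   private long count(@SuppressWarnings({"rawtypes"}) NamedList res) {
//     return ((SolrDocumentList) res.get("response")).getNumFound();
//   }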
@SuppressWarnings({"rawtypes"}) Map result = resAsMap(cluster.getSolrClient(), new V2Request.Builder("/c/"+COLL_NAME+"/_introspect") .withParams(params).build()); @@ -141,14 +142,17 @@ public void testWTParam() throws Exception { @Test public void testSingleWarning() throws Exception { + @SuppressWarnings({"rawtypes"}) NamedList resp = cluster.getSolrClient().request( new V2Request.Builder("/c/"+COLL_NAME+"/_introspect").build()); + @SuppressWarnings({"rawtypes"}) List warnings = resp.getAll("WARNING"); assertEquals(1, warnings.size()); } @Test public void testSetPropertyValidationOfCluster() throws IOException, SolrServerException { + @SuppressWarnings({"rawtypes"}) NamedList resp = cluster.getSolrClient().request( new V2Request.Builder("/cluster").withMethod(SolrRequest.METHOD.POST).withPayload("{set-property: {name: autoAddReplicas, val:false}}").build()); assertTrue(resp.toString().contains("status=0")); @@ -160,6 +164,7 @@ public void testSetPropertyValidationOfCluster() throws IOException, SolrServerE @Test public void testCollectionsApi() throws Exception { CloudSolrClient client = cluster.getSolrClient(); + @SuppressWarnings({"rawtypes"}) Map result = resAsMap(client, new V2Request.Builder("/c/"+COLL_NAME+"/get/_introspect").build()); assertEquals("/c/collection1/get", Utils.getObjectByPath(result, true, "/spec[0]/url/paths[0]")); result = resAsMap(client, new V2Request.Builder("/collections/"+COLL_NAME+"/get/_introspect").build()); @@ -177,6 +182,7 @@ public void testCollectionsApi() throws Exception { .build()); } + @SuppressWarnings({"rawtypes"}) private Map resAsMap(CloudSolrClient client, V2Request request) throws SolrServerException, IOException { NamedList rsp = client.request(request); return rsp.asMap(100); diff --git a/solr/core/src/test/org/apache/solr/handler/V2StandaloneTest.java b/solr/core/src/test/org/apache/solr/handler/V2StandaloneTest.java index ccf2a580dcf5..316bc84083f6 100644 --- a/solr/core/src/test/org/apache/solr/handler/V2StandaloneTest.java +++ b/solr/core/src/test/org/apache/solr/handler/V2StandaloneTest.java @@ -39,7 +39,9 @@ public void testWelcomeMessage() throws Exception { jetty.start(); try (HttpSolrClient client = getHttpSolrClient(buildUrl(jetty.getLocalPort(),"/solr/"))) { + @SuppressWarnings({"rawtypes"}) NamedList res = client.request(new V2Request.Builder("/").build()); + @SuppressWarnings({"rawtypes"}) NamedList header = (NamedList) res.get("responseHeader"); assertEquals(0, header.get("status")); diff --git a/solr/core/src/test/org/apache/solr/handler/XsltUpdateRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/XsltUpdateRequestHandlerTest.java index fb7bd1999bfd..8e3f4199f85c 100644 --- a/solr/core/src/test/org/apache/solr/handler/XsltUpdateRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/XsltUpdateRequestHandlerTest.java @@ -77,9 +77,10 @@ public void testUpdate() throws Exception streams.add(new ContentStreamBase.StringStream(xml)); req.setContentStreams(streams); SolrQueryResponse rsp = new SolrQueryResponse(); - UpdateRequestHandler handler = new UpdateRequestHandler(); - handler.init(new NamedList()); - handler.handleRequestBody(req, rsp); + try (UpdateRequestHandler handler = new UpdateRequestHandler()) { + handler.init(new NamedList()); + handler.handleRequestBody(req, rsp); + } StringWriter sw = new StringWriter(32000); QueryResponseWriter responseWriter = core.getQueryResponseWriter(req); responseWriter.write(sw,req,rsp); diff --git 
a/solr/core/src/test/org/apache/solr/handler/admin/AutoscalingHistoryHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/AutoscalingHistoryHandlerTest.java index 935d2cd306a3..962bd933b029 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/AutoscalingHistoryHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/AutoscalingHistoryHandlerTest.java @@ -130,6 +130,7 @@ public void setupTest() throws Exception { " {'replica':'<2', 'shard': '#EACH', 'node': '#ANY'}" + " ]" + "}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand); solrClient.request(req); @@ -276,7 +277,7 @@ public void testHistory() throws Exception { JettySolrRunner jetty = cluster.startJettySolrRunner(); cluster.waitForAllNodes(30); String nodeAddedName = jetty.getNodeName(); - log.info("### Added node " + nodeAddedName); + log.info("### Added node {}", nodeAddedName); boolean await = actionFiredLatch.await(60, TimeUnit.SECONDS); assertTrue("action did not execute", await); @@ -366,7 +367,7 @@ public void testHistory() throws Exception { } } assertNotNull("no suitable node found", nodeToKill); - log.info("### Stopping node " + nodeToKill); + log.info("### Stopping node {}", nodeToKill); for (int i = 0; i < cluster.getJettySolrRunners().size(); i++) { if (cluster.getJettySolrRunner(i).getNodeName().equals(nodeToKill)) { JettySolrRunner j = cluster.stopJettySolrRunner(i); @@ -374,7 +375,7 @@ public void testHistory() throws Exception { break; } } - log.info("### Stopped node " + nodeToKill); + log.info("### Stopped node {}", nodeToKill); await = actionFiredLatch.await(60, TimeUnit.SECONDS); assertTrue("action did not execute", await); @@ -405,17 +406,19 @@ private SolrDocumentList queryAndAssertDocs(ModifiableSolrParams query, SolrClie QueryResponse rsp = client.query(query); SolrDocumentList docs = rsp.getResults(); if (docs.size() != expected) { - log.info("History query: " + query); - log.info("Wrong response: " + rsp); + log.info("History query: {}", query); + log.info("Wrong response: {}", rsp); ModifiableSolrParams fullQuery = params(CommonParams.QT, CommonParams.AUTOSCALING_HISTORY_PATH); - log.info("Full response: " + client.query(fullQuery)); + if (log.isInfoEnabled()) { + log.info("Full response: {}", client.query(fullQuery)); + } } assertEquals("Wrong number of documents", expected, docs.size()); return docs; } private static void waitForRecovery(String collection) throws Exception { - log.info("Waiting for recovery of " + collection); + log.info("Waiting for recovery of {}", collection); boolean recovered = false; boolean allActive = true; boolean hasLeaders = true; @@ -423,7 +426,7 @@ private static void waitForRecovery(String collection) throws Exception { for (int i = 0; i < 300; i++) { ClusterState state = solrClient.getZkStateReader().getClusterState(); collState = getCollectionState(collection); - log.debug("###### " + collState); + log.debug("###### {}", collState); Collection replicas = collState.getReplicas(); allActive = true; hasLeaders = true; @@ -431,11 +434,11 @@ private static void waitForRecovery(String collection) throws Exception { for (Replica r : replicas) { if (state.getLiveNodes().contains(r.getNodeName())) { if (!r.isActive(state.getLiveNodes())) { - log.info("Not active: " + r); + log.info("Not active: {}", r); allActive = false; } } else { - log.info("Replica no longer on a live node, ignoring: " + r); + log.info("Replica no longer on a live node, ignoring: {}", r); } } } 
else { @@ -450,7 +453,7 @@ private static void waitForRecovery(String collection) throws Exception { recovered = true; break; } else { - log.info("--- waiting, allActive=" + allActive + ", hasLeaders=" + hasLeaders); + log.info("--- waiting, allActive={}, hasLeaders={}", allActive, hasLeaders); Thread.sleep(1000); } } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminCreateDiscoverTest.java b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminCreateDiscoverTest.java index 0c123186fedc..5fa5364a9488 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminCreateDiscoverTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminCreateDiscoverTest.java @@ -206,8 +206,10 @@ public void testInstanceDirAsPropertyParam() throws Exception { CoreAdminParams.CoreAdminAction.STATUS.toString(), CoreAdminParams.CORE, "testInstanceDirAsPropertyParam"), resp); + @SuppressWarnings({"rawtypes"}) NamedList status = (NamedList) resp.getValues().get("status"); assertNotNull(status); + @SuppressWarnings({"rawtypes"}) NamedList coreProps = (NamedList) status.get("testInstanceDirAsPropertyParam"); assertNotNull(status); String instanceDir = (String) coreProps.get("instanceDir"); diff --git a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java index eb3d1ebf86c9..eab817b225ef 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java @@ -82,6 +82,7 @@ public void testCreateWithSysVars() throws Exception { new File(subHome, "solrconfig.snippet.randomindexconfig.xml")); final CoreContainer cores = h.getCoreContainer(); + cores.getAllowPaths().add(workDir.toPath()); final CoreAdminHandler admin = new CoreAdminHandler(cores); @@ -117,6 +118,7 @@ CoreAdminParams.NAME, getCoreName(), // Should have segments in the directory pointed to by the ${DATA_TEST}. 
File test = new File(dataDir, "index"); assertTrue("Should have found index dir at " + test.getAbsolutePath(), test.exists()); + admin.close(); } @Test @@ -124,6 +126,7 @@ public void testCoreAdminHandler() throws Exception { final File workDir = createTempDir().toFile(); final CoreContainer cores = h.getCoreContainer(); + cores.getAllowPaths().add(workDir.toPath()); final CoreAdminHandler admin = new CoreAdminHandler(cores); @@ -196,6 +199,7 @@ public void testCoreAdminHandler() throws Exception { (Map) resp.getValues().get("initFailures"); assertNotNull("core failures is null", failures); + @SuppressWarnings({"rawtypes"}) NamedList status = (NamedList)resp.getValues().get("status"); assertNotNull("core status is null", status); @@ -243,7 +247,7 @@ public void testCoreAdminHandler() throws Exception { assertNotNull("Core should have been renamed!", cd); // :TODO: because of SOLR-3665 we can't ask for status from all cores - + admin.close(); } @Test @@ -417,5 +421,6 @@ public void testNonexistentCoreReload() throws Exception { , resp); }); assertEquals("Expected error message for non-existent core.", "Missing required parameter: core", e.getMessage()); + admin.close(); } } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminRequestStatusTest.java b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminRequestStatusTest.java index 9c7ce70cdafe..d95773343032 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminRequestStatusTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminRequestStatusTest.java @@ -42,6 +42,7 @@ public void testCoreAdminRequestStatus() throws Exception { final File workDir = createTempDir().toFile(); final CoreContainer cores = h.getCoreContainer(); + cores.getAllowPaths().add(workDir.toPath()); // Allow core to be created in workDir final CoreAdminHandler admin = new CoreAdminHandler(cores); @@ -97,6 +98,7 @@ public void testCoreAdminRequestStatus() throws Exception { "notfound", resp.getValues().get("STATUS")); admin.shutdown(); + admin.close(); } } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/CoreMergeIndexesAdminHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/CoreMergeIndexesAdminHandlerTest.java index d593b73426df..6fdccc790c0f 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/CoreMergeIndexesAdminHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/CoreMergeIndexesAdminHandlerTest.java @@ -75,9 +75,8 @@ public void testMergeIndexesCoreAdminHandler() throws Exception { final CoreContainer cores = h.getCoreContainer(); - final CoreAdminHandler admin = new CoreAdminHandler(cores); - - try (SolrCore core = cores.getCore("collection1")) { + try (final CoreAdminHandler admin = new CoreAdminHandler(cores); + SolrCore core = cores.getCore("collection1")) { DirectoryFactory df = core.getDirectoryFactory(); FailingDirectoryFactory dirFactory = (FailingDirectoryFactory) df; diff --git a/solr/core/src/test/org/apache/solr/handler/admin/HealthCheckHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/HealthCheckHandlerTest.java index 7d517f2f95b5..d7a03ab80828 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/HealthCheckHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/HealthCheckHandlerTest.java @@ -60,6 +60,7 @@ public static void setupCluster() throws Exception { @Test public void testHealthCheckHandler() throws Exception { + @SuppressWarnings({"rawtypes"}) SolrRequest req = new 
GenericSolrRequest(SolrRequest.METHOD.GET, HEALTH_CHECK_HANDLER_PATH, new ModifiableSolrParams()); // positive check that our only existing "healthy" node works with cloud client diff --git a/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java b/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java index 2ad9b87c9d20..b092be193425 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java @@ -171,29 +171,39 @@ public void testIntegration() throws Exception { assertEquals(0, rsp.getStatus()); assertEquals(0, sampledRsp.getStatus()); for (int i : Arrays.asList(1, 2)) { + @SuppressWarnings({"unchecked"}) NamedList segInfos = (NamedList) rsp.getResponse().findRecursive(collection, "shards", "shard" + i, "leader", "segInfos"); + @SuppressWarnings({"unchecked"}) NamedList rawSize = (NamedList)segInfos.get("rawSize"); assertNotNull("rawSize missing", rawSize); + @SuppressWarnings({"unchecked"}) Map rawSizeMap = rawSize.asMap(10); + @SuppressWarnings({"unchecked"}) Map fieldsBySize = (Map)rawSizeMap.get(IndexSizeEstimator.FIELDS_BY_SIZE); assertNotNull("fieldsBySize missing", fieldsBySize); assertEquals(fieldsBySize.toString(), fields.size(), fieldsBySize.size()); fields.forEach(field -> assertNotNull("missing field " + field, fieldsBySize.get(field))); + @SuppressWarnings({"unchecked"}) Map typesBySize = (Map)rawSizeMap.get(IndexSizeEstimator.TYPES_BY_SIZE); assertNotNull("typesBySize missing", typesBySize); assertTrue("expected at least 8 types: " + typesBySize.toString(), typesBySize.size() >= 8); + @SuppressWarnings({"unchecked"}) Map summary = (Map)rawSizeMap.get(IndexSizeEstimator.SUMMARY); assertNotNull("summary missing", summary); assertEquals(summary.toString(), fields.size(), summary.size()); fields.forEach(field -> assertNotNull("missing field " + field, summary.get(field))); + @SuppressWarnings({"unchecked"}) Map details = (Map)rawSizeMap.get(IndexSizeEstimator.DETAILS); assertNotNull("details missing", summary); assertEquals(details.keySet().toString(), 6, details.size()); // compare with sampled + @SuppressWarnings({"unchecked"}) NamedList sampledRawSize = (NamedList) rsp.getResponse().findRecursive(collection, "shards", "shard" + i, "leader", "segInfos", "rawSize"); assertNotNull("sampled rawSize missing", sampledRawSize); + @SuppressWarnings({"unchecked"}) Map sampledRawSizeMap = rawSize.asMap(10); + @SuppressWarnings({"unchecked"}) Map sampledFieldsBySize = (Map)sampledRawSizeMap.get(IndexSizeEstimator.FIELDS_BY_SIZE); assertNotNull("sampled fieldsBySize missing", sampledFieldsBySize); fieldsBySize.forEach((k, v) -> { diff --git a/solr/core/src/test/org/apache/solr/handler/admin/LukeRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/LukeRequestHandlerTest.java index df043c1721ee..8065f55488a9 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/LukeRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/LukeRequestHandlerTest.java @@ -19,12 +19,15 @@ import java.util.Arrays; import java.util.EnumSet; +import javax.xml.xpath.XPathConstants; + import org.apache.solr.common.luke.FieldFlag; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.schema.CustomAnalyzerStrField; // jdoc import org.apache.solr.schema.IndexSchema; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.util.TestHarness; +import org.junit.Assert; import org.junit.Before; 
import org.junit.BeforeClass; import org.junit.Test; @@ -142,6 +145,18 @@ private static String field(String field) { private static String dynfield(String field) { return "//lst[@name='dynamicFields']/lst[@name='"+field+"']/"; } + + @Test + public void testIndexHeapUsageBytes() throws Exception { + try (SolrQueryRequest req = req("qt", "/admin/luke")) { + String response = h.query(req); + String xpath = "//long[@name='indexHeapUsageBytes']"; + Double num = (Double) TestHarness.evaluateXPath(response, xpath, XPathConstants.NUMBER); + //with docs in the index, indexHeapUsageBytes should be greater than 0 + Assert.assertTrue("indexHeapUsageBytes should be > 0, but was " + num.intValue(), num.intValue() > 0); + } + } + @Test public void testFlParam() { SolrQueryRequest req = req("qt", "/admin/luke", "fl", "solr_t solr_s", "show", "all"); diff --git a/solr/core/src/test/org/apache/solr/handler/admin/MBeansHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/MBeansHandlerTest.java index 5ec4b615ef93..33f6f10c0b9d 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/MBeansHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/MBeansHandlerTest.java @@ -63,6 +63,7 @@ public void testDiff() throws Exception { NamedList>> diff = SolrInfoMBeanHandler.fromXML(xml); // The stats bean for SolrInfoMBeanHandler + @SuppressWarnings({"rawtypes"}) NamedList stats = (NamedList)diff.get("ADMIN").get("/admin/mbeans").get("stats"); //System.out.println("stats:"+stats); @@ -193,5 +194,6 @@ public SolrMetricsContext getSolrMetricsContext() { reader.start(); counter.await(30, TimeUnit.SECONDS); runSnapshots = false; + bean.close(); } } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java index 18b4efe257d5..4ab33df55045 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java @@ -75,9 +75,11 @@ public void test() throws Exception { SolrQueryResponse resp = new SolrQueryResponse(); handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json"), resp); + @SuppressWarnings({"rawtypes"}) NamedList values = resp.getValues(); assertNotNull(values.get("metrics")); values = (NamedList) values.get("metrics"); + @SuppressWarnings({"rawtypes"}) NamedList nl = (NamedList) values.get("solr.core.collection1"); assertNotNull(nl); Object o = nl.get("SEARCHER.new.errors"); @@ -181,6 +183,7 @@ public void test() throws Exception { assertNotNull(values.get("solr.core.collection1")); values = (NamedList) values.get("solr.core.collection1"); assertEquals(1, values.size()); + @SuppressWarnings({"rawtypes"}) Map m = (Map) values.get("CACHE.core.fieldCache"); assertNotNull(m); assertNotNull(m.get("entries_count")); @@ -196,8 +199,10 @@ public void test() throws Exception { handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "group", "node", "type", "timer", "prefix", "CONTAINER.cores"), resp); values = resp.getValues(); assertNotNull(values.get("metrics")); + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap map = (SimpleOrderedMap) values.get("metrics"); assertEquals(0, map.size()); + handler.close(); } @Test @@ -206,17 +211,21 @@ public void testCompact() throws Exception { SolrQueryResponse resp = new SolrQueryResponse(); 
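// Several tests in this file now end with an explicit handler.close();
// the handlers here are closeable, as the try-with-resources conversions
// elsewhere in this change (CoreAdminHandler, ConfigSetsHandler,
// UpdateRequestHandler, MockSecurityHandler) show. A sketch of the structural
// alternative, with the constructor argument elided:
//
//   try (MetricsHandler handler = new MetricsHandler(/* CoreContainer */)) {
//     handler.handleRequestBody(req, resp);
//     // assertions ...
//   } // closed even when an assertion throws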
handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", MetricsHandler.COMPACT_PARAM, "true"), resp); + @SuppressWarnings({"rawtypes"}) NamedList values = resp.getValues(); assertNotNull(values.get("metrics")); values = (NamedList) values.get("metrics"); + @SuppressWarnings({"rawtypes"}) NamedList nl = (NamedList) values.get("solr.core.collection1"); assertNotNull(nl); Object o = nl.get("SEARCHER.new.errors"); assertNotNull(o); // counter type assertTrue(o instanceof Number); + handler.close(); } @Test + @SuppressWarnings({"unchecked"}) public void testPropertyFilter() throws Exception { assertQ(req("*:*"), "//result[@numFound='0']"); @@ -225,14 +234,17 @@ public void testPropertyFilter() throws Exception { SolrQueryResponse resp = new SolrQueryResponse(); handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", MetricsHandler.COMPACT_PARAM, "true", "group", "core", "prefix", "CACHE.searcher"), resp); + @SuppressWarnings({"rawtypes"}) NamedList values = resp.getValues(); assertNotNull(values.get("metrics")); values = (NamedList) values.get("metrics"); + @SuppressWarnings({"rawtypes"}) NamedList nl = (NamedList) values.get("solr.core.collection1"); assertNotNull(nl); assertTrue(nl.size() > 0); nl.forEach((k, v) -> { assertTrue(v instanceof Map); + @SuppressWarnings({"rawtypes"}) Map map = (Map) v; assertTrue(map.size() > 2); }); @@ -248,11 +260,13 @@ public void testPropertyFilter() throws Exception { assertTrue(nl.size() > 0); nl.forEach((k, v) -> { assertTrue(v instanceof Map); + @SuppressWarnings({"rawtypes"}) Map map = (Map) v; assertEquals(2, map.size()); assertNotNull(map.get("inserts")); assertNotNull(map.get("size")); }); + handler.close(); } @Test @@ -263,6 +277,7 @@ public void testKeyMetrics() throws Exception { SolrQueryResponse resp = new SolrQueryResponse(); handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", MetricsHandler.KEY_PARAM, key1), resp); + @SuppressWarnings({"rawtypes"}) NamedList values = resp.getValues(); Object val = values.findRecursive("metrics", key1); assertNotNull(val); @@ -316,6 +331,7 @@ public void testKeyMetrics() throws Exception { handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", MetricsHandler.KEY_PARAM, "foo", MetricsHandler.KEY_PARAM, "foo:bar:baz:xyz"), resp); values = resp.getValues(); + @SuppressWarnings({"rawtypes"}) NamedList metrics = (NamedList) values.get("metrics"); assertEquals(0, metrics.size()); assertNotNull(values.findRecursive("errors", "foo")); @@ -338,6 +354,8 @@ public void testKeyMetrics() throws Exception { metrics = (NamedList) values.get("metrics"); assertEquals(0, metrics.size()); assertNotNull(values.findRecursive("errors", "solr.jetty:unknown:baz")); + + handler.close(); } @Test diff --git a/solr/core/src/test/org/apache/solr/handler/admin/PropertiesRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/PropertiesRequestHandlerTest.java index 34ab7eb15c3a..bbbde8a66739 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/PropertiesRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/PropertiesRequestHandlerTest.java @@ -60,6 +60,7 @@ public void testDisabledRedaction() throws Exception { } } + @SuppressWarnings({"unchecked"}) private NamedList>> readProperties() throws Exception { String xml = h.query(req( CommonParams.QT, "/admin/properties", diff --git 
a/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerTest.java index 76eb658cc5dc..270e3ba7b1d4 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerTest.java @@ -37,6 +37,7 @@ public class SecurityConfHandlerTest extends SolrTestCaseJ4 { + @SuppressWarnings({"unchecked", "rawtypes"}) public void testEdit() throws Exception { MockSecurityHandler handler = new MockSecurityHandler(); String command = "{\n" + @@ -173,8 +174,10 @@ public void testEdit() throws Exception { req.setContentStreams(Collections.singletonList(o)); rsp = new SolrQueryResponse(); handler.handleRequestBody(req, rsp); + @SuppressWarnings({"rawtypes"}) List l = (List) ((Map) ((List)rsp.getValues().get("errorMessages")).get(0)).get("errorMessages"); assertEquals(1, l.size()); + handler.close(); } @@ -266,7 +269,9 @@ public String getStandardJson() throws Exception { public static void main(String[] args) throws Exception{ - System.out.println(new MockSecurityHandler().getStandardJson()); + try (MockSecurityHandler msh = new MockSecurityHandler()) { + System.out.println(msh.getStandardJson()); + } } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/SegmentsInfoRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/SegmentsInfoRequestHandlerTest.java index 06d3da506868..23c0df52683c 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/SegmentsInfoRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/SegmentsInfoRequestHandlerTest.java @@ -17,11 +17,14 @@ package org.apache.solr.handler.admin; import java.io.IOException; + +import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.util.Version; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.index.NoMergePolicyFactory; +import org.apache.solr.util.RefCounted; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -35,6 +38,8 @@ public class SegmentsInfoRequestHandlerTest extends SolrTestCaseJ4 { private static final int DEL_COUNT = 1; private static final int NUM_SEGMENTS = 2; + + private static int initialRefCount; @BeforeClass public static void beforeClass() throws Exception { @@ -69,11 +74,18 @@ public static void beforeClass() throws Exception { NUM_SEGMENTS, numSegments); return null; }); - + // see SOLR-14431 + RefCounted iwRef = h.getCore().getSolrCoreState().getIndexWriter(h.getCore()); + initialRefCount = iwRef.getRefcount(); + iwRef.decref(); } @AfterClass - public static void afterClass() { + public static void afterClass() throws Exception { + RefCounted iwRef = h.getCore().getSolrCoreState().getIndexWriter(h.getCore()); + int finalRefCount = iwRef.getRefcount(); + iwRef.decref(); + assertEquals("IW refcount mismatch", initialRefCount, finalRefCount); systemClearPropertySolrTestsMergePolicyFactory(); System.clearProperty("solr.tests.maxBufferedDocs"); System.clearProperty("solr.tests.ramBufferSizeMB"); diff --git a/solr/core/src/test/org/apache/solr/handler/admin/StatsReloadRaceTest.java b/solr/core/src/test/org/apache/solr/handler/admin/StatsReloadRaceTest.java index fc1b496c9f5e..3ec99a5736f3 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/StatsReloadRaceTest.java +++ 
b/solr/core/src/test/org/apache/solr/handler/admin/StatsReloadRaceTest.java @@ -116,10 +116,12 @@ private void requestMetrics(boolean softFail) throws Exception { h.getCoreContainer().getRequestHandler("/admin/metrics").handleRequest( req("prefix", "SEARCHER", "registry", registry, "compact", "true"), rsp); + @SuppressWarnings({"rawtypes"}) NamedList values = rsp.getValues(); // this is not guaranteed to exist right away after core reload - there's a // small window between core load and before searcher metrics are registered // so we may have to check a few times, and then fail softly if reload is not complete yet + @SuppressWarnings({"rawtypes"}) NamedList metrics = (NamedList)values.get("metrics"); if (metrics == null) { if (softFail) { diff --git a/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java b/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java index 32d1f58ddb8d..3c66c7b62d03 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java @@ -353,14 +353,18 @@ public List getCommands(boolean validateInput) { } - public static void assertConditions(Map root, Map conditions) { + public static void assertConditions(@SuppressWarnings({"rawtypes"})Map root, + @SuppressWarnings({"rawtypes"})Map conditions) { for (Object o : conditions.entrySet()) { + @SuppressWarnings({"rawtypes"}) Map.Entry e = (Map.Entry) o; String path = (String) e.getKey(); List parts = StrUtils.splitSmart(path, path.charAt(0) == '/' ? '/' : ' ', true); Object val = Utils.getObjectByPath(root, false, parts); if (e.getValue() instanceof ValidatingJsonMap.PredicateWithErrMsg) { + @SuppressWarnings({"rawtypes"}) ValidatingJsonMap.PredicateWithErrMsg value = (ValidatingJsonMap.PredicateWithErrMsg) e.getValue(); + @SuppressWarnings({"unchecked"}) String err = value.test(val); if (err != null) { assertEquals(err + " for " + e.getKey() + " in :" + Utils.toJSONString(root), e.getValue(), val); diff --git a/solr/core/src/test/org/apache/solr/handler/admin/TestCollectionAPIs.java b/solr/core/src/test/org/apache/solr/handler/admin/TestCollectionAPIs.java index e2e0cadc9b8c..617fc1d2a00c 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/TestCollectionAPIs.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/TestCollectionAPIs.java @@ -79,27 +79,29 @@ public void testCopyParamsToMap() { } public void testCommands() throws Exception { - MockCollectionsHandler collectionsHandler = new MockCollectionsHandler(); - ApiBag apiBag = new ApiBag(false); - Collection apis = collectionsHandler.getApis(); - for (Api api : apis) apiBag.register(api, Collections.emptyMap()); + ApiBag apiBag; + try (MockCollectionsHandler collectionsHandler = new MockCollectionsHandler()) { + apiBag = new ApiBag(false); + Collection apis = collectionsHandler.getApis(); + for (Api api : apis) apiBag.register(api, Collections.emptyMap()); + } //test a simple create collection call compareOutput(apiBag, "/collections", POST, "{create:{name:'newcoll', config:'schemaless', numShards:2, replicationFactor:2 }}", null, - "{name:newcoll, fromApi:'true', replicationFactor:'2', nrtReplicas:'2', collection.configName:schemaless, numShards:'2', stateFormat:'2', operation:create}"); + "{name:newcoll, fromApi:'true', replicationFactor:'2', nrtReplicas:'2', collection.configName:schemaless, numShards:'2', operation:create}"); compareOutput(apiBag, "/collections", POST, "{create:{name:'newcoll', config:'schemaless', 
numShards:2, nrtReplicas:2 }}", null, - "{name:newcoll, fromApi:'true', nrtReplicas:'2', replicationFactor:'2', collection.configName:schemaless, numShards:'2', stateFormat:'2', operation:create}"); + "{name:newcoll, fromApi:'true', nrtReplicas:'2', replicationFactor:'2', collection.configName:schemaless, numShards:'2', operation:create}"); compareOutput(apiBag, "/collections", POST, "{create:{name:'newcoll', config:'schemaless', numShards:2, nrtReplicas:2, tlogReplicas:2, pullReplicas:2 }}", null, - "{name:newcoll, fromApi:'true', nrtReplicas:'2', replicationFactor:'2', tlogReplicas:'2', pullReplicas:'2', collection.configName:schemaless, numShards:'2', stateFormat:'2', operation:create}"); + "{name:newcoll, fromApi:'true', nrtReplicas:'2', replicationFactor:'2', tlogReplicas:'2', pullReplicas:'2', collection.configName:schemaless, numShards:'2', operation:create}"); //test a create collection with custom properties compareOutput(apiBag, "/collections", POST, "{create:{name:'newcoll', config:'schemaless', numShards:2, replicationFactor:2, properties:{prop1:'prop1val', prop2: prop2val} }}", null, - "{name:newcoll, fromApi:'true', replicationFactor:'2', nrtReplicas:'2', collection.configName:schemaless, numShards:'2', stateFormat:'2', operation:create, property.prop1:prop1val, property.prop2:prop2val}"); + "{name:newcoll, fromApi:'true', replicationFactor:'2', nrtReplicas:'2', collection.configName:schemaless, numShards:'2', operation:create, property.prop1:prop1val, property.prop2:prop2val}"); compareOutput(apiBag, "/collections", POST, @@ -199,6 +201,7 @@ static ZkNodeProps compareOutput(final ApiBag apiBag, final String path, final S final String payload, final CoreContainer cc, String expectedOutputMapJson) throws Exception { Pair ctx = makeCall(apiBag, path, method, payload, cc); ZkNodeProps output = (ZkNodeProps) ctx.second().getValues().get(ZkNodeProps.class.getName()); + @SuppressWarnings({"rawtypes"}) Map expected = (Map) fromJSONString(expectedOutputMapJson); assertMapEqual(expected, output); return output; @@ -250,9 +253,10 @@ public String getHttpMethod() { return new Pair<>(req, rsp); } - private static void assertMapEqual(Map expected, ZkNodeProps actual) { + private static void assertMapEqual(@SuppressWarnings({"rawtypes"})Map expected, ZkNodeProps actual) { assertEquals(errorMessage(expected, actual), expected.size(), actual.getProperties().size()); for (Object o : expected.entrySet()) { + @SuppressWarnings({"rawtypes"}) Map.Entry e = (Map.Entry) o; Object actualVal = actual.get((String) e.getKey()); if (actualVal instanceof String[]) { @@ -262,7 +266,7 @@ private static void assertMapEqual(Map expected, ZkNodeProps actual) { } } - private static String errorMessage(Map expected, ZkNodeProps actual) { + private static String errorMessage(@SuppressWarnings({"rawtypes"})Map expected, ZkNodeProps actual) { return "expected: " + Utils.toJSONString(expected) + "\nactual: " + Utils.toJSONString(actual); } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/TestConfigsApi.java b/solr/core/src/test/org/apache/solr/handler/admin/TestConfigsApi.java index c0c193de872b..0452a1e47ee8 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/TestConfigsApi.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/TestConfigsApi.java @@ -36,9 +36,10 @@ public class TestConfigsApi extends SolrTestCaseJ4 { + @SuppressWarnings({"unchecked"}) public void testCommands() throws Exception { - ConfigSetsHandler handler = new ConfigSetsHandler(null) { + try (ConfigSetsHandler 
handler = new ConfigSetsHandler(null) { @Override protected void sendToZk(SolrQueryResponse rsp, ConfigSetOperation operation, @@ -47,13 +48,14 @@ protected void sendToZk(SolrQueryResponse rsp, result.put(QUEUE_OPERATION, operation.action.toLower()); rsp.add(ZkNodeProps.class.getName(), new ZkNodeProps(result)); } - }; - ApiBag apiBag = new ApiBag(false); - for (Api api : handler.getApis()) apiBag.register(api, EMPTY_MAP); - compareOutput(apiBag, "/cluster/configs/sample", DELETE, null, null, - "{name :sample, operation:delete}"); - - compareOutput(apiBag, "/cluster/configs", POST, "{create:{name : newconf, baseConfigSet: sample }}", null, - "{operation:create, name :newconf, baseConfigSet: sample, immutable: false }"); + }) { + ApiBag apiBag = new ApiBag(false); + for (Api api : handler.getApis()) apiBag.register(api, EMPTY_MAP); + compareOutput(apiBag, "/cluster/configs/sample", DELETE, null, null, + "{name :sample, operation:delete}"); + + compareOutput(apiBag, "/cluster/configs", POST, "{create:{name : newconf, baseConfigSet: sample }}", null, + "{operation:create, name :newconf, baseConfigSet: sample, immutable: false }"); + } } } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/TestCoreAdminApis.java b/solr/core/src/test/org/apache/solr/handler/admin/TestCoreAdminApis.java index 2919b1c8c78b..15b502530384 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/TestCoreAdminApis.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/TestCoreAdminApis.java @@ -35,14 +35,17 @@ public class TestCoreAdminApis extends SolrTestCaseJ4 { + @SuppressWarnings({"unchecked"}) public void testCalls() throws Exception { Map calls = new HashMap<>(); CoreContainer mockCC = getCoreContainerMock(calls, new HashMap<>()); - CoreAdminHandler coreAdminHandler = new CoreAdminHandler(mockCC); - ApiBag apiBag = new ApiBag(false); - for (Api api : coreAdminHandler.getApis()) { - apiBag.register(api, Collections.EMPTY_MAP); + ApiBag apiBag; + try (CoreAdminHandler coreAdminHandler = new CoreAdminHandler(mockCC)) { + apiBag = new ApiBag(false); + for (Api api : coreAdminHandler.getApis()) { + apiBag.register(api, Collections.EMPTY_MAP); + } } TestCollectionAPIs.makeCall(apiBag, "/cores", SolrRequest.METHOD.POST, "{create:{name: hello, instanceDir : someDir, schema: 'schema.xml'}}", mockCC); @@ -69,6 +72,7 @@ public void testCalls() throws Exception { assertEquals(Boolean.TRUE ,params[1]); } + @SuppressWarnings({"unchecked"}) public static CoreContainer getCoreContainerMock(final Map in,Map out ) { assumeWorkingMockito(); diff --git a/solr/core/src/test/org/apache/solr/handler/admin/ZookeeperStatusHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/ZookeeperStatusHandlerTest.java index 693accf93cc2..cf2dd744fdea 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/ZookeeperStatusHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/ZookeeperStatusHandlerTest.java @@ -83,12 +83,15 @@ public void monitorZookeeper() throws IOException, SolrServerException, Interrup NamedList nl = solr.httpUriRequest(mntrReq).future.get(10000, TimeUnit.MILLISECONDS); assertEquals("zkStatus", nl.getName(1)); + @SuppressWarnings({"unchecked"}) Map zkStatus = (Map) nl.get("zkStatus"); assertEquals("green", zkStatus.get("status")); assertEquals("standalone", zkStatus.get("mode")); assertEquals(1L, zkStatus.get("ensembleSize")); + @SuppressWarnings({"unchecked"}) List detailsList = (List)zkStatus.get("details"); assertEquals(1, detailsList.size()); + 
@SuppressWarnings({"unchecked"}) Map details = (Map) detailsList.get(0); assertEquals(true, details.get("ok")); assertTrue(Integer.parseInt((String) details.get("zk_znode_count")) > 50); @@ -157,12 +160,55 @@ public void testEnsembleStatusMock() { @Test(expected = SolrException.class) public void validateNotWhitelisted() { - new ZookeeperStatusHandler(null).validateZkRawResponse(Collections.singletonList("mntr is not executed because it is not in the whitelist."), - "zoo1:2181", "mntr"); + try (ZookeeperStatusHandler zsh = new ZookeeperStatusHandler(null)) { + zsh.validateZkRawResponse(Collections.singletonList("mntr is not executed because it is not in the whitelist."), + "zoo1:2181", "mntr"); + } catch (IOException e) { + fail("Error closing ZookeeperStatusHandler"); + } } @Test(expected = SolrException.class) public void validateEmptyResponse() { - new ZookeeperStatusHandler(null).validateZkRawResponse(Collections.emptyList(), "zoo1:2181", "mntr"); + try (ZookeeperStatusHandler zsh = new ZookeeperStatusHandler(null)) { + zsh.validateZkRawResponse(Collections.emptyList(), "zoo1:2181", "mntr"); + } catch (IOException e) { + fail("Error closing ZookeeperStatusHandler"); + } + } + + @Test + public void testMntrBugZk36Solr14463() { + assumeWorkingMockito(); + ZookeeperStatusHandler zkStatusHandler = mock(ZookeeperStatusHandler.class); + when(zkStatusHandler.getZkRawResponse("zoo1:2181", "ruok")).thenReturn(Arrays.asList("imok")); + when(zkStatusHandler.getZkRawResponse("zoo1:2181", "mntr")).thenReturn( + Arrays.asList("zk_version\t3.5.5-390fe37ea45dee01bf87dc1c042b5e3dcce88653, built on 05/03/2019 12:07 GMT", + "zk_avg_latency\t1", + "zk_server_state\tleader", + "zk_synced_followers\t2")); + when(zkStatusHandler.getZkRawResponse("zoo1:2181", "conf")).thenReturn( + Arrays.asList("clientPort=2181")); + when(zkStatusHandler.getZkStatus(anyString(), any())).thenCallRealMethod(); + when(zkStatusHandler.monitorZookeeper(anyString())).thenCallRealMethod(); + when(zkStatusHandler.validateZkRawResponse(ArgumentMatchers.any(), any(), any())).thenAnswer(Answers.CALLS_REAL_METHODS); + + Map mockStatus = zkStatusHandler.getZkStatus("zoo1:2181", ZkDynamicConfig.fromZkConnectString("zoo1:2181")); + String expected = "{\n" + + " \"mode\":\"ensemble\",\n" + + " \"dynamicReconfig\":true,\n" + + " \"ensembleSize\":1,\n" + + " \"details\":[{\n" + + " \"zk_synced_followers\":\"2\",\n" + + " \"zk_version\":\"3.5.5-390fe37ea45dee01bf87dc1c042b5e3dcce88653, built on 05/03/2019 12:07 GMT\",\n" + + " \"zk_avg_latency\":\"1\",\n" + + " \"host\":\"zoo1:2181\",\n" + + " \"clientPort\":\"2181\",\n" + + " \"ok\":true,\n" + + " \"zk_server_state\":\"leader\"}],\n" + + " \"zkHost\":\"zoo1:2181\",\n" + + " \"errors\":[\"Leader reports 2 followers, but we only found 0. 
Please check zkHost configuration\"],\n" + + " \"status\":\"red\"}"; + assertEquals(expected, JSONUtil.toJSON(mockStatus)); } } \ No newline at end of file diff --git a/solr/core/src/test/org/apache/solr/handler/component/CustomHighlightComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/CustomHighlightComponentTest.java index 0dd9ee5dc293..84dd45accbd6 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/CustomHighlightComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/CustomHighlightComponentTest.java @@ -49,9 +49,12 @@ protected String highlightingResponseField() { } @Override - protected Object convertHighlights(NamedList hl) { + @SuppressWarnings({"unchecked"}) + protected Object convertHighlights(@SuppressWarnings({"rawtypes"})NamedList hl) { + @SuppressWarnings({"rawtypes"}) final ArrayList hlMaps = new ArrayList<>(); for (int i=0; i(); hlMap.add(id_key, hl.getName(i)); hlMap.add(snippets_key, hl.getVal(i)); @@ -61,15 +64,18 @@ protected Object convertHighlights(NamedList hl) { } @Override + @SuppressWarnings({"rawtypes"}) protected Object[] newHighlightsArray(int size) { return new SimpleOrderedMap[size]; } @Override protected void addHighlights(Object[] objArr, Object obj, Map resultIds) { + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap[] mapArr = (SimpleOrderedMap[])objArr; + @SuppressWarnings({"unchecked", "rawtypes"}) final ArrayList hlMaps = (ArrayList)obj; - for (SimpleOrderedMap hlMap : hlMaps) { + for (@SuppressWarnings({"rawtypes"})SimpleOrderedMap hlMap : hlMaps) { String id = (String)hlMap.get(id_key); ShardDoc sdoc = resultIds.get(id); int idx = sdoc.positionInResponse; @@ -79,12 +85,14 @@ protected void addHighlights(Object[] objArr, Object obj, Map @Override protected Object getAllHighlights(Object[] objArr) { + @SuppressWarnings({"rawtypes"}) final SimpleOrderedMap[] mapArr = (SimpleOrderedMap[])objArr; // remove nulls in case not all docs were able to be retrieved + @SuppressWarnings({"rawtypes"}) ArrayList mapList = new ArrayList<>(); - for (SimpleOrderedMap map : mapArr) { + for (@SuppressWarnings({"rawtypes"})SimpleOrderedMap map : mapArr) { if (map != null) { - mapList.add((SimpleOrderedMap)map); + mapList.add(map); } } return mapList; @@ -199,6 +207,7 @@ public void test() throws Exception { // analyse the response final Map>> highlighting = queryResponse.getHighlighting(); + @SuppressWarnings({"unchecked"}) final ArrayList> custom_highlighting = (ArrayList>)queryResponse.getResponse().get("custom_highlighting"); @@ -289,13 +298,15 @@ protected void checkHighlightingResponseList(ArrayList> } // snippets element { + @SuppressWarnings({"unchecked"}) SimpleOrderedMap snippets = (SimpleOrderedMap)highlightingListElementMap.get(snippets_key); if (highlightedField == null) { assertEquals(0, snippets.size()); } else { + @SuppressWarnings({"unchecked"}) ArrayList docHighlights = (ArrayList)(snippets).get(highlightedField); assertEquals(1, docHighlights.size()); - actualHighlightText = (String)docHighlights.get(0); + actualHighlightText = docHighlights.get(0); assertEquals(expectedHighlightText, actualHighlightText); } } diff --git a/solr/core/src/test/org/apache/solr/handler/component/CustomTermsComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/CustomTermsComponentTest.java index f648535f9ae9..213e22480d59 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/CustomTermsComponentTest.java +++ 
b/solr/core/src/test/org/apache/solr/handler/component/CustomTermsComponentTest.java @@ -36,7 +36,7 @@ public class CustomTermsComponentTest extends ShardsWhitelistTest { public static class CustomTermsComponent extends TermsComponent { - public void init( NamedList args ) + public void init( @SuppressWarnings({"rawtypes"})NamedList args ) { super.init(args); } diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java index e3958250af3a..cde6f0497d2f 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java @@ -208,7 +208,7 @@ public void testRandom() throws Exception { debug.add("true"); all = true; } - q.set("debug", (String[])debug.toArray(new String[debug.size()])); + q.set("debug", debug.toArray(new String[debug.size()])); QueryResponse r = client.query(q); try { @@ -404,8 +404,8 @@ public void testTolerantSearch() throws SolrServerException, IOException { QueryResponse response = collection1.query(query); assertTrue((Boolean)response.getResponseHeader().get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)); @SuppressWarnings("unchecked") - NamedList badShardTrack = (NamedList) ((NamedList>) - ((NamedList>>)response.getDebugMap().get("track")).get("EXECUTE_QUERY")).get(badShard); + NamedList badShardTrack = + (((NamedList>>)response.getDebugMap().get("track")).get("EXECUTE_QUERY")).get(badShard); assertEquals("Unexpected response size for shard", 1, badShardTrack.size()); Entry exception = badShardTrack.iterator().next(); assertEquals("Expected key 'Exception' not found", "Exception", exception.getKey()); @@ -422,7 +422,7 @@ private void assertSectionEquals(QueryResponse distrib, QueryResponse nonDistrib @SuppressWarnings({"unchecked", "rawtypes"}) private void assertSameKeys(NamedList object, NamedList object2) { - Iterator> iteratorObj2 = ((NamedList)object2).iterator(); + Iterator> iteratorObj2 = (object2).iterator(); for (Map.Entry entry:(NamedList)object) { assertTrue(iteratorObj2.hasNext()); Map.Entry entry2 = iteratorObj2.next(); diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java index 7938e23897e6..8276f943b0c2 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java @@ -863,6 +863,7 @@ private void doTestDeepPivotStats() throws Exception { /** * spot checks some pivot values and the ranges hanging on them */ + @SuppressWarnings({"unchecked"}) private void doTestPivotRanges() throws Exception { // note: 'p0' is only a top level range, not included in per-pivot ranges @@ -972,7 +973,7 @@ private void assertPivot(String field, Object value, int count, // int numKids, * asserts that the actual RangeFacet matches the expected criteria */ private void assertRange(String name, Object start, Object gap, Object end, int numCount, - RangeFacet actual) { + @SuppressWarnings({"rawtypes"})RangeFacet actual) { assertEquals("NAME: " + actual.toString(), name, actual.getName()); assertEquals("START: " + actual.toString(), start, actual.getStart()); assertEquals("GAP: " + actual.toString(), gap, actual.getGap()); diff --git 
a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLongTailTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLongTailTest.java index 996ae2e429f1..37d99892e4ec 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLongTailTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLongTailTest.java @@ -67,6 +67,7 @@ public void test() throws Exception { doTestDeepPivotStats(); } + @SuppressWarnings({"rawtypes"}) private void sanityCheckIndividualShards() throws Exception { assertEquals("This test assumes exactly 3 shards/clients", 3, clients.size()); @@ -81,6 +82,7 @@ private void sanityCheckIndividualShards() throws Exception { PivotField pivot = null; List pivots = null; + @SuppressWarnings({"unchecked", "rawtypes"}) List[] shardPivots = new List[clients.size()]; for (int i = 0; i < clients.size(); i++) { shardPivots[i] = clients.get(i).query( req ).getFacetPivot().get("foo_s,bar_s"); diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallTest.java index 71e34e3bbc27..77ef0bc531fb 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallTest.java @@ -1523,6 +1523,7 @@ private void assertThereCanBeOnlyOne(PivotField pf, FieldStatsInfo stats, Object assertEquals(msg + " stats max", val, stats.getMax()); } + @SuppressWarnings({"rawtypes"}) private List createExpectedRange(String key, int start, int end, int gap, int... values) { List expectedRanges = new ArrayList<>(); @@ -1541,7 +1542,8 @@ private List createExpectedRange(String key, int start, int end, public static class ComparablePivotField extends PivotField { public ComparablePivotField(String f, Object v, int count, - List pivot, Map queryCounts, List ranges) { + List pivot, Map queryCounts, + @SuppressWarnings({"rawtypes"})List ranges) { super(f, v, count, pivot, null, queryCounts, ranges); } @@ -1570,9 +1572,9 @@ public boolean equals(Object obj) { if (other.getFacetRanges() != null) return false; } else { if (getFacetRanges().size() != other.getFacetRanges().size()) return false; - for (RangeFacet entry : getFacetRanges()) { + for (@SuppressWarnings({"rawtypes"})RangeFacet entry : getFacetRanges()) { boolean found = false; - for (RangeFacet otherRange : other.getFacetRanges()) { + for (@SuppressWarnings({"rawtypes"})RangeFacet otherRange : other.getFacetRanges()) { if (otherRange.getName().equals(entry.getName())) { found = true; @@ -1580,7 +1582,9 @@ public boolean equals(Object obj) { if (!entry.getStart().equals(otherRange.getStart())) return false; if (!entry.getEnd().equals(otherRange.getEnd())) return false; + @SuppressWarnings({"unchecked"}) List myCounts = entry.getCounts(); + @SuppressWarnings({"unchecked"}) List otherRangeCounts = otherRange.getCounts(); if ( (myCounts == null && otherRangeCounts != null) || (myCounts != null && otherRangeCounts == null) @@ -1606,10 +1610,20 @@ public boolean equals(Object obj) { } return true; } + + @Override + public int hashCode() { + throw new UnsupportedOperationException("Calling hashCode in ComparablePivotField"); + } } public static class UnorderedEqualityArrayList extends ArrayList { - + + @Override + public int hashCode() { + throw new UnsupportedOperationException("Calling hashCode in 
UnorderedEqualityArrayList"); + } + @Override public boolean equals(Object o) { boolean equal = false; @@ -1659,9 +1673,9 @@ public int compare(PivotField o1, PivotField o2) { } } if (compare == 0) { - for (RangeFacet entry : o1.getFacetRanges()) { + for (@SuppressWarnings({"rawtypes"})RangeFacet entry : o1.getFacetRanges()) { boolean found = false; - for (RangeFacet otherRangeFacet : o2.getFacetRanges()) { + for (@SuppressWarnings({"rawtypes"})RangeFacet otherRangeFacet : o2.getFacetRanges()) { if (otherRangeFacet.getName().equals(entry.getName())) { found = true; } diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedMLTComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedMLTComponentTest.java index d8cdd2a1ed0f..4e91e0d4e89b 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedMLTComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedMLTComponentTest.java @@ -75,7 +75,7 @@ public static void afterClass() { @Test @ShardsFixed(num = 3) - // commented 4-Sep-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018 + @SuppressWarnings({"unchecked"}) public void test() throws Exception { del("*:*"); index(id, "1", "lowerfilt", "toyota", "lowerfilt1", "x"); diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentOptimizationTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentOptimizationTest.java index 6818676706dd..8572ae4ae0c4 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentOptimizationTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentOptimizationTest.java @@ -263,6 +263,7 @@ private QueryResponse queryWithAsserts(String... 
q) throws Exception { if (distribSinglePass) { Map debugMap = response.getDebugMap(); + @SuppressWarnings({"unchecked"}) SimpleOrderedMap track = (SimpleOrderedMap) debugMap.get("track"); assertNotNull(track); assertNotNull(track.get("EXECUTE_QUERY")); diff --git a/solr/core/src/test/org/apache/solr/handler/component/InfixSuggestersTest.java b/solr/core/src/test/org/apache/solr/handler/component/InfixSuggestersTest.java index b1b02e12fc9b..5d315a550a2c 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/InfixSuggestersTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/InfixSuggestersTest.java @@ -101,6 +101,7 @@ public void testReloadDuringBuild() throws Exception { ExecutorService executor = ExecutorUtil.newMDCAwareCachedThreadPool("InfixSuggesterTest"); try { // Build the suggester in the background with a long dictionary + @SuppressWarnings({"rawtypes"}) Future job = executor.submit(() -> expectThrows(RuntimeException.class, SolrCoreState.CoreIsClosedException.class, () -> assertQ(req("qt", rh_analyzing_long, @@ -125,6 +126,7 @@ public void testShutdownDuringBuild() throws Exception { (SolrCoreState.CoreIsClosedException.class, SolrException.class, IllegalStateException.class, NullPointerException.class)); final Throwable[] outerException = new Throwable[1]; // Build the suggester in the background with a long dictionary + @SuppressWarnings({"rawtypes"}) Future job = executor.submit(() -> outerException[0] = expectThrowsAnyOf(expected, () -> assertQ(req("qt", rh_analyzing_long, SuggesterParams.SUGGEST_BUILD_ALL, "true"), "//str[@name='command'][.='buildAll']"))); diff --git a/solr/core/src/test/org/apache/solr/handler/component/PhrasesIdentificationComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/PhrasesIdentificationComponentTest.java index b54b7a9bb4a7..9acf82745891 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/PhrasesIdentificationComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/PhrasesIdentificationComponentTest.java @@ -381,6 +381,7 @@ public void testWhiteboxStats() throws Exception { } + @SuppressWarnings({"unchecked"}) public void testWhiteboxScores() throws Exception { final SchemaField analysisField = h.getCore().getLatestSchema().getField("multigrams_body"); assertNotNull(analysisField); @@ -452,6 +453,7 @@ public void testWhiteboxScores() throws Exception { } + @SuppressWarnings({"unchecked"}) public void testWhiteboxScorcesStopwords() throws Exception { final String input = "why the lazy dog brown fox"; final Map fieldWeights = new TreeMap<>(); @@ -771,6 +773,7 @@ public void assertEmptyStream(final String msg, final Stream st } /** helper, docs for future junit/hamcrest seems to have something similar */ + @SuppressWarnings({"rawtypes"}) public static Matcher lessThan(double expected) { return new BaseMatcher() { @Override public boolean matches(Object actual) { @@ -782,6 +785,7 @@ public static Matcher lessThan(double expected) { }; } /** helper, docs for future junit/hamcrest seems to have something similar */ + @SuppressWarnings({"rawtypes"}) public static Matcher greaterThan(double expected) { return new BaseMatcher() { @Override public boolean matches(Object actual) { diff --git a/solr/core/src/test/org/apache/solr/handler/component/QueryElevationComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/QueryElevationComponentTest.java index ad0816b61930..3f7a34075803 100644 --- 
a/solr/core/src/test/org/apache/solr/handler/component/QueryElevationComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/QueryElevationComponentTest.java @@ -384,46 +384,50 @@ public void testInterface() throws Exception { args.add(QueryElevationComponent.FIELD_TYPE, "string"); args.add(QueryElevationComponent.CONFIG_FILE, "elevate.xml"); - QueryElevationComponent comp = new QueryElevationComponent(); - comp.init(args); - comp.inform(core); + IndexReader reader; + try (SolrQueryRequest req = req()) { + reader = req.getSearcher().getIndexReader(); + } - SolrQueryRequest req = req(); - IndexReader reader = req.getSearcher().getIndexReader(); - QueryElevationComponent.ElevationProvider elevationProvider = comp.getElevationProvider(reader, core); - req.close(); + try (QueryElevationComponent comp = new QueryElevationComponent()) { + comp.init(args); + comp.inform(core); - // Make sure the boosts loaded properly - assertEquals(11, elevationProvider.size()); - assertEquals(1, elevationProvider.getElevationForQuery("XXXX").elevatedIds.size()); - assertEquals(2, elevationProvider.getElevationForQuery("YYYY").elevatedIds.size()); - assertEquals(3, elevationProvider.getElevationForQuery("ZZZZ").elevatedIds.size()); - assertNull(elevationProvider.getElevationForQuery("xxxx")); - assertNull(elevationProvider.getElevationForQuery("yyyy")); - assertNull(elevationProvider.getElevationForQuery("zzzz")); + QueryElevationComponent.ElevationProvider elevationProvider = comp.getElevationProvider(reader, core); + + // Make sure the boosts loaded properly + assertEquals(11, elevationProvider.size()); + assertEquals(1, elevationProvider.getElevationForQuery("XXXX").elevatedIds.size()); + assertEquals(2, elevationProvider.getElevationForQuery("YYYY").elevatedIds.size()); + assertEquals(3, elevationProvider.getElevationForQuery("ZZZZ").elevatedIds.size()); + assertNull(elevationProvider.getElevationForQuery("xxxx")); + assertNull(elevationProvider.getElevationForQuery("yyyy")); + assertNull(elevationProvider.getElevationForQuery("zzzz")); + } // Now test the same thing with a lowercase filter: 'lowerfilt' args = new NamedList<>(); args.add(QueryElevationComponent.FIELD_TYPE, "lowerfilt"); args.add(QueryElevationComponent.CONFIG_FILE, "elevate.xml"); - comp = new QueryElevationComponent(); - comp.init(args); - comp.inform(core); - elevationProvider = comp.getElevationProvider(reader, core); - assertEquals(11, elevationProvider.size()); - assertEquals(1, elevationProvider.getElevationForQuery("XXXX").elevatedIds.size()); - assertEquals(2, elevationProvider.getElevationForQuery("YYYY").elevatedIds.size()); - assertEquals(3, elevationProvider.getElevationForQuery("ZZZZ").elevatedIds.size()); - assertEquals(1, elevationProvider.getElevationForQuery("xxxx").elevatedIds.size()); - assertEquals(2, elevationProvider.getElevationForQuery("yyyy").elevatedIds.size()); - assertEquals(3, elevationProvider.getElevationForQuery("zzzz").elevatedIds.size()); - - assertEquals("xxxx", comp.analyzeQuery("XXXX")); - assertEquals("xxxxyyyy", comp.analyzeQuery("XXXX YYYY")); - - assertQ("Make sure QEC handles null queries", req("qt", "/elevate", "q.alt", "*:*", "defType", "dismax"), - "//*[@numFound='0']"); + try (QueryElevationComponent comp = new QueryElevationComponent()) { + comp.init(args); + comp.inform(core); + QueryElevationComponent.ElevationProvider elevationProvider = comp.getElevationProvider(reader, core); + assertEquals(11, elevationProvider.size()); + assertEquals(1, 
elevationProvider.getElevationForQuery("XXXX").elevatedIds.size()); + assertEquals(2, elevationProvider.getElevationForQuery("YYYY").elevatedIds.size()); + assertEquals(3, elevationProvider.getElevationForQuery("ZZZZ").elevatedIds.size()); + assertEquals(1, elevationProvider.getElevationForQuery("xxxx").elevatedIds.size()); + assertEquals(2, elevationProvider.getElevationForQuery("yyyy").elevatedIds.size()); + assertEquals(3, elevationProvider.getElevationForQuery("zzzz").elevatedIds.size()); + + assertEquals("xxxx", comp.analyzeQuery("XXXX")); + assertEquals("xxxxyyyy", comp.analyzeQuery("XXXX YYYY")); + + assertQ("Make sure QEC handles null queries", req("qt", "/elevate", "q.alt", "*:*", "defType", "dismax"), + "//*[@numFound='0']"); + } } finally { delete(); } @@ -702,7 +706,9 @@ private void writeElevationConfigFile(File file, String query, String... ids) th out.flush(); out.close(); - log.info("OUT:" + file.getAbsolutePath()); + if (log.isInfoEnabled()) { + log.info("OUT: {}", file.getAbsolutePath()); + } } @Test @@ -901,7 +907,7 @@ public void testQuerySubsetMatching() throws Exception { @Test public void testElevatedIds() throws Exception { - try { + try (QueryElevationComponent comp = new QueryElevationComponent()) { init("schema12.xml"); SolrCore core = h.getCore(); @@ -909,7 +915,6 @@ public void testElevatedIds() throws Exception { args.add(QueryElevationComponent.FIELD_TYPE, "text"); args.add(QueryElevationComponent.CONFIG_FILE, "elevate.xml"); - QueryElevationComponent comp = new QueryElevationComponent(); comp.init(args); comp.inform(core); diff --git a/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java b/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java index d268a4e424da..c36a8ad714e0 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java +++ b/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java @@ -119,7 +119,9 @@ public TestObject decode(InputStream inputStream) { assertEquals("bar", dict.get("foo")); assertEquals("bam", dict.get("baz")); - log.info("Loaded {} using {}", getDict().size(), this.getClass().getClassLoader()); + if (log.isInfoEnabled()) { + log.info("Loaded {} using {}", getDict().size(), this.getClass().getClassLoader()); + } // if we get here we have seen the data from the blob and all we need is to test that two collections // are able to see the same object.. 
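A recurring refactor in this patch, applied in the ResourceSharingTestComponent hunk above and again in TaggerTestCase, SimpleFacetsTest, TestWriterPerf, and SolrJmxReporterCloudTest further down, replaces string concatenation in log calls with SLF4J {} placeholders and wraps calls whose arguments are expensive to compute in a log.isInfoEnabled() guard. A minimal sketch of the idiom; the class name and the expensiveSummary() helper are illustrative stand-ins, not code from the patch:

import java.lang.invoke.MethodHandles;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class GuardedLoggingExample {
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  void report(int size) {
    // Parameterized form: the message string is only assembled if INFO is enabled.
    log.info("Loaded {} entries", size);
  }

  void reportExpensive() {
    // Guard when computing an argument is itself costly; without the check,
    // expensiveSummary() would run even while INFO logging is disabled.
    if (log.isInfoEnabled()) {
      log.info("State: {}", expensiveSummary());
    }
  }

  private String expensiveSummary() {
    return "..."; // stands in for a costly call such as getCollectionState(COLLECTION)
  }
}

The parameterized form alone avoids building the message string when INFO is off; the explicit isInfoEnabled() guard is only needed when evaluating an argument has its own cost.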
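The SearchHandlerTest hunks that follow show the patch's other recurring pattern: a component that implements Closeable is built inside try-with-resources, and an IOException thrown while closing it is turned into a test failure rather than propagated. A minimal sketch, assuming JUnit 4 and a hypothetical CloseableComponent in place of SearchHandler, TermsComponent, or ZookeeperStatusHandler:

import java.io.Closeable;
import java.io.IOException;
import static org.junit.Assert.fail;

// Hypothetical stand-in for the Closeable request handlers used in these tests.
class CloseableComponent implements Closeable {
  void init() { /* configure the component */ }
  @Override public void close() throws IOException { /* release resources */ }
}

public class CloseablePatternExample {
  public void testPattern() {
    try (CloseableComponent comp = new CloseableComponent()) {
      comp.init();
      // assertions against comp belong here, inside the resource scope
    } catch (IOException e) {
      fail("IOException closing CloseableComponent");
    }
  }
}

In the hunks below the same shape appears with the assertions kept inside the try block, so the handler is still open while they run.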
diff --git a/solr/core/src/test/org/apache/solr/handler/component/SearchHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/component/SearchHandlerTest.java index c8289bda5f0c..f0b297329043 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/SearchHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/SearchHandlerTest.java @@ -16,6 +16,7 @@ */ package org.apache.solr.handler.component; +import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; @@ -45,8 +46,6 @@ public static void beforeTests() throws Exception { initCore("solrconfig.xml","schema.xml"); } - - @SuppressWarnings("unchecked") @Test public void testInitialization() { @@ -57,15 +56,18 @@ public void testInitialization() List names0 = new ArrayList<>(); names0.add( MoreLikeThisComponent.COMPONENT_NAME ); - NamedList args = new NamedList(); + NamedList> args = new NamedList<>(); args.add( SearchHandler.INIT_COMPONENTS, names0 ); - SearchHandler handler = new SearchHandler(); - handler.init( args ); - handler.inform( core ); - - assertEquals( 1, handler.getComponents().size() ); - assertEquals( core.getSearchComponent( MoreLikeThisComponent.COMPONENT_NAME ), - handler.getComponents().get( 0 ) ); + try (SearchHandler handler = new SearchHandler()) { + handler.init(args); + handler.inform(core); + + assertEquals(1, handler.getComponents().size()); + assertEquals(core.getSearchComponent(MoreLikeThisComponent.COMPONENT_NAME), + handler.getComponents().get(0)); + } catch (IOException e) { + fail("IOException closing SearchHandler"); + } // Build an explicit list that includes the debug comp. //----------------------------------------------- @@ -74,19 +76,22 @@ public void testInitialization() names0.add( DebugComponent.COMPONENT_NAME ); names0.add( MoreLikeThisComponent.COMPONENT_NAME ); - args = new NamedList(); + args = new NamedList<>(); args.add( SearchHandler.INIT_COMPONENTS, names0 ); - handler = new SearchHandler(); - handler.init( args ); - handler.inform( core ); - - assertEquals( 3, handler.getComponents().size() ); - assertEquals( core.getSearchComponent( FacetComponent.COMPONENT_NAME ), - handler.getComponents().get( 0 ) ); - assertEquals( core.getSearchComponent( DebugComponent.COMPONENT_NAME ), - handler.getComponents().get( 1 ) ); - assertEquals( core.getSearchComponent( MoreLikeThisComponent.COMPONENT_NAME ), - handler.getComponents().get( 2 ) ); + try (SearchHandler handler = new SearchHandler()) { + handler.init(args); + handler.inform(core); + + assertEquals(3, handler.getComponents().size()); + assertEquals(core.getSearchComponent(FacetComponent.COMPONENT_NAME), + handler.getComponents().get(0)); + assertEquals(core.getSearchComponent(DebugComponent.COMPONENT_NAME), + handler.getComponents().get(1)); + assertEquals(core.getSearchComponent(MoreLikeThisComponent.COMPONENT_NAME), + handler.getComponents().get(2)); + } catch (IOException e) { + fail("Exception when closing SearchHandler"); + } // First/Last list @@ -97,19 +102,22 @@ public void testInitialization() List names1 = new ArrayList<>(); names1.add( FacetComponent.COMPONENT_NAME ); - args = new NamedList(); + args = new NamedList<>(); args.add( SearchHandler.INIT_FIRST_COMPONENTS, names0 ); args.add( SearchHandler.INIT_LAST_COMPONENTS, names1 ); - handler = new SearchHandler(); - handler.init( args ); - handler.inform( core ); - - List comps = handler.getComponents(); - assertEquals( 2+handler.getDefaultComponents().size(), comps.size() ); - assertEquals( 
core.getSearchComponent( MoreLikeThisComponent.COMPONENT_NAME ), comps.get( 0 ) ); - assertEquals( core.getSearchComponent( FacetComponent.COMPONENT_NAME ), comps.get( comps.size()-2 ) ); - //Debug component is always last in this case - assertEquals( core.getSearchComponent( DebugComponent.COMPONENT_NAME ), comps.get( comps.size()-1 ) ); + try (SearchHandler handler = new SearchHandler()) { + handler.init(args); + handler.inform(core); + + List comps = handler.getComponents(); + assertEquals(2 + handler.getDefaultComponents().size(), comps.size()); + assertEquals(core.getSearchComponent(MoreLikeThisComponent.COMPONENT_NAME), comps.get(0)); + assertEquals(core.getSearchComponent(FacetComponent.COMPONENT_NAME), comps.get(comps.size() - 2)); + //Debug component is always last in this case + assertEquals(core.getSearchComponent(DebugComponent.COMPONENT_NAME), comps.get(comps.size() - 1)); + } catch (IOException e) { + fail("Exception when closing SearchHandler"); + } } @Test diff --git a/solr/core/src/test/org/apache/solr/handler/component/SpellCheckComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/SpellCheckComponentTest.java index 3d18b9a8ad58..30457e4617bf 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/SpellCheckComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/SpellCheckComponentTest.java @@ -245,6 +245,7 @@ public void testRelativeIndexDirLocation() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testReloadOnStart() throws Exception { assertU(adoc("id", "0", "lowerfilt", "This is a title")); assertU(commit()); @@ -253,7 +254,9 @@ public void testReloadOnStart() throws Exception { "default", "spellcheck.build", "true"); assertQ(request, "//arr[@name='suggestion'][.='title']"); + @SuppressWarnings({"rawtypes"}) NamedList args = new NamedList(); + @SuppressWarnings({"rawtypes"}) NamedList spellchecker = new NamedList(); spellchecker.add(SolrSpellChecker.DICTIONARY_NAME, "default"); spellchecker.add(AbstractLuceneSpellChecker.FIELD, "lowerfilt"); @@ -282,6 +285,7 @@ public void testReloadOnStart() throws Exception { } rb.req.close(); + checker.close(); } @SuppressWarnings("unchecked") @@ -298,6 +302,7 @@ public void testRebuildOnCommit() throws Exception { } @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void testThresholdTokenFrequency() throws Exception { //"document" is in 2 documents but "another" is only in 1. @@ -345,7 +350,7 @@ public void testThresholdTokenFrequency() throws Exception { req.close(); values = rsp.getValues(); spellCheck = (NamedList) values.get("spellcheck"); - suggestions = (NamedList) spellCheck.get("suggestions"); + suggestions = (NamedList) spellCheck.get("suggestions"); assertTrue(suggestions.get("suggestion")==null); assertTrue((Boolean) spellCheck.get("correctlySpelled")==false); } diff --git a/solr/core/src/test/org/apache/solr/handler/component/StatsComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/StatsComponentTest.java index 2a445d4c641e..b1636d1cc042 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/StatsComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/StatsComponentTest.java @@ -405,8 +405,6 @@ public void testFieldStatisticsResultsStringField() throws Exception { args.put(StatsParams.STATS_FIELD, f); args.put("f." 
+ f +".stats.calcdistinct","true"); args.put("indent", "true"); - SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); - for (SolrParams baseParams : new SolrParams[] { params("stats.field", f, "stats", "true", "f." + f +".stats.calcdistinct","true"), @@ -1281,13 +1279,15 @@ public void testEnumFieldTypeStatus() throws Exception { ); } - private Doc createDocValuesDocument(List types, String fieldName, String id, Comparable... values) throws Exception { + private Doc createDocValuesDocument(List types, String fieldName, String id, + @SuppressWarnings({"rawtypes"})Comparable... values) throws Exception { Doc doc = createDoc(types); doc.getValues("id").set(0, id); initMultyValued(doc.getValues(fieldName), values); return doc; } + @SuppressWarnings({"rawtypes"}) private List initMultyValued(List cat_docValues, Comparable... comparables) { Collections.addAll(cat_docValues, comparables); return cat_docValues; @@ -1951,6 +1951,7 @@ public void testPercentiles() throws Exception { } + @SuppressWarnings({"unchecked"}) private NamedList extractPercentils(SolrQueryResponse rsp, String key) { return ((NamedList>>> ) rsp.getValues().get("stats")).get("stats_fields").get(key).get("percentiles"); diff --git a/solr/core/src/test/org/apache/solr/handler/component/TermsComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/TermsComponentTest.java index d5567aca375b..660498ae9001 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/TermsComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/TermsComponentTest.java @@ -16,6 +16,7 @@ */ package org.apache.solr.handler.component; +import java.io.IOException; import java.util.Arrays; import java.util.regex.Pattern; @@ -154,10 +155,14 @@ public void testRegexpFlagParsing() { ModifiableSolrParams params = new ModifiableSolrParams(); params.add(TermsParams.TERMS_REGEXP_FLAG, "case_insensitive", "literal", "comments", "multiline", "unix_lines", "unicode_case", "dotall", "canon_eq"); - int flags = new TermsComponent().resolveRegexpFlags(params); - int expected = Pattern.CASE_INSENSITIVE | Pattern.LITERAL | Pattern.COMMENTS | Pattern.MULTILINE | Pattern.UNIX_LINES - | Pattern.UNICODE_CASE | Pattern.DOTALL | Pattern.CANON_EQ; - assertEquals(expected, flags); + try (TermsComponent termsComponent = new TermsComponent()) { + int flags = termsComponent.resolveRegexpFlags(params); + int expected = Pattern.CASE_INSENSITIVE | Pattern.LITERAL | Pattern.COMMENTS | Pattern.MULTILINE | Pattern.UNIX_LINES + | Pattern.UNICODE_CASE | Pattern.DOTALL | Pattern.CANON_EQ; + assertEquals(expected, flags); + } catch (IOException e) { + fail("Error closing TermsComponent"); + } } @Test diff --git a/solr/core/src/test/org/apache/solr/handler/component/TestHttpShardHandlerFactory.java b/solr/core/src/test/org/apache/solr/handler/component/TestHttpShardHandlerFactory.java index f486dbd0e951..32c665eed731 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/TestHttpShardHandlerFactory.java +++ b/solr/core/src/test/org/apache/solr/handler/component/TestHttpShardHandlerFactory.java @@ -79,6 +79,7 @@ public void testLoadBalancerRequestsMinMax() throws Exception { // test that factory is HttpShardHandlerFactory with expected url reserve fraction assertTrue(factory instanceof HttpShardHandlerFactory); + @SuppressWarnings("resource") final HttpShardHandlerFactory httpShardHandlerFactory = ((HttpShardHandlerFactory)factory); assertEquals(expectedLoadBalancerRequestsMinimumAbsolute, 
httpShardHandlerFactory.permittedLoadBalancerRequestsMinimumAbsolute, 0.0); assertEquals(expectedLoadBalancerRequestsMaximumFraction, httpShardHandlerFactory.permittedLoadBalancerRequestsMaximumFraction, 0.0); @@ -122,6 +123,7 @@ public void getShardsWhitelist() throws Exception { cc = CoreContainer.createAndLoad(home, home.resolve("solr.xml")); factory = cc.getShardHandlerFactory(); assertTrue(factory instanceof HttpShardHandlerFactory); + @SuppressWarnings("resource") final HttpShardHandlerFactory httpShardHandlerFactory = ((HttpShardHandlerFactory)factory); assertThat(httpShardHandlerFactory.getWhitelistHostChecker().getWhitelistHosts().size(), is(2)); assertThat(httpShardHandlerFactory.getWhitelistHostChecker().getWhitelistHosts(), hasItem("abc:8983")); @@ -140,7 +142,7 @@ public void testLiveNodesToHostUrl() throws Exception { "1.2.3.4:9000_", "1.2.3.4:9001_solr-2", })); - ClusterState cs = new ClusterState(0, liveNodes, new HashMap<>()); + ClusterState cs = new ClusterState(liveNodes, new HashMap<>()); WhitelistHostChecker checker = new WhitelistHostChecker(null, true); Set hostSet = checker.generateWhitelistFromLiveNodes(cs); assertThat(hostSet.size(), is(3)); diff --git a/solr/core/src/test/org/apache/solr/handler/component/TestTrackingShardHandlerFactory.java b/solr/core/src/test/org/apache/solr/handler/component/TestTrackingShardHandlerFactory.java index 6a87bd768234..613696910fa9 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/TestTrackingShardHandlerFactory.java +++ b/solr/core/src/test/org/apache/solr/handler/component/TestTrackingShardHandlerFactory.java @@ -61,6 +61,7 @@ public void testRequestTracking() throws Exception { CoreContainer container = runner.getCoreContainer(); ShardHandlerFactory factory = container.getShardHandlerFactory(); assert factory instanceof TrackingShardHandlerFactory; + @SuppressWarnings("resource") TrackingShardHandlerFactory trackingShardHandlerFactory = (TrackingShardHandlerFactory) factory; assertSame(trackingQueue, trackingShardHandlerFactory.getTrackingQueue()); } @@ -116,6 +117,7 @@ public void testRequestTracking() throws Exception { CoreContainer container = runner.getCoreContainer(); ShardHandlerFactory factory = container.getShardHandlerFactory(); assert factory instanceof TrackingShardHandlerFactory; + @SuppressWarnings("resource") TrackingShardHandlerFactory trackingShardHandlerFactory = (TrackingShardHandlerFactory) factory; assertFalse(trackingShardHandlerFactory.isTracking()); } diff --git a/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java b/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java index 4bd21fe36b2b..2d51647136c3 100644 --- a/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java +++ b/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java @@ -42,6 +42,8 @@ import org.junit.Test; public class TestExportWriter extends SolrTestCaseJ4 { + + private ObjectMapper mapper = new ObjectMapper(); @BeforeClass public static void beforeClass() throws Exception { @@ -706,6 +708,58 @@ public void testMultipleSorts() throws Exception { validateSort(numDocs); } + private void createLargeIndex() throws Exception { + int BATCH_SIZE = 1000; + int NUM_BATCHES = 100; + SolrInputDocument[] docs = new SolrInputDocument[BATCH_SIZE]; + for (int i = 0; i < NUM_BATCHES; i++) { + for (int j = 0; j < BATCH_SIZE; j++) { + docs[j] = new SolrInputDocument( + "id", String.valueOf(i * BATCH_SIZE + j), + "batch_i_p", String.valueOf(i), + "random_i_p", 
String.valueOf(random().nextInt(BATCH_SIZE)), + "sortabledv", TestUtil.randomSimpleString(random(), 2, 3), + "sortabledv_udvas", String.valueOf(random().nextInt(100)), + "small_i_p", String.valueOf((i + j) % 7) + ); + } + updateJ(jsonAdd(docs), null); + } + assertU(commit()); + } + + @Test + public void testExpr() throws Exception { + assertU(delQ("*:*")); + assertU(commit()); + createLargeIndex(); + SolrQueryRequest req = req("q", "*:*", "qt", "/export", "fl", "id", "sort", "id asc", "expr", "top(n=2,input(),sort=\"id desc\")"); + assertJQ(req, + "response/numFound==100000", + "response/docs/[0]/id=='99999'", + "response/docs/[1]/id=='99998'" + ); + req = req("q", "*:*", "qt", "/export", "fl", "id,sortabledv_udvas", "sort", "sortabledv_udvas asc", "expr", "unique(input(),over=\"sortabledv_udvas\")"); + String rsp = h.query(req); + @SuppressWarnings({"unchecked"}) + Map rspMap = mapper.readValue(rsp, HashMap.class); + @SuppressWarnings({"unchecked"}) + List> docs = (List>) Utils.getObjectByPath(rspMap, false, "/response/docs"); + assertNotNull("missing document results: " + rspMap, docs); + assertEquals("wrong number of unique docs", 100, docs.size()); + for (int i = 0; i < 99; i++) { + boolean found = false; + String si = String.valueOf(i); + for (int j = 0; j < docs.size(); j++) { + if (docs.get(j).get("sortabledv_udvas").equals(si)) { + found = true; + break; + } + } + assertTrue("missing value " + i + " in results", found); + } + } + private void validateSort(int numDocs) throws Exception { // 10 fields List fieldNames = new ArrayList<>(Arrays.asList("floatdv", "intdv", "stringdv", "longdv", "doubledv", @@ -727,13 +781,16 @@ private void validateSort(int numDocs) throws Exception { String fieldsStr = String.join(",", fieldStrs); // fl : field1, field2 String resp = h.query(req("q", "*:*", "qt", "/export", "fl", "id," + fieldsStr, "sort", sortStr)); - ObjectMapper mapper = new ObjectMapper(); + @SuppressWarnings({"rawtypes"}) HashMap respMap = mapper.readValue(resp, HashMap.class); + @SuppressWarnings({"rawtypes"}) List docs = (ArrayList) ((HashMap) respMap.get("response")).get("docs"); SolrQueryRequest selectReq = req("q", "*:*", "qt", "/select", "fl", "id," + fieldsStr, "sort", sortStr, "rows", Integer.toString(numDocs), "wt", "json"); String response = h.query(selectReq); + @SuppressWarnings({"rawtypes"}) Map rsp = (Map)Utils.fromJSONString(response); + @SuppressWarnings({"rawtypes"}) List doclist = (List)(((Map)rsp.get("response")).get("docs")); assert docs.size() == numDocs; diff --git a/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTest.java b/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTest.java index 9bc0bccf9d26..71d5af5dffaf 100644 --- a/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTest.java @@ -80,7 +80,7 @@ public void testFormat() throws Exception { " \n" + " \n" + "\n" + - "\n" + + "\n" + " \n" + " 1\n" + " London Business School\n" + @@ -109,7 +109,7 @@ public void testFormatMatchText() throws Exception { " \n" + " \n" + "\n" + - "\n" + + "\n" + " \n" + " 1\n" + " London Business School\n" + @@ -307,7 +307,7 @@ public void testEmptyCollection() throws Exception { "\n" + "0\n" + "\n" + - "\n" + + "\n" + "\n" + "\n"; assertEquals(expected, rspStr); diff --git a/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTestCase.java b/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTestCase.java index a1d3f6735662..492a58bdf1ae 100644 --- 
a/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTestCase.java +++ b/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTestCase.java @@ -62,7 +62,9 @@ public abstract class TaggerTestCase extends SolrTestCaseJ4 { public TestWatcher watchman = new TestWatcher() { @Override protected void starting(Description description) { - log.info("{} being run...", description.getDisplayName()); + if (log.isInfoEnabled()) { + log.info("{} being run...", description.getDisplayName()); + } } }; @@ -138,6 +140,7 @@ protected void assertTags(SolrQueryRequest req, TestTag... eTags) throws Excepti @SuppressWarnings("unchecked") protected TestTag[] pullTagsFromResponse(SolrQueryRequest req, SolrQueryResponse rsp ) throws IOException { + @SuppressWarnings({"rawtypes"}) NamedList rspValues = rsp.getValues(); Map matchingNames = new HashMap<>(); SolrIndexSearcher searcher = req.getSearcher(); @@ -153,9 +156,10 @@ protected TestTag[] pullTagsFromResponse(SolrQueryRequest req, SolrQueryResponse } //build TestTag[] aTags from response ('a' is actual) + @SuppressWarnings({"rawtypes"}) List mTagsList = (List) rspValues.get("tags"); List aTags = new ArrayList<>(); - for (NamedList map : mTagsList) { + for (@SuppressWarnings({"rawtypes"})NamedList map : mTagsList) { List foundIds = (List) map.get("ids"); for (String id : foundIds) { aTags.add(new TestTag( @@ -175,7 +179,7 @@ protected SolrQueryRequest reqDoc(String doc, String... moreParams) { /** REMEMBER to close() the result req object. */ protected SolrQueryRequest reqDoc(String doc, SolrParams moreParams) { - log.debug("Test doc: "+doc); + log.debug("Test doc: {}", doc); SolrParams params = SolrParams.wrapDefaults(moreParams, baseParams); SolrQueryRequestBase req = new SolrQueryRequestBase(h.getCore(), params) {}; Iterable stream = Collections.singleton((ContentStream)new ContentStreamBase.StringStream(doc)); @@ -202,6 +206,7 @@ public void assertSortedArrayEquals(String message, Object[] expecteds, Object[] fail(message+": didn't expect "+actualsRemaining.first()+" (of "+actualsRemaining.size()+"); "+ error); } + @SuppressWarnings({"rawtypes"}) class TestTag implements Comparable { final int startOffset, endOffset; final String substring; diff --git a/solr/core/src/test/org/apache/solr/highlight/DummyHighlighter.java b/solr/core/src/test/org/apache/solr/highlight/DummyHighlighter.java index 3836a12b86aa..f380129266b5 100644 --- a/solr/core/src/test/org/apache/solr/highlight/DummyHighlighter.java +++ b/solr/core/src/test/org/apache/solr/highlight/DummyHighlighter.java @@ -27,6 +27,7 @@ public class DummyHighlighter extends SolrHighlighter { @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public NamedList doHighlighting(DocList docs, Query query, SolrQueryRequest req, String[] defaultFields) throws IOException { NamedList fragments = new SimpleOrderedMap(); diff --git a/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java b/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java index a77917bbedaa..0deb6a2e8602 100644 --- a/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java +++ b/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java @@ -200,7 +200,7 @@ public void testOffsetWindowTokenFilter() throws Exception { try (Analyzer a1 = new WhitespaceAnalyzer()) { TokenStream tokenStream = a1.tokenStream("", "a b c d e f g h i j k l m n"); - try (OffsetWindowTokenFilter tots = new OffsetWindowTokenFilter(tokenStream)) { + try (DefaultSolrHighlighter.OffsetWindowTokenFilter tots = new 
DefaultSolrHighlighter.OffsetWindowTokenFilter(tokenStream)) { for (String v : multivalued) { TokenStream ts1 = tots.advanceToNextWindowOfLength(v.length()); ts1.reset(); @@ -917,6 +917,24 @@ public void testGetHighlightFields() { localRequest, new String[] {}))); assertEquals(highlightedSetExpected, highlightedSetActual); } + + // SOLR-11334 + args.put("hl.fl", "title, text"); // comma then space + lrf = h.getRequestFactory("", 0, 10, args); + request = lrf.makeRequest("test"); + highlighter = HighlightComponent.getHighlighter(h.getCore()); + highlightFieldNames = Arrays.asList(highlighter.getHighlightFields(null, + request, new String[] {})); + assertEquals("Expected two fields to highlight on", 2, highlightFieldNames + .size()); + assertTrue("Expected to highlight on field \"title\"", + highlightFieldNames.contains("title")); + assertTrue("Expected to highlight on field \"text\"", + highlightFieldNames.contains("text")); + assertFalse("Expected to not highlight on field \"\"", + highlightFieldNames.contains("")); + + request.close(); } @Test diff --git a/solr/core/src/test/org/apache/solr/index/hdfs/CheckHdfsIndexTest.java b/solr/core/src/test/org/apache/solr/index/hdfs/CheckHdfsIndexTest.java index d31bd820be89..de7286d91ab2 100644 --- a/solr/core/src/test/org/apache/solr/index/hdfs/CheckHdfsIndexTest.java +++ b/solr/core/src/test/org/apache/solr/index/hdfs/CheckHdfsIndexTest.java @@ -118,7 +118,9 @@ public void doTest() throws Exception { { SolrClient client = clients.get(0); NamedList response = client.query(new SolrQuery().setRequestHandler("/admin/system")).getResponse(); + @SuppressWarnings({"unchecked"}) NamedList coreInfo = (NamedList) response.get("core"); + @SuppressWarnings({"unchecked"}) String indexDir = ((NamedList) coreInfo.get("directory")).get("data") + "/index"; args = new String[] {indexDir}; diff --git a/solr/core/src/test/org/apache/solr/logging/TestLogWatcher.java b/solr/core/src/test/org/apache/solr/logging/TestLogWatcher.java index 2caf3e0df3a6..a351ea811265 100644 --- a/solr/core/src/test/org/apache/solr/logging/TestLogWatcher.java +++ b/solr/core/src/test/org/apache/solr/logging/TestLogWatcher.java @@ -47,6 +47,7 @@ public void before() { // explicitly. See SOLR-12732.
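The rawtypes suppression added to the watcher declaration just below follows the patch-wide convention of narrowing @SuppressWarnings from whole methods down to the single declaration, parameter, or loop variable that triggers the warning, so that new warnings elsewhere in the method still surface. An illustrative sketch; the firstValue method is hypothetical, not from the patch:

import org.apache.solr.common.util.NamedList;

public class NarrowSuppressionExample {
  // The annotations sit on the raw parameter and on the unchecked cast only,
  // rather than on the enclosing method.
  Object firstValue(@SuppressWarnings({"rawtypes"}) NamedList args) {
    @SuppressWarnings({"unchecked"})
    NamedList<Object> typed = (NamedList<Object>) args;
    return typed.getVal(0);
  }
}

Keeping the suppression this tight is what allows the patch to drop blanket method-level annotations, as in the SearchHandlerTest hunk earlier in this diff.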
@Test public void testLog4jWatcher() throws InterruptedException { + @SuppressWarnings({"rawtypes"}) LogWatcher watcher = null; int lim = random().nextInt(3) + 2; // Every time through this loop, insure that, of all the test messages that have been logged, only the current diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java index 8cf977f5b5ee..1a2c79cfb56f 100644 --- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java @@ -237,6 +237,7 @@ public void testDefaultCloudReporterPeriodUnchanged() throws Exception { assertEquals(60, SolrMetricManager.DEFAULT_CLOUD_REPORTER_PERIOD); } + @SuppressWarnings({"unchecked"}) private PluginInfo createPluginInfo(String name, String group, String registry) { Map attrs = new HashMap<>(); attrs.put("name", name); @@ -247,6 +248,7 @@ private PluginInfo createPluginInfo(String name, String group, String registry) if (registry != null) { attrs.put("registry", registry); } + @SuppressWarnings({"rawtypes"}) NamedList initArgs = new NamedList(); initArgs.add("configurable", "true"); return new PluginInfo("SolrMetricReporter", attrs, initArgs, null); diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java index 21a80fa575ca..f6eb498062b4 100644 --- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java @@ -132,7 +132,7 @@ public void testConfigureReporter() throws Exception { String metricName = SolrMetricManager.mkName(METRIC_NAME, HANDLER_CATEGORY.toString(), HANDLER_NAME); SolrCoreMetricManager coreMetricManager = h.getCore().getCoreMetricManager(); - Timer timer = (Timer) metricManager.timer(null, coreMetricManager.getRegistryName(), metricName); + Timer timer = metricManager.timer(null, coreMetricManager.getRegistryName(), metricName); long initialCount = timer.getCount(); diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrGraphiteReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrGraphiteReporterTest.java index 10e0179495e2..c5bcde2c4921 100644 --- a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrGraphiteReporterTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrGraphiteReporterTest.java @@ -73,7 +73,7 @@ public void testReporter() throws Exception { assertTrue(reporter instanceof SolrGraphiteReporter); Thread.sleep(5000); assertTrue(mock.lines.size() >= 3); - String[] frozenLines = (String[])mock.lines.toArray(new String[mock.lines.size()]); + String[] frozenLines = mock.lines.toArray(new String[mock.lines.size()]); for (String line : frozenLines) { assertTrue(line, line.startsWith("test.solr.node.CONTAINER.cores.")); } diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterCloudTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterCloudTest.java index 3da4b1446b63..94205b2eb756 100644 --- a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterCloudTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterCloudTest.java @@ -99,7 +99,9 @@ public void testJmxReporter() throws Exception { QueryExp exp = Query.eq(Query.attr(JmxMetricsReporter.INSTANCE_TAG), 
Query.value(Integer.toHexString(v.hashCode()))); Set beans = mBeanServer.queryMBeans(null, exp); if (((SolrJmxReporter) v).isStarted() && beans.isEmpty() && jmxReporters < 2) { - log.info("DocCollection: " + getCollectionState(COLLECTION)); + if (log.isInfoEnabled()) { + log.info("DocCollection: {}", getCollectionState(COLLECTION)); + } fail("JMX reporter " + k + " for registry " + registry + " failed to register any beans!"); } else { Set categories = new HashSet<>(); @@ -109,7 +111,7 @@ public void testJmxReporter() throws Exception { categories.add(cat); } }); - log.info("Registered categories: " + categories); + log.info("Registered categories: {}", categories); assertTrue("Too few categories: " + categories, categories.size() > 5); } }); diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java index e93938abf5de..4387f9507cf3 100644 --- a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java @@ -69,6 +69,7 @@ public void testReporter() throws Exception { assertTrue(reporter2 instanceof SolrSlf4jReporter); LogWatcherConfig watcherCfg = new LogWatcherConfig(true, null, null, 100); + @SuppressWarnings({"rawtypes"}) LogWatcher watcher = LogWatcher.newRegisteredLogWatcher(watcherCfg, null); watcher.setThreshold("INFO"); diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrShardReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrShardReporterTest.java index c8a6af4d324b..3d8669acf0e3 100644 --- a/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrShardReporterTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrShardReporterTest.java @@ -32,6 +32,8 @@ import org.apache.solr.metrics.AggregateMetric; import org.apache.solr.metrics.SolrCoreMetricManager; import org.apache.solr.metrics.SolrMetricManager; +import org.junit.AfterClass; +import org.junit.BeforeClass; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -46,6 +48,16 @@ public SolrShardReporterTest() { schemaString = "schema15.xml"; // we need a string id } + @BeforeClass + public static void shardReporterBeforeClass() { + System.setProperty("solr.allowPaths", "*"); + } + + @AfterClass + public static void shardReporterAfterClass() { + System.clearProperty("solr.allowPaths"); + } + @Override public String getSolrXml() { return "solr-solrreporter.xml"; diff --git a/solr/core/src/test/org/apache/solr/pkg/TestPackages.java b/solr/core/src/test/org/apache/solr/pkg/TestPackages.java index 55f3c1b79cdd..2d63c1832015 100644 --- a/solr/core/src/test/org/apache/solr/pkg/TestPackages.java +++ b/solr/core/src/test/org/apache/solr/pkg/TestPackages.java @@ -19,12 +19,12 @@ import java.io.IOException; import java.nio.ByteBuffer; -import java.util.Arrays; -import java.util.Collections; -import java.util.Map; +import java.util.*; import java.util.concurrent.Callable; import org.apache.commons.codec.digest.DigestUtils; +import org.apache.lucene.analysis.util.ResourceLoader; +import org.apache.lucene.analysis.util.ResourceLoaderAware; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrRequest; @@ -40,19 +40,27 @@ import org.apache.solr.client.solrj.request.beans.Package; import 
org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.util.ClientUtils; -import org.apache.solr.cloud.ConfigRequest; import org.apache.solr.cloud.MiniSolrCloudCluster; import org.apache.solr.cloud.SolrCloudTestCase; import org.apache.solr.common.MapWriterMap; import org.apache.solr.common.NavigableObject; import org.apache.solr.common.SolrInputDocument; +import org.apache.solr.common.annotation.JsonProperty; import org.apache.solr.common.params.MapSolrParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.util.ReflectMapWriter; import org.apache.solr.common.util.Utils; +import org.apache.solr.core.SolrCore; import org.apache.solr.filestore.PackageStoreAPI; import org.apache.solr.filestore.TestDistribPackageStore; +import org.apache.solr.handler.RequestHandlerBase; +import org.apache.solr.request.SolrQueryRequest; +import org.apache.solr.response.SolrQueryResponse; +import org.apache.solr.search.QParser; +import org.apache.solr.search.QParserPlugin; import org.apache.solr.util.LogLevel; +import org.apache.solr.util.plugin.SolrCoreAware; import org.apache.zookeeper.data.Stat; import org.junit.After; import org.junit.Before; @@ -80,8 +88,15 @@ public void setup() { public void teardown() { System.clearProperty("enable.packages"); } - + public static class ConfigPlugin implements ReflectMapWriter { + @JsonProperty + public String name; + + @JsonProperty("class") + public String klass; + } @Test + @SuppressWarnings({"unchecked"}) public void testPluginLoading() throws Exception { MiniSolrCloudCluster cluster = configureCluster(4) @@ -98,8 +113,6 @@ public void testPluginLoading() throws Exception { String COLLECTION_NAME = "testPluginLoadingColl"; byte[] derFile = readFile("cryptokeys/pub_key512.der"); uploadKey(derFile, PackageStoreAPI.KEYS_DIR+"/pub_key512.der", cluster); -// cluster.getZkClient().makePath("/keys/exe", true); -// cluster.getZkClient().create("/keys/exe/pub_key512.der", derFile, CreateMode.PERSISTENT, true); postFileAndWait(cluster, "runtimecode/runtimelibs.jar.bin", FILE1, "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ=="); @@ -136,20 +149,39 @@ public void testPluginLoading() throws Exception { ":result:packages:mypkg[0]:version", "1.0", ":result:packages:mypkg[0]:files[0]", FILE1 )); - - String payload = "{\n" + - "'create-requesthandler' : { 'name' : '/runtime', 'class': 'mypkg:org.apache.solr.core.RuntimeLibReqHandler' }," + - "'create-searchcomponent' : { 'name' : 'get', 'class': 'mypkg:org.apache.solr.core.RuntimeLibSearchComponent' }," + - "'create-queryResponseWriter' : { 'name' : 'json1', 'class': 'mypkg:org.apache.solr.core.RuntimeLibResponseWriter' }" + - "'create-updateProcessor' : { 'name' : 'myurp', 'class': 'mypkg:org.apache.solr.update.TestVersionedURP' }," + - " create-expressible: {name: mincopy , class: 'mypkg:org.apache.solr.client.solrj.io.stream.metrics.MinCopyMetric'}" + - "}"; - cluster.getSolrClient().request(new ConfigRequest(payload) { - @Override - public String getCollection() { - return COLLECTION_NAME; - } - }); + Map plugins = new LinkedHashMap<>(); + ConfigPlugin p = new ConfigPlugin(); + p.klass = "mypkg:org.apache.solr.core.RuntimeLibReqHandler"; + p.name = "/runtime"; + plugins.put("create-requesthandler", p); + + p = new ConfigPlugin(); + p.klass = "mypkg:org.apache.solr.core.RuntimeLibSearchComponent"; + p.name = "get"; + plugins.put("create-searchcomponent", p); + + p = 
new ConfigPlugin(); + p.klass = "mypkg:org.apache.solr.core.RuntimeLibResponseWriter"; + p.name = "json1"; + plugins.put("create-queryResponseWriter", p); + + p = new ConfigPlugin(); + p.klass = "mypkg:org.apache.solr.update.TestVersionedURP"; + p.name = "myurp"; + plugins.put("create-updateProcessor", p); + + p = new ConfigPlugin(); + p.klass = "mypkg:org.apache.solr.client.solrj.io.stream.metrics.MinCopyMetric"; + p.name = "mincopy"; + plugins.put("create-expressible", p); + + + V2Request v2r = new V2Request.Builder( "/c/"+COLLECTION_NAME+ "/config") + .withMethod(SolrRequest.METHOD.POST) + .withPayload(plugins) + .forceV2(true) + .build(); + cluster.getSolrClient().request(v2r); verifyCmponent(cluster.getSolrClient(), COLLECTION_NAME, "queryResponseWriter", "json1", @@ -373,6 +405,27 @@ public RequestWriter.ContentWriter getContentWriter(String expectedType) { verifyCmponent(cluster.getSolrClient(), COLLECTION_NAME, "requestHandler", "/runtime", "mypkg", "2.1" ); + + plugins.clear(); + p = new ConfigPlugin(); + p.name = "/rt_2"; + p.klass = "mypkg:"+ C.class.getName(); + plugins.put("create-requesthandler", p); + + p = new ConfigPlugin(); + p.name = "qp1"; + p.klass = "mypkg:"+ C2.class.getName(); + plugins.put("create-queryparser", p); + + v2r = new V2Request.Builder( "/c/"+COLLECTION_NAME+ "/config") + .withMethod(SolrRequest.METHOD.POST) + .withPayload(plugins) + .forceV2(true) + .build(); + cluster.getSolrClient().request(v2r); + assertTrue(C.informCalled); + assertTrue(C2.informCalled); + //we create a new node. This node does not have the packages. But it should download it from another node JettySolrRunner jetty = cluster.startJettySolrRunner(); //create a new replica for this collection. it should end up @@ -390,23 +443,11 @@ public RequestWriter.ContentWriter getContentWriter(String expectedType) { } } - /* new V2Request.Builder("/c/"+COLLECTIONORALIAS+"/config").withMethod(SolrRequest.METHOD.POST) - .withPayload("{add-expressible: {name: mincopy , class: org.apache.solr.client.solrj.io.stream.metrics.MinCopyMetric}}") - .build().process(cluster.getSolrClient()); - - ModifiableSolrParams _params = new ModifiableSolrParams(); - QueryRequest query = new QueryRequest(new MapSolrParams("action","plugins", "collection", COLLECTIONORALIAS, "wt", "javabin")); - query.setPath("/stream"); - NamedList rsp = cluster.getSolrClient().request(query); - assertEquals("org.apache.solr.client.solrj.io.stream.metrics.MinCopyMetric", rsp._getStr("/plugins/mincopy", null)); - _params = new ModifiableSolrParams(); - query = new QueryRequest(new MapSolrParams("componentName","mincopy", "meta" ,"true", "collection", COLLECTIONORALIAS, "wt", "javabin")); - query.setPath("/config/expressible"); - rsp = cluster.getSolrClient().request(query); - - System.out.println();*/ - - private void executeReq(String uri, JettySolrRunner jetty, Utils.InputStreamConsumer parser, Map expected) throws Exception { + + @SuppressWarnings({"unchecked"}) + private void executeReq(String uri, JettySolrRunner jetty, + @SuppressWarnings({"rawtypes"})Utils.InputStreamConsumer parser, + @SuppressWarnings({"rawtypes"})Map expected) throws Exception { try(HttpSolrClient client = (HttpSolrClient) jetty.newClient()){ TestDistribPackageStore.assertResponseValues(10, () -> { @@ -422,6 +463,7 @@ private void executeReq(String uri, JettySolrRunner jetty, Utils.InputStreamCons private void verifyCmponent(SolrClient client, String COLLECTION_NAME, String componentType, String componentName, String pkg, String version) throws Exception { + 
@SuppressWarnings({"unchecked"}) SolrParams params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME, WT, JAVABIN, "componentName", componentName, @@ -480,13 +522,11 @@ public void testAPI() throws Exception { "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ=="); // with correct signature //after uploading the file, let's delete the keys to see if we get proper error message -// cluster.getZkClient().delete("/keys/exe/pub_key512.der", -1, true); add.files = Arrays.asList(new String[]{FILE2}); /*expectError(req, cluster.getSolrClient(), errPath, "ZooKeeper does not have any public keys");*/ //Now lets' put the keys back -// cluster.getZkClient().create("/keys/exe/pub_key512.der", derFile, CreateMode.PERSISTENT, true); //this time we have a file with proper signature, public keys are in ZK // so the add {} command should succeed @@ -567,6 +607,41 @@ public NavigableObject call() throws Exception { cluster.shutdown(); } } + public static class C extends RequestHandlerBase implements SolrCoreAware { + static boolean informCalled = false; + + @Override + public void inform(SolrCore core) { + informCalled = true; + + } + + @Override + public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) { + + } + + @Override + public String getDescription() { + return "test"; + } + } + + public static class C2 extends QParserPlugin implements ResourceLoaderAware { + static boolean informCalled = false; + + + @Override + public void inform(ResourceLoader loader) throws IOException { + informCalled = true; + + } + + @Override + public QParser createParser(String qstr, SolrParams localParams, SolrParams params, SolrQueryRequest req) { + return null; + } + } static void postFileAndWait(MiniSolrCloudCluster cluster, String fname, String path, String sig) throws Exception { ByteBuffer fileContent = getFileContent(fname); diff --git a/solr/core/src/test/org/apache/solr/request/SimpleFacetsTest.java b/solr/core/src/test/org/apache/solr/request/SimpleFacetsTest.java index e17c821b75c0..15f5b7efafce 100644 --- a/solr/core/src/test/org/apache/solr/request/SimpleFacetsTest.java +++ b/solr/core/src/test/org/apache/solr/request/SimpleFacetsTest.java @@ -524,6 +524,7 @@ static void doEmptyFacetCounts(String field, String[] prefixes) throws Exception SchemaField sf = h.getCore().getLatestSchema().getField(field); String response = JQ(req("q", "*:*")); + @SuppressWarnings({"rawtypes"}) Map rsp = (Map) fromJSONString(response); Long numFound = (Long)(((Map)rsp.get("response")).get("numFound")); @@ -3561,7 +3562,7 @@ public void testRangeFacetFilterVsDocValuesRandom() throws Exception { NamedList rangeFacetsDv; SolrQueryRequest req = req(params); - log.info("Using Params: " + params); + log.info("Using Params: {}", params); try { SolrQueryResponse rsp = h.queryAndResponse("", req); rangeFacetsFilter = (NamedList) ((NamedList) rsp.getValues().get("facet_counts")).get("facet_ranges"); diff --git a/solr/core/src/test/org/apache/solr/request/TestFaceting.java b/solr/core/src/test/org/apache/solr/request/TestFaceting.java index 59b3a41c52c6..0e9284e0fd14 100644 --- a/solr/core/src/test/org/apache/solr/request/TestFaceting.java +++ b/solr/core/src/test/org/apache/solr/request/TestFaceting.java @@ -27,7 +27,9 @@ import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.BytesRef; import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.FacetParams; +import 
org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.uninverting.DocTermOrds; import org.junit.After; import org.junit.BeforeClass; @@ -931,5 +933,28 @@ public void testListedTermCounts() throws Exception { "//lst[@name='facet_fields']/lst[@name='title_ws']/int[2][@name='Book2']", "//lst[@name='facet_fields']/lst[@name='title_ws']/int[3][@name='Book3']"); } + + @Test + public void testFacetCountsWithMinExactCount() throws Exception { + final int NUM_DOCS = 20; + for (int i = 0; i < NUM_DOCS ; i++) { + assertU(adoc("id", String.valueOf(i), "title_ws", "Book1")); + assertU(commit()); + } + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set("q", "title_ws:Book1"); + params.set(FacetParams.FACET, "true"); + params.set(FacetParams.FACET_FIELD, "title_ws"); + assertQ(req(params), + "//lst[@name='facet_fields']/lst[@name='title_ws']/int[1][@name='Book1'][.='20']" + ,"//*[@numFoundExact='true']" + ,"//*[@numFound='" + NUM_DOCS + "']"); + + // It doesn't matter if we request minExactCount, when requesting facets, the numFound value is precise + assertQ(req(params, CommonParams.MIN_EXACT_COUNT, "2", CommonParams.ROWS, "2"), + "//lst[@name='facet_fields']/lst[@name='title_ws']/int[1][@name='Book1'][.='20']" + ,"//*[@numFoundExact='true']" + ,"//*[@numFound='" + NUM_DOCS + "']"); + } } diff --git a/solr/core/src/test/org/apache/solr/request/TestIntervalFaceting.java b/solr/core/src/test/org/apache/solr/request/TestIntervalFaceting.java index 7882dc0aab50..a581264c92b5 100644 --- a/solr/core/src/test/org/apache/solr/request/TestIntervalFaceting.java +++ b/solr/core/src/test/org/apache/solr/request/TestIntervalFaceting.java @@ -398,7 +398,7 @@ private void doTestQuery(int cardinality, String[] fields) throws Exception { try { SolrQueryResponse rsp = h.queryAndResponse("", req); NamedList facetQueries = (NamedList) ((NamedList) rsp.getValues().get("facet_counts")).get("facet_queries"); - NamedList facetIntervals = (NamedList) ((NamedList) (NamedList) ((NamedList) rsp.getValues().get("facet_counts")) + NamedList facetIntervals = (NamedList) ((NamedList) ((NamedList) rsp.getValues().get("facet_counts")) .get("facet_intervals")).get(field); assertEquals("Responses don't have the same number of facets: \n" + facetQueries + "\n" + facetIntervals, facetQueries.size(), getCountDistinctIntervals(facetIntervals)); diff --git a/solr/core/src/test/org/apache/solr/request/TestWriterPerf.java b/solr/core/src/test/org/apache/solr/request/TestWriterPerf.java index 201f9cb63528..d484b353cf2a 100644 --- a/solr/core/src/test/org/apache/solr/request/TestWriterPerf.java +++ b/solr/core/src/test/org/apache/solr/request/TestWriterPerf.java @@ -149,7 +149,10 @@ void doPerf(String writerName, SolrQueryRequest req, int encIter, int decIter) t double decodeTime = timer.getTime(); - log.info("writer "+writerName+", size="+out.size()+", encodeRate="+(encIter*1000L/encodeTime) + ", decodeRate="+(decIter*1000L/decodeTime)); + if (log.isInfoEnabled()) { + log.info("writer {}, size={}, encodeRate={} decodeRate={}" + , writerName, out.size(), (encIter * 1000L / encodeTime), (decIter * 1000L / decodeTime)); + } req.close(); } diff --git a/solr/core/src/test/org/apache/solr/request/macro/TestMacroExpander.java b/solr/core/src/test/org/apache/solr/request/macro/TestMacroExpander.java index 733b960930a4..c6c60cb44815 100644 --- a/solr/core/src/test/org/apache/solr/request/macro/TestMacroExpander.java +++ b/solr/core/src/test/org/apache/solr/request/macro/TestMacroExpander.java @@ -121,6 +121,7 @@ 
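Looking back at testFacetCountsWithMinExactCount above, a hedged sketch of the request it issues: because computing facet counts requires visiting every match, the hit count stays exact even though minExactCount would normally permit an approximate numFound.

```java
// Mirrors the parameters the test builds; assertQ(...) then checks that
// numFoundExact remains true and numFound equals the full match count.
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("q", "title_ws:Book1");
params.set(FacetParams.FACET, "true");
params.set(FacetParams.FACET_FIELD, "title_ws");
params.set(CommonParams.MIN_EXACT_COUNT, "2"); // would otherwise allow approximation
params.set(CommonParams.ROWS, "2");
```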
public void testMap() { // see SOLR-9740, the second fq param was being dropped. request.put("expr", new String[] {"${one_ref}"}); // expr is for streaming expressions, no replacement by default request.put("one_ref",new String[] {"one"}); request.put("three_ref",new String[] {"three"}); + @SuppressWarnings({"rawtypes"}) Map expanded = MacroExpander.expand(request); assertEquals("zero", ((String[])expanded.get("fq"))[0]); assertEquals("one", ((String[])expanded.get("fq"))[1]); @@ -142,6 +143,7 @@ public void testMapExprExpandOn() { String oldVal = System.getProperty("StreamingExpressionMacros","false"); System.setProperty("StreamingExpressionMacros", "true"); try { + @SuppressWarnings({"rawtypes"}) Map expanded = MacroExpander.expand(request); assertEquals("zero", ((String[])expanded.get("fq"))[0]); assertEquals("one", ((String[])expanded.get("fq"))[1]); diff --git a/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java b/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java index 8a6cb71ede2d..5631df34dd81 100644 --- a/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java +++ b/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java @@ -93,12 +93,14 @@ public void testJSON() throws IOException { assertEquals(JSONWriter.JSON_NL_STYLE_COUNT, namedListStyles.length); } + @SuppressWarnings({"unchecked"}) private void implTestJSON(final String namedListStyle) throws IOException { SolrQueryRequest req = req("wt","json","json.nl",namedListStyle, "indent", "off"); SolrQueryResponse rsp = new SolrQueryResponse(); JSONResponseWriter w = new JSONResponseWriter(); StringWriter buf = new StringWriter(); + @SuppressWarnings({"rawtypes"}) NamedList nl = new NamedList(); nl.add("data1", "he\u2028llo\u2029!"); // make sure that 2028 and 2029 are both escaped (they are illegal in javascript) nl.add(null, 42); @@ -180,8 +182,8 @@ public void testJSONSolrDocument() throws Exception { result.contains("\"id\"") && result.contains("\"score\"") && result.contains("_children_")); - String expectedResult = "{'response':{'numFound':1,'start':0,'maxScore':0.7,'docs':[{'id':'1', 'score':'0.7'," + - " '_children_':{'numFound':1,'start':0,'docs':[{'id':'2', 'score':'0.4', 'path':['a>b', 'a>b>c']}] }}] }}"; + String expectedResult = "{'response':{'numFound':1,'start':0,'maxScore':0.7, 'numFoundExact':true,'docs':[{'id':'1', 'score':'0.7'," + + " '_children_':{'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'2', 'score':'0.4', 'path':['a>b', 'a>b>c']}] }}] }}"; String error = JSONTestUtil.match(result, "=="+expectedResult); assertNull("response validation failed with error: " + error, error); @@ -211,7 +213,7 @@ public void testArrntvWriterOverridesAllWrites() { methodsExpectedNotOverriden.add("public default void org.apache.solr.common.util.JsonTextWriter.writeIterator(org.apache.solr.common.IteratorWriter) throws java.io.IOException"); methodsExpectedNotOverriden.add("public default void org.apache.solr.common.util.JsonTextWriter.writeJsonIter(java.util.Iterator) throws java.io.IOException"); - final Class subClass = ArrayOfNameTypeValueJSONWriter.class; + final Class subClass = JSONResponseWriter.ArrayOfNameTypeValueJSONWriter.class; final Class superClass = subClass.getSuperclass(); List allSuperClassMethods = new ArrayList<>(); @@ -256,7 +258,7 @@ public void testArrntvWriterOverridesAllWrites() { @Test public void testArrntvWriterLacksMethodsOfItsOwn() { - final Class subClass = ArrayOfNameTypeValueJSONWriter.class; + final Class subClass = 
JSONResponseWriter.ArrayOfNameTypeValueJSONWriter.class; final Class superClass = subClass.getSuperclass(); // ArrayOfNamedValuePairJSONWriter is a simple sub-class // which should have (almost) no methods of its own diff --git a/solr/core/src/test/org/apache/solr/response/SmileWriterTest.java b/solr/core/src/test/org/apache/solr/response/SmileWriterTest.java index 871f8c42b621..4d2df6ac9dca 100644 --- a/solr/core/src/test/org/apache/solr/response/SmileWriterTest.java +++ b/solr/core/src/test/org/apache/solr/response/SmileWriterTest.java @@ -71,6 +71,7 @@ public void testTypes() throws IOException { SmileResponseWriter smileResponseWriter = new SmileResponseWriter(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); smileResponseWriter.write(baos,req,rsp); + @SuppressWarnings({"rawtypes"}) Map m = (Map) decodeSmile(new ByteArrayInputStream(baos.toByteArray())); CharArr out = new CharArr(); JSONWriter jsonWriter = new JSONWriter(out, 2); @@ -83,12 +84,14 @@ public void testTypes() throws IOException { } @Test + @SuppressWarnings({"unchecked"}) public void testJSON() throws IOException { SolrQueryRequest req = req("wt","json","json.nl","arrarr"); SolrQueryResponse rsp = new SolrQueryResponse(); SmileResponseWriter w = new SmileResponseWriter(); ByteArrayOutputStream buf = new ByteArrayOutputStream(); + @SuppressWarnings({"rawtypes"}) NamedList nl = new NamedList(); nl.add("data1", "he\u2028llo\u2029!"); // make sure that 2028 and 2029 are both escaped (they are illegal in javascript) nl.add(null, 42); @@ -98,7 +101,9 @@ public void testJSON() throws IOException { rsp.add("short", Short.valueOf((short)-4)); String expected = "{\"nl\":[[\"data1\",\"he\\u2028llo\\u2029!\"],[null,42]],byte:-3,short:-4}"; w.write(buf, req, rsp); + @SuppressWarnings({"rawtypes"}) Map m = (Map) decodeSmile(new ByteArrayInputStream(buf.toByteArray())); + @SuppressWarnings({"rawtypes"}) Map o2 = (Map) new ObjectBuilder(new JSONParser(new StringReader(expected))).getObject(); assertEquals(Utils.toJSONString(m), Utils.toJSONString(o2)); req.close(); @@ -133,9 +138,12 @@ public void testJSONSolrDocument() throws IOException { w.write(buf, req, rsp); byte[] bytes = buf.toByteArray(); + @SuppressWarnings({"rawtypes"}) Map m = (Map) decodeSmile(new ByteArrayInputStream(bytes)); m = (Map) m.get("response"); + @SuppressWarnings({"rawtypes"}) List l = (List) m.get("docs"); + @SuppressWarnings({"rawtypes"}) Map doc = (Map) l.get(0); assertFalse(doc.containsKey("subject")); assertFalse(doc.containsKey("title")); @@ -146,6 +154,7 @@ public void testJSONSolrDocument() throws IOException { @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void test10Docs() throws IOException { SolrQueryResponse response = new SolrQueryResponse(); SolrDocumentList l = constructSolrDocList(response); @@ -166,6 +175,7 @@ public void test10Docs() throws IOException { } + @SuppressWarnings({"unchecked"}) public static SolrDocumentList constructSolrDocList(SolrQueryResponse response) { SolrDocumentList l = new SolrDocumentList(); for(int i=0;i<10; i++){ @@ -219,6 +229,7 @@ public static Object decodeSmile( InputStream is) throws IOException { } + @SuppressWarnings({"unchecked", "rawtypes"}) public static Object getVal(JsonNode value) { if (value instanceof NullNode) { return null; diff --git a/solr/core/src/test/org/apache/solr/response/TestBinaryResponseWriter.java b/solr/core/src/test/org/apache/solr/response/TestBinaryResponseWriter.java index bbc4985ad1d6..e80b28c02cbe 100644 --- 
a/solr/core/src/test/org/apache/solr/response/TestBinaryResponseWriter.java +++ b/solr/core/src/test/org/apache/solr/response/TestBinaryResponseWriter.java @@ -62,6 +62,7 @@ public void testBytesRefWriting() { compareStringFormat("LIVE: सबरीमाला मंदिर के पास पहुंची दो महिलाएं, जमकर हो रहा विरोध-प्रदर्शन"); } + @SuppressWarnings({"unchecked", "rawtypes"}) public void testJavabinCodecWithCharSeq() throws IOException { SolrDocument document = new SolrDocument(); document.put("id", "1"); @@ -103,6 +104,7 @@ public void testUUID() throws Exception { BinaryQueryResponseWriter writer = (BinaryQueryResponseWriter) h.getCore().getQueryResponseWriter("javabin"); ByteArrayOutputStream baos = new ByteArrayOutputStream(); writer.write(baos, req, rsp); + @SuppressWarnings({"rawtypes"}) NamedList res; try (JavaBinCodec jbc = new JavaBinCodec()) { res = (NamedList) jbc.unmarshal(new ByteArrayInputStream(baos.toByteArray())); diff --git a/solr/core/src/test/org/apache/solr/response/TestGeoJSONResponseWriter.java b/solr/core/src/test/org/apache/solr/response/TestGeoJSONResponseWriter.java index 191136bc2a36..99e2c5093854 100644 --- a/solr/core/src/test/org/apache/solr/response/TestGeoJSONResponseWriter.java +++ b/solr/core/src/test/org/apache/solr/response/TestGeoJSONResponseWriter.java @@ -79,6 +79,7 @@ public static void createIndex() { assertU(commit()); } + @SuppressWarnings({"unchecked"}) protected Map readJSON(String json) { try { return jsonmapper.readValue(json, Map.class); @@ -91,6 +92,7 @@ protected Map readJSON(String json) { return null; } + @SuppressWarnings({"unchecked"}) protected Map getFirstFeatureGeometry(Map json) { Map rsp = (Map)json.get("response"); @@ -213,8 +215,10 @@ public void testGeoJSONOutput() throws Exception { } } + @SuppressWarnings({"unchecked"}) protected Map readFirstDoc(String json) { + @SuppressWarnings({"rawtypes"}) List docs = (List)((Map)readJSON(json).get("response")).get("docs"); return (Map)docs.get(0); } diff --git a/solr/core/src/test/org/apache/solr/response/TestGraphMLResponseWriter.java b/solr/core/src/test/org/apache/solr/response/TestGraphMLResponseWriter.java index 72af04b23ae1..7edffa4d9aa0 100644 --- a/solr/core/src/test/org/apache/solr/response/TestGraphMLResponseWriter.java +++ b/solr/core/src/test/org/apache/solr/response/TestGraphMLResponseWriter.java @@ -33,6 +33,7 @@ import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.stream.expr.Explanation; import org.apache.solr.request.SolrQueryRequest; +import org.apache.solr.util.BaseTestHarness; import org.junit.BeforeClass; import org.junit.Test; @@ -44,9 +45,12 @@ public static void beforeClass() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testGraphMLOutput() throws Exception { SolrQueryRequest request = req("blah", "blah"); // Just need a request to attach the stream and traversal to. 
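An aside on the javabin round-trip that testUUID (and testJavabinCodecWithCharSeq) above relies on; a condensed sketch, assuming a prepared request/response pair, with the rawtypes suppression placed on the one declaration that needs it:

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.solr.common.util.JavaBinCodec;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.BinaryQueryResponseWriter;
import org.apache.solr.response.SolrQueryResponse;

class JavabinRoundTrip {
  // Serialize a response with the javabin writer, then decode it for assertions.
  @SuppressWarnings({"rawtypes"})
  static NamedList roundTrip(BinaryQueryResponseWriter writer, SolrQueryRequest req,
                             SolrQueryResponse rsp) throws Exception {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    writer.write(baos, req, rsp);
    try (JavaBinCodec jbc = new JavaBinCodec()) {
      return (NamedList) jbc.unmarshal(new ByteArrayInputStream(baos.toByteArray()));
    }
  }
}
```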
+ @SuppressWarnings({"rawtypes"}) SolrQueryResponse response = new SolrQueryResponse(); + @SuppressWarnings({"rawtypes"}) Map context = request.getContext(); TupleStream stream = new TestStream(); //Simulates a GatherNodesStream Traversal traversal = new Traversal(); @@ -59,7 +63,7 @@ public void testGraphMLOutput() throws Exception { String graphML = writer.toString(); //Validate the nodes - String error = h.validateXPath(graphML, + String error = BaseTestHarness.validateXPath(graphML, "//graph/node[1][@id ='bill']", "//graph/node[2][@id ='jim']", "//graph/node[3][@id ='max']"); @@ -67,7 +71,7 @@ public void testGraphMLOutput() throws Exception { throw new Exception(error); } //Validate the edges - error = h.validateXPath(graphML, + error = BaseTestHarness.validateXPath(graphML, "//graph/edge[1][@source ='jim']", "//graph/edge[1][@target ='bill']", "//graph/edge[2][@source ='max']", @@ -84,31 +88,35 @@ public void testGraphMLOutput() throws Exception { } + @SuppressWarnings({"unchecked"}) private static class TestStream extends TupleStream { private Iterator tuples; public TestStream() { //Create some nodes - List testTuples = new ArrayList(); + List testTuples = new ArrayList<>(); + @SuppressWarnings({"rawtypes"}) Map m1 = new HashMap(); - List an1 = new ArrayList(); + List an1 = new ArrayList<>(); an1.add("jim"); an1.add("max"); m1.put("node", "bill"); m1.put("ancestors", an1); testTuples.add(new Tuple(m1)); + @SuppressWarnings({"rawtypes"}) Map m2 = new HashMap(); - List an2 = new ArrayList(); + List an2 = new ArrayList<>(); an2.add("max"); m2.put("node", "jim"); m2.put("ancestors", an2); testTuples.add(new Tuple(m2)); + @SuppressWarnings({"rawtypes"}) Map m3 = new HashMap(); - List an3 = new ArrayList(); + List an3 = new ArrayList<>(); an3.add("jim"); m3.put("node", "max"); m3.put("ancestors", an3); @@ -133,10 +141,12 @@ public List children() { return null; } + @SuppressWarnings({"unchecked"}) public Tuple read() { if(tuples.hasNext()) { return tuples.next(); } else { + @SuppressWarnings({"rawtypes"}) Map map = new HashMap(); map.put("EOF", true); return new Tuple(map); diff --git a/solr/core/src/test/org/apache/solr/response/TestJavabinTupleStreamParser.java b/solr/core/src/test/org/apache/solr/response/TestJavabinTupleStreamParser.java index 81d0e267048f..74e0c958edb5 100644 --- a/solr/core/src/test/org/apache/solr/response/TestJavabinTupleStreamParser.java +++ b/solr/core/src/test/org/apache/solr/response/TestJavabinTupleStreamParser.java @@ -65,6 +65,7 @@ public void testKnown() throws IOException { " \"a_s\":\"hello3\",\n" + " \"a_i\":3,\n" + " \"a_f\":3.0}]}}"; + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap nl = convert2OrderedMap((Map) Utils.fromJSONString(payload)); byte[] bytes = serialize(nl); @@ -81,6 +82,7 @@ public void testKnown() throws IOException { } + @SuppressWarnings({"unchecked", "rawtypes"}) public SimpleOrderedMap convert2OrderedMap(Map m) { SimpleOrderedMap result = new SimpleOrderedMap<>(); m.forEach((k, v) -> { @@ -93,7 +95,7 @@ public SimpleOrderedMap convert2OrderedMap(Map m) { } public void testSimple() throws IOException { - List> l = new ArrayList(); + List> l = new ArrayList<>(); l.add(Utils.makeMap("id", 1, "f", 1.0f, "s", "Some str 1")); l.add(Utils.makeMap("id", 2, "f", 2.0f, "s", "Some str 2")); l.add(Utils.makeMap("id", 3, "f", 1.0f, "s", "Some str 3")); @@ -141,6 +143,7 @@ public Explanation toExplanation(StreamFactory factory) throws IOException { byte[] bytes = serialize(tupleStream); JavabinTupleStreamParser parser = new 
JavabinTupleStreamParser(new ByteArrayInputStream(bytes), true); + @SuppressWarnings({"rawtypes"}) Map m = parser.next(); assertEquals(1L, m.get("id")); assertEquals(1.0, (Double) m.get("f"), 0.01); @@ -167,6 +170,7 @@ public Explanation toExplanation(StreamFactory factory) throws IOException { assertEquals(Boolean.TRUE, m.get("EOF")); } + @SuppressWarnings({"unchecked", "rawtypes"}) public void testSolrDocumentList() throws IOException { SolrQueryResponse response = new SolrQueryResponse(); SolrDocumentList l = constructSolrDocList(response); @@ -190,6 +194,7 @@ public void testSolrDocumentList() throws IOException { } } + @SuppressWarnings({"unchecked"}) public static byte[] serialize(Object o) throws IOException { SolrQueryResponse response = new SolrQueryResponse(); response.getValues().add("results", o); diff --git a/solr/core/src/test/org/apache/solr/response/TestPHPSerializedResponseWriter.java b/solr/core/src/test/org/apache/solr/response/TestPHPSerializedResponseWriter.java index 641882584b4b..e38374388318 100644 --- a/solr/core/src/test/org/apache/solr/response/TestPHPSerializedResponseWriter.java +++ b/solr/core/src/test/org/apache/solr/response/TestPHPSerializedResponseWriter.java @@ -94,7 +94,7 @@ public void testSolrDocuments() throws IOException { rsp.addResponse(sdl); w.write(buf, req, rsp); - assertEquals("a:1:{s:8:\"response\";a:3:{s:8:\"numFound\";i:0;s:5:\"start\";i:0;s:4:\"docs\";a:2:{i:0;a:6:{s:2:\"id\";s:1:\"1\";s:5:\"data1\";s:5:\"hello\";s:5:\"data2\";i:42;s:5:\"data3\";b:1;s:5:\"data4\";a:2:{s:7:\"data4.1\";s:7:\"hashmap\";s:7:\"data4.2\";s:5:\"hello\";}s:5:\"data5\";a:3:{i:0;s:7:\"data5.1\";i:1;s:7:\"data5.2\";i:2;s:7:\"data5.3\";}}i:1;a:1:{s:2:\"id\";s:1:\"2\";}}}}", + assertEquals("a:1:{s:8:\"response\";a:4:{s:8:\"numFound\";i:0;s:5:\"start\";i:0;s:13:\"numFoundExact\";b:1;s:4:\"docs\";a:2:{i:0;a:6:{s:2:\"id\";s:1:\"1\";s:5:\"data1\";s:5:\"hello\";s:5:\"data2\";i:42;s:5:\"data3\";b:1;s:5:\"data4\";a:2:{s:7:\"data4.1\";s:7:\"hashmap\";s:7:\"data4.2\";s:5:\"hello\";}s:5:\"data5\";a:3:{i:0;s:7:\"data5.1\";i:1;s:7:\"data5.2\";i:2;s:7:\"data5.3\";}}i:1;a:1:{s:2:\"id\";s:1:\"2\";}}}}", buf.toString()); req.close(); } diff --git a/solr/core/src/test/org/apache/solr/response/TestPushWriter.java b/solr/core/src/test/org/apache/solr/response/TestPushWriter.java index 7f1eddb81ce1..3fb32f43b3b0 100644 --- a/solr/core/src/test/org/apache/solr/response/TestPushWriter.java +++ b/solr/core/src/test/org/apache/solr/response/TestPushWriter.java @@ -50,7 +50,10 @@ public void testStandardResponse() throws IOException { new LocalSolrQueryRequest(null, new ModifiableSolrParams()), new SolrQueryResponse()); writeData(pw); osw.flush(); - log.info(new String(baos.toByteArray(), StandardCharsets.UTF_8)); + if (log.isInfoEnabled()) { + log.info("{}", new String(baos.toByteArray(), StandardCharsets.UTF_8)); + } + @SuppressWarnings({"rawtypes"}) Map m = (Map) Utils.fromJSON(baos.toByteArray()); checkValues(m); try (JavaBinCodec jbc = new JavaBinCodec(baos= new ByteArrayOutputStream(), null)) { @@ -62,7 +65,7 @@ public void testStandardResponse() throws IOException { checkValues(m); } - protected void checkValues(Map m) { + protected void checkValues(@SuppressWarnings({"rawtypes"})Map m) { assertEquals(0, ((Number)Utils.getObjectByPath(m, true, "responseHeader/status")).intValue()); assertEquals(10, ((Number)Utils.getObjectByPath(m, true, "response/numFound")).intValue()); assertEquals(1, ((Number)Utils.getObjectByPath(m, true, "response/docs[0]/id")).intValue()); diff --git 
a/solr/core/src/test/org/apache/solr/response/TestRawResponseWriter.java b/solr/core/src/test/org/apache/solr/response/TestRawResponseWriter.java index 7995efa47324..75986c04201a 100644 --- a/solr/core/src/test/org/apache/solr/response/TestRawResponseWriter.java +++ b/solr/core/src/test/org/apache/solr/response/TestRawResponseWriter.java @@ -189,8 +189,10 @@ public void testStructuredDataViaBaseWriters() throws IOException { * * @param baseWriter null or the name of a valid base writer */ + @SuppressWarnings({"unchecked"}) private static RawResponseWriter newRawResponseWriter(String baseWriter) { RawResponseWriter writer = new RawResponseWriter(); + @SuppressWarnings({"rawtypes"}) NamedList initArgs = new NamedList(); if (null != baseWriter) { initArgs.add("base", baseWriter); diff --git a/solr/core/src/test/org/apache/solr/response/TestRetrieveFieldsOptimizer.java b/solr/core/src/test/org/apache/solr/response/TestRetrieveFieldsOptimizer.java index 4409efda9ac8..07609d3ad1b3 100644 --- a/solr/core/src/test/org/apache/solr/response/TestRetrieveFieldsOptimizer.java +++ b/solr/core/src/test/org/apache/solr/response/TestRetrieveFieldsOptimizer.java @@ -106,6 +106,7 @@ public static void initManagedSchemaCore() throws Exception { //TODO, how to generalize? + @SuppressWarnings({"unchecked"}) private static void setupAllFields() throws IOException { IndexSchema schema = h.getCore().getLatestSchema(); @@ -231,6 +232,7 @@ public void run() { } } + @SuppressWarnings({"unchecked", "rawtypes"}) private void checkFetchSources(SolrReturnFields.FIELD_SOURCES source) throws Exception { String flAll = fieldsHolder.allFields.stream() .map(RetrieveField::getName) // This will call testField.getName() @@ -318,6 +320,7 @@ private void check(String flIn, SolrReturnFields.FIELD_SOURCES source) throws Ex assertEquals("We didn't get the values from the expected places! 
", source, ((SolrReturnFields) rsp.returnFields).getFieldSources()); + @SuppressWarnings({"rawtypes"}) NamedList res; try (JavaBinCodec jbc = new JavaBinCodec()) { res = (NamedList) jbc.unmarshal(new ByteArrayInputStream(baos.toByteArray())); diff --git a/solr/core/src/test/org/apache/solr/response/TestSolrQueryResponse.java b/solr/core/src/test/org/apache/solr/response/TestSolrQueryResponse.java index 3e4dc6c6f17a..66aca82c98aa 100644 --- a/solr/core/src/test/org/apache/solr/response/TestSolrQueryResponse.java +++ b/solr/core/src/test/org/apache/solr/response/TestSolrQueryResponse.java @@ -59,6 +59,7 @@ public void testValues() throws Exception { assertEquals("values new value", newValue, response.getValues()); response.add("key2", "value2"); { + @SuppressWarnings({"unchecked"}) final Iterator> it = response.getValues().iterator(); assertTrue(it.hasNext()); final Map.Entry entry1 = it.next(); diff --git a/solr/core/src/test/org/apache/solr/response/transform/TestChildDocTransformerHierarchy.java b/solr/core/src/test/org/apache/solr/response/transform/TestChildDocTransformerHierarchy.java index 26e5f05e9cac..bda4e7d17807 100644 --- a/solr/core/src/test/org/apache/solr/response/transform/TestChildDocTransformerHierarchy.java +++ b/solr/core/src/test/org/apache/solr/response/transform/TestChildDocTransformerHierarchy.java @@ -335,6 +335,7 @@ private static int id() { return idCounter.incrementAndGet(); } + @SuppressWarnings({"unchecked"}) private static void cleanSolrDocumentFields(SolrDocument input) { for(String fieldName: fieldsToRemove) { input.removeFields(fieldName); diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java b/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java index 68c7811f1210..88b9d862e3a5 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java @@ -109,15 +109,20 @@ public void testMultipleAddFieldWithErrors() throws Exception { " }"; String response = restTestHarness.post("/schema", json(payload)); + @SuppressWarnings({"rawtypes"}) Map map = (Map) fromJSONString(response); + @SuppressWarnings({"rawtypes"}) Map error = (Map)map.get("error"); assertNotNull("No errors", error); + @SuppressWarnings({"rawtypes"}) List details = (List)error.get("details"); assertNotNull("No details", details); assertEquals("Wrong number of details", 2, details.size()); + @SuppressWarnings({"rawtypes"}) List firstErrorList = (List)((Map)details.get(0)).get("errorMessages"); assertEquals(1, firstErrorList.size()); assertTrue (((String)firstErrorList.get(0)).contains("Field 'a1': Field type 'string1' not found.\n")); + @SuppressWarnings({"rawtypes"}) List secondErrorList = (List)((Map)details.get(1)).get("errorMessages"); assertEquals(1, secondErrorList.size()); assertTrue (((String)secondErrorList.get(0)).contains("is a required field")); @@ -148,12 +153,16 @@ public void testAnalyzerClass() throws Exception { String response = restTestHarness.post("/schema", json(addFieldTypeAnalyzerWithClass + ',' + charFilters + tokenizer + filters + suffix)); + @SuppressWarnings({"rawtypes"}) Map map = (Map) fromJSONString(response); + @SuppressWarnings({"rawtypes"}) Map error = (Map)map.get("error"); assertNotNull("No errors", error); + @SuppressWarnings({"rawtypes"}) List details = (List)error.get("details"); assertNotNull("No details", details); assertEquals("Wrong number of details", 1, details.size()); + @SuppressWarnings({"rawtypes"}) List errorList = 
(List)((Map)details.get(0)).get("errorMessages"); assertEquals(1, errorList.size()); assertTrue (((String)errorList.get(0)).contains @@ -191,6 +200,7 @@ public void testAnalyzerClass() throws Exception { map = getObj(restTestHarness, "myNewTextFieldWithAnalyzerClass", "fieldTypes"); assertNotNull(map); + @SuppressWarnings({"rawtypes"}) Map analyzer = (Map)map.get("analyzer"); assertEquals("org.apache.lucene.analysis.core.WhitespaceAnalyzer", String.valueOf(analyzer.get("class"))); assertEquals("5.0.0", String.valueOf(analyzer.get(IndexSchema.LUCENE_MATCH_VERSION_PARAM))); @@ -214,14 +224,19 @@ public void testAnalyzerByName() throws Exception { "}}"; String response = restTestHarness.post("/schema", json(addFieldTypeAnalyzer)); + @SuppressWarnings({"rawtypes"}) Map map = (Map) fromJSONString(response); assertNull(response, map.get("error")); map = getObj(restTestHarness, "myNewTextField", "fieldTypes"); assertNotNull(map); + @SuppressWarnings({"rawtypes"}) Map analyzer = (Map)map.get("analyzer"); + @SuppressWarnings({"rawtypes"}) Map tokenizer = (Map)analyzer.get("tokenizer"); + @SuppressWarnings({"rawtypes"}) List charFilters = (List)analyzer.get("charFilters"); + @SuppressWarnings({"rawtypes"}) List tokenFilters = (List)analyzer.get("filters"); assertEquals("whitespace", String.valueOf(tokenizer.get("name"))); assertEquals("patternReplace", String.valueOf(((Map)charFilters.get(0)).get("name"))); @@ -240,12 +255,16 @@ public void testAnalyzerByBogusName() throws Exception { "}}"; String response = restTestHarness.post("/schema", json(addFieldTypeAnalyzer)); + @SuppressWarnings({"rawtypes"}) Map map = (Map) fromJSONString(response); + @SuppressWarnings({"rawtypes"}) Map error = (Map)map.get("error"); assertNotNull("No errors", error); + @SuppressWarnings({"rawtypes"}) List details = (List)error.get("details"); assertNotNull("No details", details); assertEquals("Wrong number of details", 1, details.size()); + @SuppressWarnings({"rawtypes"}) List errorList = (List)((Map)details.get(0)).get("errorMessages"); assertEquals(1, errorList.size()); assertTrue (((String)errorList.get(0)).contains @@ -257,6 +276,7 @@ public void testAddFieldMatchingExistingDynamicField() throws Exception { String newFieldName = "attr_non_dynamic"; + @SuppressWarnings({"rawtypes"}) Map map = getObj(harness, newFieldName, "fields"); assertNull("Field '" + newFieldName + "' already exists in the schema", map); @@ -299,6 +319,7 @@ public void testAddIllegalDynamicField() throws Exception { " }"; String response = harness.post("/schema", json(payload)); + @SuppressWarnings({"rawtypes"}) Map map = (Map) fromJSONString(response); assertNotNull(response, map.get("error")); @@ -306,6 +327,7 @@ public void testAddIllegalDynamicField() throws Exception { assertNull(newFieldName + " illegal dynamic field should not have been added to schema", map); } + @SuppressWarnings({"rawtypes"}) public void testAddIllegalFields() throws Exception { RestTestHarness harness = restTestHarness; @@ -348,6 +370,7 @@ public void testAddIllegalFields() throws Exception { assertNotNull(response, map.get("error")); } + @SuppressWarnings({"rawtypes"}) public void testAddFieldWithExistingCatchallDynamicField() throws Exception { RestTestHarness harness = restTestHarness; @@ -400,6 +423,7 @@ public void testAddFieldWithExistingCatchallDynamicField() throws Exception { assertNotNull("Field '" + newFieldName + "' is not in the schema", map); } + @SuppressWarnings({"unchecked", "rawtypes"}) public void testMultipleCommands() throws Exception{ 
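For reference, a sketch (not from the patch) of the error shape testMultipleAddFieldWithErrors above asserts: a bulk /schema POST answers with an "error" object whose "details" list carries one "errorMessages" entry per failed command. It reuses the test class's restTestHarness, json() and fromJSONString() helpers; the field definitions are illustrative.

```java
// Two deliberately broken add-field commands in one POST.
String payload = "{ 'add-field': { name: 'a1', type: 'string1' },"  // unknown type
               + "  'add-field': { type: 'string' } }";             // missing name
String response = restTestHarness.post("/schema", json(payload));
@SuppressWarnings({"rawtypes"})
Map map = (Map) fromJSONString(response);
@SuppressWarnings({"rawtypes"})
Map error = (Map) map.get("error");
@SuppressWarnings({"rawtypes"})
List details = (List) error.get("details"); // one entry per failed command
```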
RestTestHarness harness = restTestHarness; @@ -679,12 +703,14 @@ public void testMultipleCommands() throws Exception{ public void testCopyFieldRules() throws Exception { RestTestHarness harness = restTestHarness; + @SuppressWarnings({"rawtypes"}) Map m = getObj(harness, "name", "fields"); assertNotNull("'name' field does not exist in the schema", m); m = getObj(harness, "bind", "fields"); assertNotNull("'bind' field does not exist in the schema", m); + @SuppressWarnings({"rawtypes"}) List l = getSourceCopyFields(harness, "bleh_s"); assertTrue("'bleh_s' copyField rule exists in the schema", l.isEmpty()); @@ -696,6 +722,7 @@ public void testCopyFieldRules() throws Exception { " }\n"; String response = harness.post("/schema", json(payload)); + @SuppressWarnings({"rawtypes"}) Map map = (Map) fromJSONString(response); assertNull(response, map.get("error")); @@ -746,6 +773,7 @@ public void testCopyFieldRules() throws Exception { assertTrue("'bleh_s' copyField rule exists in the schema", l.isEmpty()); } + @SuppressWarnings({"unchecked", "rawtypes"}) public void testDeleteAndReplace() throws Exception { RestTestHarness harness = restTestHarness; @@ -995,9 +1023,11 @@ public void testSortableTextFieldWithAnalyzer() throws Exception { String response = restTestHarness.post("/schema", json(payload)); + @SuppressWarnings({"rawtypes"}) Map map = (Map) fromJSONString(response); assertNull(response, map.get("errors")); + @SuppressWarnings({"rawtypes"}) Map fields = getObj(restTestHarness, fieldName, "fields"); assertNotNull("field " + fieldName + " not created", fields); @@ -1073,9 +1103,11 @@ public void testSimilarityParser() throws Exception { String response = harness.post("/schema", json(payload)); + @SuppressWarnings({"rawtypes"}) Map map = (Map) fromJSONString(response); assertNull(response, map.get("error")); + @SuppressWarnings({"rawtypes"}) Map fields = getObj(harness, fieldName, "fields"); assertNotNull("field " + fieldName + " not created", fields); @@ -1110,26 +1142,30 @@ public void testSimilarityParser() throws Exception { sim -> assertEquals("Unexpected discountedOverlaps", discountOverlaps, sim.getDiscountOverlaps())); } + @SuppressWarnings({"rawtypes"}) public static Map getObj(RestTestHarness restHarness, String fld, String key) throws Exception { Map map = getRespMap(restHarness); List l = (List) ((Map)map.get("schema")).get(key); for (Object o : l) { - Map m = (Map) o; + @SuppressWarnings({"rawtypes"})Map m = (Map) o; if (fld.equals(m.get("name"))) return m; } return null; } + @SuppressWarnings({"rawtypes"}) public static Map getRespMap(RestTestHarness restHarness) throws Exception { return getAsMap("/schema", restHarness); } + @SuppressWarnings({"rawtypes"}) public static Map getAsMap(String uri, RestTestHarness restHarness) throws Exception { String response = restHarness.query(uri); return (Map) fromJSONString(response); } + @SuppressWarnings({"unchecked", "rawtypes"}) public static List getSourceCopyFields(RestTestHarness harness, String src) throws Exception { Map map = getRespMap(harness); List l = (List) ((Map)map.get("schema")).get("copyFields"); @@ -1141,6 +1177,7 @@ public static List getSourceCopyFields(RestTestHarness harness, String src) thro return result; } + @SuppressWarnings({"unchecked", "rawtypes"}) public static List getDestCopyFields(RestTestHarness harness, String dest) throws Exception { Map map = getRespMap(harness); List l = (List) ((Map)map.get("schema")).get("copyFields"); @@ -1158,6 +1195,7 @@ public static List getDestCopyFields(RestTestHarness harness, 
String dest) throw * Executes each of the specified Similarity-accepting validators. */ @SafeVarargs + @SuppressWarnings({"unchecked", "varargs"}) private static void assertFieldSimilarity(String fieldname, Class expected, Consumer... validators) { CoreContainer cc = jetty.getCoreContainer(); try (SolrCore core = cc.getCore("collection1")) { diff --git a/solr/core/src/test/org/apache/solr/schema/ChangedSchemaMergeTest.java b/solr/core/src/test/org/apache/solr/schema/ChangedSchemaMergeTest.java index dbff7e208e93..24997e72220e 100644 --- a/solr/core/src/test/org/apache/solr/schema/ChangedSchemaMergeTest.java +++ b/solr/core/src/test/org/apache/solr/schema/ChangedSchemaMergeTest.java @@ -141,7 +141,8 @@ public void testOptimizeDiffSchemas() throws Exception { changed.getUpdateHandler().commit(new CommitUpdateCommand(req, false)); changed.getUpdateHandler().commit(new CommitUpdateCommand(req, true)); } catch (Throwable e) { - log.error("Test exception, logging so not swallowed if there is a (finally) shutdown exception: " + e.getMessage(), e); + log.error("Test exception, logging so not swallowed if there is a (finally) shutdown exception: {}" + , e.getMessage(), e); throw e; } finally { if (cc != null) cc.shutdown(); diff --git a/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java b/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java index dcda834dac81..73178c3c2a59 100644 --- a/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java +++ b/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java @@ -570,7 +570,7 @@ public void testFloatAndDoubleRangeQueryRandom() throws Exception { } assertU(commit()); - log.info("Indexed values: "+values); + log.info("Indexed values: {}", values); // Querying int numQueries = 10000; for (int j=0; j analyzerProps = (SimpleOrderedMap)schema.getFieldTypeByName("text_ws") .getNamedPropertyValues(true).get("analyzer"); @@ -58,12 +59,14 @@ public void testSchemaLoadingComplexAnalyzer() { IndexSchema schema = core.getLatestSchema(); assertTrue( schema.getFieldTypes().containsKey("text") ); + @SuppressWarnings({"unchecked"}) SimpleOrderedMap indexAnalyzerProps = (SimpleOrderedMap)schema.getFieldTypeByName("text") .getNamedPropertyValues(true).get("indexAnalyzer"); checkTokenizerName(indexAnalyzerProps, "whitespace"); checkTokenFilterNames(indexAnalyzerProps, new String[]{"stop", "wordDelimiterGraph", "lowercase", "keywordMarker", "porterStem", "removeDuplicates", "flattenGraph"}); + @SuppressWarnings({"unchecked"}) SimpleOrderedMap queryAnalyzerProps = (SimpleOrderedMap)schema.getFieldTypeByName("text") .getNamedPropertyValues(true).get("queryAnalyzer"); @@ -79,6 +82,7 @@ public void testSchemaLoadingAnalyzerWithCharFilters() { SolrCore core = h.getCore(); IndexSchema schema = core.getLatestSchema(); assertTrue( schema.getFieldTypes().containsKey("charfilthtmlmap") ); + @SuppressWarnings({"unchecked"}) SimpleOrderedMap analyzerProps = (SimpleOrderedMap)schema.getFieldTypeByName("charfilthtmlmap") .getNamedPropertyValues(true).get("analyzer"); @@ -110,12 +114,14 @@ public void testSchemaLoadingClassAndNameTokenFilter() throws Exception { } private void checkTokenizerName(SimpleOrderedMap analyzerProps, String name) { + @SuppressWarnings({"unchecked"}) SimpleOrderedMap tokenizerProps = (SimpleOrderedMap)analyzerProps.get("tokenizer"); assertNull(tokenizerProps.get("class")); assertEquals(name, tokenizerProps.get("name")); } private void checkTokenFilterNames(SimpleOrderedMap analyzerProps, String[] names) { + 
@SuppressWarnings({"unchecked"}) List> tokenFilterProps = (List>)analyzerProps.get("filters"); assertEquals(names.length, tokenFilterProps.size()); for (int i = 0; i < names.length; i++) { @@ -125,6 +131,7 @@ private void checkTokenFilterNames(SimpleOrderedMap analyzerProps, Strin } private void checkCharFilterNames(SimpleOrderedMap analyzerProps, String[] names) { + @SuppressWarnings({"unchecked"}) List> charFilterProps = (List>)analyzerProps.get("charFilters"); assertEquals(names.length, charFilterProps.size()); for (int i = 0; i < names.length; i++) { diff --git a/solr/core/src/test/org/apache/solr/schema/SpatialRPTFieldTypeTest.java b/solr/core/src/test/org/apache/solr/schema/SpatialRPTFieldTypeTest.java index 5524a3130a98..c780d73454ae 100644 --- a/solr/core/src/test/org/apache/solr/schema/SpatialRPTFieldTypeTest.java +++ b/solr/core/src/test/org/apache/solr/schema/SpatialRPTFieldTypeTest.java @@ -202,6 +202,7 @@ public void testShapeToFromStringWKT() throws Exception { setupRPTField("miles", "true", "WKT", random().nextBoolean() ? new SpatialRecursivePrefixTreeFieldType() : new RptWithGeometrySpatialField()); + @SuppressWarnings({"rawtypes"}) AbstractSpatialFieldType ftype = (AbstractSpatialFieldType) h.getCore().getLatestSchema().getField("geo").getType(); @@ -220,6 +221,7 @@ public void testShapeToFromStringGeoJSON() throws Exception { setupRPTField("miles", "true", "GeoJSON", random().nextBoolean() ? new SpatialRecursivePrefixTreeFieldType() : new RptWithGeometrySpatialField()); + @SuppressWarnings({"rawtypes"}) AbstractSpatialFieldType ftype = (AbstractSpatialFieldType) h.getCore().getLatestSchema().getField("geo").getType(); diff --git a/solr/core/src/test/org/apache/solr/schema/TestBulkSchemaConcurrent.java b/solr/core/src/test/org/apache/solr/schema/TestBulkSchemaConcurrent.java index b55e6dbc0b40..df196d759d0f 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestBulkSchemaConcurrent.java +++ b/solr/core/src/test/org/apache/solr/schema/TestBulkSchemaConcurrent.java @@ -53,11 +53,13 @@ protected String getCloudSolrConfig() { } @Test + @SuppressWarnings({"unchecked"}) public void test() throws Exception { final int threadCount = 5; setupRestTestHarnesses(); Thread[] threads = new Thread[threadCount]; + @SuppressWarnings({"rawtypes"}) final List collectErrors = Collections.synchronizedList(new ArrayList<>()); for (int i = 0 ; i < threadCount ; i++) { @@ -65,6 +67,7 @@ public void test() throws Exception { threads[i] = new Thread(){ @Override public void run() { + @SuppressWarnings({"rawtypes"}) ArrayList errs = new ArrayList(); collectErrors.add(errs); try { @@ -84,16 +87,17 @@ public void run() { boolean success = true; - for (List e : collectErrors) { + for (@SuppressWarnings({"rawtypes"})List e : collectErrors) { if (e != null && !e.isEmpty()) { success = false; - log.error(e.toString()); + log.error("{}", e); } } assertTrue(collectErrors.toString(), success); } + @SuppressWarnings({"unchecked"}) private void invokeBulkAddCall(int seed, ArrayList errs) throws Exception { String payload = "{\n" + " 'add-field' : {\n" + @@ -130,6 +134,7 @@ private void invokeBulkAddCall(int seed, ArrayList errs) throws Exceptio RestTestHarness publisher = randomRestTestHarness(r); String response = publisher.post("/schema", SolrTestCaseJ4.json(payload)); + @SuppressWarnings({"rawtypes"}) Map map = (Map) Utils.fromJSONString(response); Object errors = map.get("errors"); if (errors != null) { @@ -145,12 +150,14 @@ private void invokeBulkAddCall(int seed, ArrayList errs) throws Exceptio long 
maxTimeoutMillis = 100000; while (TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutMillis) { errmessages.clear(); + @SuppressWarnings({"rawtypes"}) Map m = getObj(harness, aField, "fields"); if (m == null) errmessages.add(StrUtils.formatString("field {0} not created", aField)); m = getObj(harness, dynamicFldName, "dynamicFields"); if (m == null) errmessages.add(StrUtils.formatString("dynamic field {0} not created", dynamicFldName)); + @SuppressWarnings({"rawtypes"}) List l = getSourceCopyFields(harness, aField); if (!checkCopyField(l, aField, dynamicCopyFldDest)) errmessages.add(StrUtils.formatString("CopyField source={0},dest={1} not created", aField, dynamicCopyFldDest)); @@ -170,6 +177,7 @@ private void invokeBulkAddCall(int seed, ArrayList errs) throws Exceptio } } + @SuppressWarnings({"unchecked"}) private void invokeBulkReplaceCall(int seed, ArrayList errs) throws Exception { String payload = "{\n" + " 'replace-field' : {\n" + @@ -200,6 +208,7 @@ private void invokeBulkReplaceCall(int seed, ArrayList errs) throws Exce RestTestHarness publisher = randomRestTestHarness(r); String response = publisher.post("/schema", SolrTestCaseJ4.json(payload)); + @SuppressWarnings({"rawtypes"}) Map map = (Map) Utils.fromJSONString(response); Object errors = map.get("errors"); if (errors != null) { @@ -215,12 +224,14 @@ private void invokeBulkReplaceCall(int seed, ArrayList errs) throws Exce long maxTimeoutMillis = 100000; while (TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutMillis) { errmessages.clear(); + @SuppressWarnings({"rawtypes"}) Map m = getObj(harness, aField, "fields"); if (m == null) errmessages.add(StrUtils.formatString("field {0} no longer present", aField)); m = getObj(harness, dynamicFldName, "dynamicFields"); if (m == null) errmessages.add(StrUtils.formatString("dynamic field {0} no longer present", dynamicFldName)); + @SuppressWarnings({"rawtypes"}) List l = getSourceCopyFields(harness, aField); if (!checkCopyField(l, aField, dynamicCopyFldDest)) errmessages.add(StrUtils.formatString("CopyField source={0},dest={1} no longer present", aField, dynamicCopyFldDest)); @@ -240,6 +251,7 @@ private void invokeBulkReplaceCall(int seed, ArrayList errs) throws Exce } } + @SuppressWarnings({"unchecked"}) private void invokeBulkDeleteCall(int seed, ArrayList errs) throws Exception { String payload = "{\n" + " 'delete-copy-field' : {\n" + @@ -262,6 +274,7 @@ private void invokeBulkDeleteCall(int seed, ArrayList errs) throws Excep RestTestHarness publisher = randomRestTestHarness(r); String response = publisher.post("/schema", SolrTestCaseJ4.json(payload)); + @SuppressWarnings({"rawtypes"}) Map map = (Map) Utils.fromJSONString(response); Object errors = map.get("errors"); if (errors != null) { @@ -277,12 +290,14 @@ private void invokeBulkDeleteCall(int seed, ArrayList errs) throws Excep long maxTimeoutMillis = 100000; while (TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutMillis) { errmessages.clear(); + @SuppressWarnings({"rawtypes"}) Map m = getObj(harness, aField, "fields"); if (m != null) errmessages.add(StrUtils.formatString("field {0} still exists", aField)); m = getObj(harness, dynamicFldName, "dynamicFields"); if (m != null) errmessages.add(StrUtils.formatString("dynamic field {0} still exists", dynamicFldName)); + @SuppressWarnings({"rawtypes"}) List l = getSourceCopyFields(harness, aField); if (checkCopyField(l, aField, dynamicCopyFldDest)) 
errmessages.add(StrUtils.formatString("CopyField source={0},dest={1} still exists", aField, dynamicCopyFldDest)); @@ -302,9 +317,9 @@ private void invokeBulkDeleteCall(int seed, ArrayList errs) throws Excep } } - private boolean checkCopyField(List l, String src, String dest) { + private boolean checkCopyField(@SuppressWarnings({"rawtypes"})List l, String src, String dest) { if (l == null) return false; - for (Map map : l) { + for (@SuppressWarnings({"rawtypes"})Map map : l) { if (src.equals(map.get("source")) && dest.equals(map.get("dest"))) return true; } diff --git a/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchema.java b/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchema.java index 2427f19f48d2..085ba1ad75c0 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchema.java +++ b/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchema.java @@ -59,9 +59,12 @@ public void test() throws Exception { String previousBaseURL = client.getBaseURL(); // Strip /collection1 step from baseURL - requests fail otherwise client.setBaseURL(previousBaseURL.substring(0, previousBaseURL.lastIndexOf("/"))); + @SuppressWarnings({"rawtypes"}) NamedList namedListResponse = client.request(request); client.setBaseURL(previousBaseURL); // Restore baseURL + @SuppressWarnings({"rawtypes"}) NamedList status = (NamedList)namedListResponse.get("status"); + @SuppressWarnings({"rawtypes"}) NamedList collectionStatus = (NamedList)status.getVal(0); String collectionSchema = (String)collectionStatus.get(CoreAdminParams.SCHEMA); // Make sure the upgrade to managed schema happened diff --git a/solr/core/src/test/org/apache/solr/schema/TestManagedSchema.java b/solr/core/src/test/org/apache/solr/schema/TestManagedSchema.java index ae8dd4ecb530..f0ccd99e49ce 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestManagedSchema.java +++ b/solr/core/src/test/org/apache/solr/schema/TestManagedSchema.java @@ -145,8 +145,11 @@ private void assertSchemaResource(String collection, String expectedSchemaResour SolrQueryResponse response = new SolrQueryResponse(); admin.handleRequestBody(request, response); assertNull("Exception on create", response.getException()); + @SuppressWarnings({"rawtypes"}) NamedList responseValues = response.getValues(); + @SuppressWarnings({"rawtypes"}) NamedList status = (NamedList)responseValues.get("status"); + @SuppressWarnings({"rawtypes"}) NamedList collectionStatus = (NamedList)status.get(collection); String collectionSchema = (String)collectionStatus.get(CoreAdminParams.SCHEMA); assertEquals("Schema resource name differs from expected name", expectedSchemaResource, collectionSchema); diff --git a/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaAPI.java b/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaAPI.java index 6635764d0dbd..a2375bafd957 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaAPI.java +++ b/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaAPI.java @@ -97,7 +97,7 @@ private void addStringField(String fieldName, String collection, CloudSolrClient assertEquals(0, addFieldResponse.getStatus()); assertNull(addFieldResponse.getResponse().get("errors")); - log.info("added new field="+fieldName); + log.info("added new field={}", fieldName); } private void testModifyField(String collection) throws IOException, SolrServerException { diff --git a/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaThreadSafety.java 
b/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaThreadSafety.java index 811680a7356c..c6113595d039 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaThreadSafety.java +++ b/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaThreadSafety.java @@ -171,6 +171,7 @@ private void perhapsExpired() throws SessionExpiredException { return zkController; } + @SuppressWarnings({"rawtypes"}) private Runnable indexSchemaLoader(String configsetName, final ZkController zkController) { return () -> { try { diff --git a/solr/core/src/test/org/apache/solr/schema/TestPointFields.java b/solr/core/src/test/org/apache/solr/schema/TestPointFields.java index 7758991dafb9..a47911522ed0 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestPointFields.java +++ b/solr/core/src/test/org/apache/solr/schema/TestPointFields.java @@ -3856,6 +3856,7 @@ public void doTestReturnNonStored(final String fieldName, boolean shouldReturnFi public void testWhiteboxCreateFields() throws Exception { String[] typeNames = new String[]{"i", "l", "f", "d", "dt"}; + @SuppressWarnings({"rawtypes"}) Class[] expectedClasses = new Class[]{IntPoint.class, LongPoint.class, FloatPoint.class, DoublePoint.class, LongPoint.class}; Date dateToTest = new Date(); diff --git a/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored2.java b/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored2.java index 1e38669fa587..41e550454347 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored2.java +++ b/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored2.java @@ -89,6 +89,7 @@ public void testSchemaAPI() throws Exception { String response = harness.post("/schema", json(payload)); + @SuppressWarnings({"rawtypes"}) Map m = (Map) Utils.fromJSONString(response); assertNull(response, m.get("errors")); diff --git a/solr/core/src/test/org/apache/solr/search/AnalyticsMergeStrategyTest.java b/solr/core/src/test/org/apache/solr/search/AnalyticsMergeStrategyTest.java index 0bc140bb1507..94d90e0fc462 100644 --- a/solr/core/src/test/org/apache/solr/search/AnalyticsMergeStrategyTest.java +++ b/solr/core/src/test/org/apache/solr/search/AnalyticsMergeStrategyTest.java @@ -96,7 +96,9 @@ public void test() throws Exception { } private void assertCountOnly(QueryResponse rsp, int count) throws Exception { + @SuppressWarnings({"rawtypes"}) NamedList response = rsp.getResponse(); + @SuppressWarnings({"rawtypes"}) NamedList analytics = (NamedList)response.get("analytics"); Integer c = (Integer)analytics.get("mycount"); if(c.intValue() != count) { @@ -105,7 +107,9 @@ private void assertCountOnly(QueryResponse rsp, int count) throws Exception { } private void assertCount(QueryResponse rsp, int count) throws Exception { + @SuppressWarnings({"rawtypes"}) NamedList response = rsp.getResponse(); + @SuppressWarnings({"rawtypes"}) NamedList analytics = (NamedList)response.get("analytics"); Integer c = (Integer)analytics.get("mycount"); if(c.intValue() != count) { diff --git a/solr/core/src/test/org/apache/solr/search/AnalyticsTestQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/AnalyticsTestQParserPlugin.java index 158df1722d1e..b2a733f8f2a9 100644 --- a/solr/core/src/test/org/apache/solr/search/AnalyticsTestQParserPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/AnalyticsTestQParserPlugin.java @@ -87,7 +87,9 @@ public void collect(int doc) throws IOException { leafDelegate.collect(doc); } + @SuppressWarnings({"unchecked"}) public void finish() throws 
IOException { + @SuppressWarnings({"rawtypes"}) NamedList analytics = new NamedList(); rb.rsp.add("analytics", analytics); analytics.add("mycount", count+base); @@ -114,12 +116,16 @@ public int getCost() { public void handleMergeFields(ResponseBuilder rb, SolrIndexSearcher searcher) { } + @SuppressWarnings({"unchecked"}) public void merge(ResponseBuilder rb, ShardRequest shardRequest) { int count = 0; + @SuppressWarnings({"rawtypes"}) NamedList merged = new NamedList(); for(ShardResponse shardResponse : shardRequest.responses) { + @SuppressWarnings({"rawtypes"}) NamedList response = shardResponse.getSolrResponse().getResponse(); + @SuppressWarnings({"rawtypes"}) NamedList analytics = (NamedList)response.get("analytics"); Integer c = (Integer)analytics.get("mycount"); count += c.intValue(); @@ -132,10 +138,13 @@ public void merge(ResponseBuilder rb, ShardRequest shardRequest) { static class TestIterative extends IterativeMergeStrategy { + @SuppressWarnings({"unchecked"}) public void process(ResponseBuilder rb, ShardRequest sreq) throws Exception { int count = 0; for(ShardResponse shardResponse : sreq.responses) { + @SuppressWarnings({"rawtypes"}) NamedList response = shardResponse.getSolrResponse().getResponse(); + @SuppressWarnings({"rawtypes"}) NamedList analytics = (NamedList)response.get("analytics"); Integer c = (Integer)analytics.get("mycount"); count += c.intValue(); @@ -158,11 +167,13 @@ public void process(ResponseBuilder rb, ShardRequest sreq) throws Exception { for(Future future : futures) { QueryResponse response = future.get().getResponse(); + @SuppressWarnings({"rawtypes"}) NamedList analytics = (NamedList)response.getResponse().get("analytics"); Integer c = (Integer)analytics.get("mycount"); nextCount += c.intValue(); } + @SuppressWarnings({"rawtypes"}) NamedList merged = new NamedList(); merged.add("mycount", nextCount); rb.rsp.add("analytics", merged); diff --git a/solr/core/src/test/org/apache/solr/search/CurrencyRangeFacetCloudTest.java b/solr/core/src/test/org/apache/solr/search/CurrencyRangeFacetCloudTest.java index 2afe4970eb9e..b0c824b9aaa1 100644 --- a/solr/core/src/test/org/apache/solr/search/CurrencyRangeFacetCloudTest.java +++ b/solr/core/src/test/org/apache/solr/search/CurrencyRangeFacetCloudTest.java @@ -135,8 +135,10 @@ public void testSimpleRangeFacetsOfSymetricRates() throws Exception { final String gap = args.get(4); final String end = args.get(5); + @SuppressWarnings({"rawtypes"}) final List range_facets = rsp.getFacetRanges(); assertEquals(1, range_facets.size()); + @SuppressWarnings({"rawtypes"}) final RangeFacet result = range_facets.get(0); assertEquals(FIELD, result.getName()); assertEquals(start, result.getStart()); @@ -146,6 +148,7 @@ public void testSimpleRangeFacetsOfSymetricRates() throws Exception { assertEquals(3, result.getAfter()); assertEquals(9, result.getBetween()); + @SuppressWarnings({"unchecked"}) List counts = result.getCounts(); if (use_mincount) { assertEquals(3, counts.size()); @@ -174,17 +177,20 @@ public void testSimpleRangeFacetsOfSymetricRates() throws Exception { try { assertEquals(NUM_DOCS, rsp.getResults().getNumFound()); + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); assertEquals("before", 3L, ((NamedList)foo.get("before")).get("count")); assertEquals("after", 3L, ((NamedList)foo.get("after")).get("count")); assertEquals("between", 9L, ((NamedList)foo.get("between")).get("count")); + @SuppressWarnings({"unchecked", "rawtypes"}) final List buckets = (List) 
foo.get("buckets"); if (use_mincount) { assertEquals(3, buckets.size()); for (int i = 0; i < 3; i++) { + @SuppressWarnings({"rawtypes"}) NamedList bucket = buckets.get(i); assertEquals((4 + (3 * i)) + ".00,USD", bucket.get("val")); assertEquals("bucket #" + i, 3L, bucket.get("count")); @@ -192,6 +198,7 @@ public void testSimpleRangeFacetsOfSymetricRates() throws Exception { } else { assertEquals(7, buckets.size()); for (int i = 0; i < 7; i++) { + @SuppressWarnings({"rawtypes"}) NamedList bucket = buckets.get(i); assertEquals((4 + i) + ".00,USD", bucket.get("val")); assertEquals("bucket #" + i, (i == 0 || i == 3 || i == 6) ? 3L : 0L, bucket.get("count")); @@ -219,8 +226,10 @@ public void testFacetRangeOfAsymetricRates() throws Exception { final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { assertEquals(NUM_DOCS, rsp.getResults().getNumFound()); + @SuppressWarnings({"rawtypes"}) final List range_facets = rsp.getFacetRanges(); assertEquals(1, range_facets.size()); + @SuppressWarnings({"rawtypes"}) final RangeFacet result = range_facets.get(0); assertEquals(FIELD, result.getName()); assertEquals("8.00,EUR", result.getStart()); @@ -230,6 +239,7 @@ public void testFacetRangeOfAsymetricRates() throws Exception { assertEquals(3, result.getAfter()); assertEquals(6, result.getBetween()); + @SuppressWarnings({"unchecked"}) List counts = result.getCounts(); if (use_mincount) { assertEquals(2, counts.size()); @@ -266,17 +276,20 @@ public void testJsonFacetRangeOfAsymetricRates() throws Exception { try { assertEquals(NUM_DOCS, rsp.getResults().getNumFound()); + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); assertEquals("before", 6L, ((NamedList)foo.get("before")).get("count")); assertEquals("after", 3L, ((NamedList)foo.get("after")).get("count")); assertEquals("between", 6L, ((NamedList)foo.get("between")).get("count")); + @SuppressWarnings({"unchecked", "rawtypes"}) final List buckets = (List) foo.get("buckets"); if (use_mincount) { assertEquals(2, buckets.size()); for (int i = 0; i < 2; i++) { + @SuppressWarnings({"rawtypes"}) NamedList bucket = buckets.get(i); assertEquals((12 + (i * 2)) + ".00,EUR", bucket.get("val")); assertEquals("bucket #" + i, 3L, bucket.get("count")); @@ -284,6 +297,7 @@ public void testJsonFacetRangeOfAsymetricRates() throws Exception { } else { assertEquals(7, buckets.size()); for (int i = 0; i < 7; i++) { + @SuppressWarnings({"rawtypes"}) NamedList bucket = buckets.get(i); assertEquals((8 + (i * 2)) + ".00,EUR", bucket.get("val")); assertEquals("bucket #" + i, (i == 2 || i == 3) ? 3L : 0L, bucket.get("count")); @@ -357,10 +371,15 @@ public void testJsonRangeFacetWithSubFacet() throws Exception { // this top level result count sanity check that should vary based on how we are filtering our facets... assertEquals(use_domain ? 15 : 10, rsp.getResults().getNumFound()); + @SuppressWarnings({"unchecked"}) final NamedList bar = ((NamedList>)rsp.getResponse().get("facets")).get("bar"); + @SuppressWarnings({"unchecked"}) final List> bar_buckets = (List>) bar.get("buckets"); + @SuppressWarnings({"unchecked"}) final NamedList before = (NamedList) bar.get("before"); + @SuppressWarnings({"unchecked"}) final NamedList between = (NamedList) bar.get("between"); + @SuppressWarnings({"unchecked"}) final NamedList after = (NamedList) bar.get("after"); // sanity check our high level expectations... 
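Between the hunks, a hedged sketch of the json.facet request shape these currency-range assertions exercise: a range facet over a currency field with other:all (producing the before/between/after buckets checked above) and a nested terms subfacet. The field names amount_c and x_s and the bounds are illustrative assumptions, not values from the patch.

```java
// Client-side shape of the faceting request under test.
SolrQuery query = new SolrQuery("*:*");
query.add("json.facet",
    "{ bar: { type: range, field: amount_c,"
    + "       start: '0.00,EUR', gap: '10.00,EUR', end: '20.00,EUR',"
    + "       other: all,"
    + "       facet: { foo: { type: terms, field: x_s, limit: 2 } } } }");
// rsp.getResponse().get("facets") then carries bar.buckets plus bar.before,
// bar.between and bar.after, each holding its own nested foo buckets.
```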
@@ -379,6 +398,7 @@ public void testJsonRangeFacetWithSubFacet() throws Exception { final NamedList<Object> bucket = bar_buckets.get(i); assertEquals((i * 10) + ".00,EUR", bucket.get("val")); assertEquals("bucket #" + i, 4L, bucket.get("count")); + @SuppressWarnings({"unchecked"}) final List<NamedList<Object>> foo_buckets = ((NamedList<List<NamedList<Object>>>)bucket.get("foo")).get("buckets"); assertEquals("bucket #" + i + " foo num buckets", 2, foo_buckets.size()); assertEquals("bucket #" + i + " foo top term", (0==i ? "x2" : "x0"), foo_buckets.get(0).get("val")); @@ -390,6 +410,7 @@ public void testJsonRangeFacetWithSubFacet() throws Exception { } { // between... + @SuppressWarnings({"unchecked"}) final List<NamedList<Object>> buckets = ((NamedList<List<NamedList<Object>>>)between.get("foo")).get("buckets"); assertEquals("between num buckets", 2, buckets.size()); // the counts should both be 3, and the term order should break the tie... @@ -400,6 +421,7 @@ public void testJsonRangeFacetWithSubFacet() throws Exception { } { // after... + @SuppressWarnings({"unchecked"}) final List<NamedList<Object>> buckets = ((NamedList<List<NamedList<Object>>>)after.get("foo")).get("buckets"); assertEquals("after num buckets", 2, buckets.size()); // the counts should both be 1, and the term order should break the tie... @@ -441,16 +463,19 @@ public void testJsonRangeFacetAsSubFacet() throws Exception { // this top level result count sanity check that should vary based on how we are filtering our facets... assertEquals(use_domain ? 15 : 11, rsp.getResults().getNumFound()); + @SuppressWarnings({"unchecked"}) final NamedList<Object> foo = ((NamedList<NamedList<Object>>)rsp.getResponse().get("facets")).get("foo"); // sanity check... // because of the facet limit, foo should only have 1 bucket // because of the fq, the val should be "x2" and the count=5 + @SuppressWarnings({"unchecked"}) final List<NamedList<Object>> foo_buckets = (List<NamedList<Object>>) foo.get("buckets"); assertEquals(1, foo_buckets.size()); assertEquals("x2", foo_buckets.get(0).get("val")); assertEquals("foo bucket count", 5L, foo_buckets.get(0).get("count")); + @SuppressWarnings({"unchecked"}) final NamedList<Object> bar = (NamedList<Object>)foo_buckets.get(0).get("bar"); // these are the 'x2' specific counts, based on our fq... @@ -459,9 +484,11 @@ public void testJsonRangeFacetAsSubFacet() throws Exception { assertEquals("after", 1L, ((NamedList)bar.get("after")).get("count")); assertEquals("between", 2L, ((NamedList)bar.get("between")).get("count")); + @SuppressWarnings({"unchecked", "rawtypes"}) final List<NamedList> buckets = (List<NamedList>) bar.get("buckets"); assertEquals(7, buckets.size()); for (int i = 0; i < 7; i++) { + @SuppressWarnings({"rawtypes"}) NamedList bucket = buckets.get(i); assertEquals((8 + (i * 2)) + ".00,EUR", bucket.get("val")); // 12,EUR & 15,EUR are the 2 values that align with x2 docs diff --git a/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java b/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java index 89a40a9a49ca..48378f971a67 100644 --- a/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java +++ b/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java @@ -36,6 +36,7 @@ public class LargeFieldTest extends SolrTestCaseJ4 { private static final String BIG_FIELD = "bigField"; @BeforeClass + @SuppressWarnings({"unchecked"}) public static void initManagedSchemaCore() throws Exception { // This testing approach means no schema file or per-test temp solr-home!
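// Aside: the suppression idiom used throughout this patch, in one illustrative
// sketch (names here are hypothetical). Java permits @SuppressWarnings on a
// single parameter or local declaration, so only that element is exempted and
// the rest of the method stays fully checked:
void init(@SuppressWarnings({"rawtypes"}) NamedList args) {
  @SuppressWarnings({"unchecked"})
  NamedList<Object> typed = (NamedList<Object>) args; // only this cast is exempt
  // any later raw-type or unchecked use in this method still warns as usual
}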
System.setProperty("managed.schema.mutable", "true"); diff --git a/solr/core/src/test/org/apache/solr/search/MergeStrategyTest.java b/solr/core/src/test/org/apache/solr/search/MergeStrategyTest.java index ff73925b31c4..d7bb4d899d7a 100644 --- a/solr/core/src/test/org/apache/solr/search/MergeStrategyTest.java +++ b/solr/core/src/test/org/apache/solr/search/MergeStrategyTest.java @@ -47,6 +47,7 @@ public static void setUpBeforeClass() throws Exception { @Test @ShardsFixed(num = 3) + @SuppressWarnings({"unchecked"}) public void test() throws Exception { del("*:*"); diff --git a/solr/core/src/test/org/apache/solr/search/MockSearchComponent.java b/solr/core/src/test/org/apache/solr/search/MockSearchComponent.java index 874b21a8c6b7..e401f3c7e194 100644 --- a/solr/core/src/test/org/apache/solr/search/MockSearchComponent.java +++ b/solr/core/src/test/org/apache/solr/search/MockSearchComponent.java @@ -27,7 +27,7 @@ public class MockSearchComponent extends SearchComponent { private String testParam = null; @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { super.init(args); testParam = (String) args.get("testParam"); } diff --git a/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java b/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java index 00108d7e6ed6..a44d51d7b92a 100644 --- a/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java +++ b/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java @@ -21,7 +21,6 @@ import java.util.Map; import java.util.Set; -import junit.framework.AssertionFailedError; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryUtils; import org.apache.solr.SolrTestCaseJ4; @@ -290,6 +289,7 @@ public void testQueryTerm() throws Exception { } } + @SuppressWarnings({"unchecked"}) public void testQueryCollapse() throws Exception { SolrQueryRequest req = req("myField","foo_s1", "g_sort","foo_s1 asc, foo_i desc"); @@ -314,7 +314,9 @@ public void testQueryCollapse() throws Exception { "{!collapse field=$myField max=a nullPolicy=expand}"); //Add boosted documents to the request context. 
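// Aside: a typed sketch of the request-context set-up that follows. Since
// SolrQueryRequest.getContext() is declared to return Map<Object,Object>, the
// raw declarations below could equivalently be written without suppressions
// (same req and doc ids as in the test):
Map<Object, Object> context = req.getContext();
Set<String> boosted = new HashSet<>();
boosted.add("doc1");
boosted.add("doc2");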
+ @SuppressWarnings({"rawtypes"}) Map context = req.getContext(); + @SuppressWarnings({"rawtypes"}) Set boosted = new HashSet(); boosted.add("doc1"); boosted.add("doc2"); @@ -1241,7 +1243,7 @@ public void testChildField() throws Exception { public void testPayloadScoreQuery() throws Exception { // There was a bug with PayloadScoreQuery's .equals() method that said two queries were equal with different includeSpanScore settings - expectThrows(AssertionFailedError.class, "queries should not have been equal", + expectThrows(AssertionError.class, "queries should not have been equal", () -> assertQueryEquals ("payload_score" , "{!payload_score f=foo_dpf v=query func=min includeSpanScore=false}" @@ -1251,7 +1253,7 @@ public void testPayloadScoreQuery() throws Exception { } public void testPayloadCheckQuery() throws Exception { - expectThrows(AssertionFailedError.class, "queries should not have been equal", + expectThrows(AssertionError.class, "queries should not have been equal", () -> assertQueryEquals ("payload_check" , "{!payload_check f=foo_dpf payloads=2}one" @@ -1282,7 +1284,7 @@ public void testBoolQuery() throws Exception { "{!bool must='{!lucene}foo_s:c' filter='{!lucene}foo_s:d' " + "must_not='{!lucene}foo_s:a' should='{!lucene}foo_s:b' filter='{!lucene}foo_s:e'}"); - expectThrows(AssertionFailedError.class, "queries should not have been equal", + expectThrows(AssertionError.class, "queries should not have been equal", () -> assertQueryEquals ("bool" , "{!bool must='{!lucene}foo_s:a'}" diff --git a/solr/core/src/test/org/apache/solr/search/RankQueryTestPlugin.java b/solr/core/src/test/org/apache/solr/search/RankQueryTestPlugin.java index 102488313cd4..120847bf3f01 100644 --- a/solr/core/src/test/org/apache/solr/search/RankQueryTestPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/RankQueryTestPlugin.java @@ -132,6 +132,7 @@ public TestRankQuery(int collector, int mergeStrategy) { this.mergeStrategy = mergeStrategy; } + @SuppressWarnings({"rawtypes"}) public TopDocsCollector getTopDocsCollector(int len, QueryCommand cmd, IndexSearcher searcher) { if(collector == 0) return new TestCollector(null); @@ -165,6 +166,7 @@ public void handleMergeFields(ResponseBuilder rb, SolrIndexSearcher searcher) { } + @SuppressWarnings({"unchecked"}) public void merge(ResponseBuilder rb, ShardRequest sreq) { // id to shard mapping, to eliminate any accidental dups @@ -183,7 +185,7 @@ public void merge(ResponseBuilder rb, ShardRequest sreq) { long numFound = 0; Float maxScore=null; boolean partialResults = false; - List shardDocs = new ArrayList(); + List shardDocs = new ArrayList<>(); for (ShardResponse srsp : sreq.responses) { SolrDocumentList docs = null; @@ -460,6 +462,7 @@ public float score() { } } + @SuppressWarnings({"unchecked"}) public void merge(ResponseBuilder rb, ShardRequest sreq) { // id to shard mapping, to eliminate any accidental dups @@ -478,7 +481,7 @@ public void merge(ResponseBuilder rb, ShardRequest sreq) { long numFound = 0; Float maxScore=null; boolean partialResults = false; - List shardDocs = new ArrayList(); + List shardDocs = new ArrayList<>(); for (ShardResponse srsp : sreq.responses) { SolrDocumentList docs = null; @@ -535,8 +538,11 @@ public void merge(ResponseBuilder rb, ShardRequest sreq) { SortSpec ss = rb.getSortSpec(); Sort sort = ss.getSort(); + @SuppressWarnings({"rawtypes"}) NamedList sortFieldValues = (NamedList)(srsp.getSolrResponse().getResponse().get("merge_values")); + @SuppressWarnings({"rawtypes"}) NamedList unmarshalledSortFieldValues = 
unmarshalSortValues(ss, sortFieldValues, schema); + @SuppressWarnings({"rawtypes"}) List lst = (List)unmarshalledSortFieldValues.getVal(0); for (int i=0; i list = new ArrayList(); + private List list = new ArrayList<>(); - public TestCollector(PriorityQueue pq) { + @SuppressWarnings({"unchecked"}) + public TestCollector(@SuppressWarnings({"rawtypes"})PriorityQueue pq) { super(pq); } @@ -690,6 +699,7 @@ public int topDocsSize() { return list.size(); } + @SuppressWarnings({"unchecked", "rawtypes"}) public TopDocs topDocs() { Collections.sort(list, new Comparator() { public int compare(Object o1, Object o2) { @@ -722,11 +732,13 @@ public ScoreMode scoreMode() { } } + @SuppressWarnings({"rawtypes"}) static class TestCollector1 extends TopDocsCollector { - private List list = new ArrayList(); + private List list = new ArrayList<>(); - public TestCollector1(PriorityQueue pq) { + @SuppressWarnings({"unchecked"}) + public TestCollector1(@SuppressWarnings({"rawtypes"})PriorityQueue pq) { super(pq); } @@ -752,6 +764,7 @@ public int topDocsSize() { return list.size(); } + @SuppressWarnings({"unchecked", "rawtypes"}) public TopDocs topDocs() { Collections.sort(list, new Comparator() { public int compare(Object o1, Object o2) { diff --git a/solr/core/src/test/org/apache/solr/search/SolrIndexSearcherTest.java b/solr/core/src/test/org/apache/solr/search/SolrIndexSearcherTest.java new file mode 100644 index 000000000000..d1f557e0a833 --- /dev/null +++ b/solr/core/src/test/org/apache/solr/search/SolrIndexSearcherTest.java @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.search; + +import java.io.IOException; + +import org.apache.lucene.index.Term; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TotalHits; +import org.apache.lucene.search.Weight; +import org.apache.solr.SolrTestCaseJ4; +import org.junit.Before; +import org.junit.BeforeClass; + +public class SolrIndexSearcherTest extends SolrTestCaseJ4 { + + private final static int NUM_DOCS = 20; + + @BeforeClass + public static void setUpClass() throws Exception { + initCore("solrconfig.xml", "schema.xml"); + for (int i = 0 ; i < NUM_DOCS ; i ++) { + assertU(adoc("id", String.valueOf(i), + "field1_s", "foo", + "field2_s", String.valueOf(i % 2), + "field3_i_dvo", String.valueOf(i), + "field4_t", numbersTo(i))); + assertU(commit()); //commit inside the loop to get multiple segments + } + } + + private static String numbersTo(int i) { + StringBuilder numbers = new StringBuilder(); + for (int j = 0; j <= i ; j++) { + numbers.append(String.valueOf(j) + " "); + } + return numbers.toString(); + } + + @Before + public void setUp() throws Exception { + assertU(adoc("id", "1", + "field1_s", "foo", + "field2_s", "1", + "field3_i_dvo", "1", + "field4_t", numbersTo(1))); + assertU(commit()); + super.setUp(); + } + + public void testMinExactCountLongValue() { + assertQ("minExactCount larger than any possible hit count should produce exact results", + req("q", "field1_s:foo", + "minExactCount", Long.toString(10L * Integer.MAX_VALUE), + "rows", "2") + ,"//*[@numFoundExact='true']" + ,"//*[@numFound='" + NUM_DOCS + "']" + ); + } + + public void testMinExactCount() { + assertQ("minExactCount is lower than numFound, should produce approximated results", + req("q", "field1_s:foo", + "minExactCount", "2", + "rows", "2") + ,"//*[@numFoundExact='false']" + ,"//*[@numFound<='" + NUM_DOCS + "']" + ); + assertQ("minExactCount is higher than numFound, should produce exact results", + req("q", "field1_s:foo", + "minExactCount", "200", + "rows", "2") + ,"//*[@numFoundExact='true']" + ,"//*[@numFound='" + NUM_DOCS + "']" + ); + } + + private void assertMatchesEqual(int expectedCount, SolrIndexSearcher searcher, QueryCommand cmd) throws IOException { + QueryResult qr = new QueryResult(); + searcher.search(qr, cmd); + assertEquals(expectedCount, qr.getDocList().matches()); + assertEquals(TotalHits.Relation.EQUAL_TO, qr.getDocList().hitCountRelation()); + } + + private QueryResult assertMatchesGreaterThan(int expectedCount, SolrIndexSearcher searcher, QueryCommand cmd) throws IOException { + QueryResult qr = new QueryResult(); + searcher.search(qr, cmd); + assertTrue("Expecting returned matches to be no more than " + expectedCount + " but got " + qr.getDocList().matches(), + expectedCount >= qr.getDocList().matches()); + assertEquals(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, qr.getDocList().hitCountRelation()); + return qr; + } + + public void testLowMinExactCountGeneratesApproximation() throws IOException { + h.getCore().withSearcher(searcher -> { + QueryCommand cmd = createBasicQueryCommand(NUM_DOCS / 2, 10, "field1_s", "foo"); + assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); + return null; + }); + + h.getCore().withSearcher(searcher -> { + QueryCommand cmd = createBasicQueryCommand(1, 1, "field2_s", "1"); + assertMatchesGreaterThan(NUM_DOCS/2, searcher, cmd); + return null; + }); + } + + public void testHighMinExactCountGeneratesExactCount() throws IOException { +
h.getCore().withSearcher(searcher -> { + QueryCommand cmd = createBasicQueryCommand(NUM_DOCS, 10, "field1_s", "foo"); + assertMatchesEqual(NUM_DOCS, searcher, cmd); + return null; + }); + + h.getCore().withSearcher(searcher -> { + QueryCommand cmd = createBasicQueryCommand(NUM_DOCS, 10, "field2_s", "1"); + assertMatchesEqual(NUM_DOCS/2, searcher, cmd); + return null; + }); + } + + + + public void testLowMinExactCountWithQueryResultCache() throws IOException { + h.getCore().withSearcher(searcher -> { + QueryCommand cmd = createBasicQueryCommand(NUM_DOCS / 2, 10, "field1_s", "foo"); + cmd.clearFlags(SolrIndexSearcher.NO_CHECK_QCACHE | SolrIndexSearcher.NO_SET_QCACHE); + searcher.search(new QueryResult(), cmd); + assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); + return null; + }); + } + + public void testHighMinExactCountWithQueryResultCache() throws IOException { + h.getCore().withSearcher(searcher -> { + QueryCommand cmd = createBasicQueryCommand(NUM_DOCS, 2, "field1_s", "foo"); + cmd.clearFlags(SolrIndexSearcher.NO_CHECK_QCACHE | SolrIndexSearcher.NO_SET_QCACHE); + searcher.search(new QueryResult(), cmd); + assertMatchesEqual(NUM_DOCS, searcher, cmd); + return null; + }); + } + + public void testMinExactCountMoreRows() throws IOException { + h.getCore().withSearcher(searcher -> { + QueryCommand cmd = createBasicQueryCommand(2, NUM_DOCS, "field1_s", "foo"); + assertMatchesEqual(NUM_DOCS, searcher, cmd); + return null; + }); + } + + public void testMinExactCountMatchWithDocSet() throws IOException { + h.getCore().withSearcher(searcher -> { + QueryCommand cmd = createBasicQueryCommand(2, 2, "field1_s", "foo"); + assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); + + cmd.setNeedDocSet(true); + assertMatchesEqual(NUM_DOCS, searcher, cmd); + return null; + }); + } + + public void testMinExactCountWithMaxScoreRequested() throws IOException { + h.getCore().withSearcher(searcher -> { + QueryCommand cmd = createBasicQueryCommand(2, 2, "field1_s", "foo"); + cmd.setFlags(SolrIndexSearcher.GET_SCORES); + QueryResult qr = assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); + assertNotEquals(Float.NaN, qr.getDocList().maxScore()); + return null; + }); + } + + public void testMinExactWithFilters() throws Exception { + + h.getCore().withSearcher(searcher -> { + //Sanity Check - No Filter + QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0"); + assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); + return null; + }); + + + h.getCore().withSearcher(searcher -> { + QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0"); + Query filterQuery = new TermQuery(new Term("field4_t", "19")); + cmd.setFilterList(filterQuery); + assertNull(searcher.getProcessedFilter(null, cmd.getFilterList()).postFilter); + assertMatchesEqual(1, searcher, cmd); + return null; + }); + } + + public void testMinExactWithPostFilters() throws Exception { + h.getCore().withSearcher(searcher -> { + //Sanity Check - No Filter + QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0"); + assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); + return null; + }); + + + h.getCore().withSearcher(searcher -> { + QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0"); + MockPostFilter filterQuery = new MockPostFilter(1, 101); + cmd.setFilterList(filterQuery); + assertNotNull(searcher.getProcessedFilter(null, cmd.getFilterList()).postFilter); + assertMatchesEqual(1, searcher, cmd); + return null; + }); + + h.getCore().withSearcher(searcher -> { + QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", 
"0"); + MockPostFilter filterQuery = new MockPostFilter(100, 101); + cmd.setFilterList(filterQuery); + assertNotNull(searcher.getProcessedFilter(null, cmd.getFilterList()).postFilter); + assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); + return null; + }); + + } + + public void testMinExactWithPostFilterThatChangesScoreMode() throws Exception { + h.getCore().withSearcher(searcher -> { + QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0"); + // Use ScoreMode.COMPLETE for the PostFilter + MockPostFilter filterQuery = new MockPostFilter(100, 101, ScoreMode.COMPLETE); + cmd.setFilterList(filterQuery); + assertNotNull(searcher.getProcessedFilter(null, cmd.getFilterList()).postFilter); + assertMatchesEqual(NUM_DOCS, searcher, cmd); + return null; + }); + } + + private QueryCommand createBasicQueryCommand(int minExactCount, int length, String field, String q) { + QueryCommand cmd = new QueryCommand(); + cmd.setMinExactCount(minExactCount); + cmd.setLen(length); + cmd.setFlags(SolrIndexSearcher.NO_CHECK_QCACHE | SolrIndexSearcher.NO_SET_QCACHE); + cmd.setQuery(new TermQuery(new Term(field, q))); + return cmd; + } + + private final static class MockPostFilter extends TermQuery implements PostFilter { + + private final int cost; + private final int maxDocsToCollect; + private final ScoreMode scoreMode; + + public MockPostFilter(int maxDocsToCollect, int cost, ScoreMode scoreMode) { + super(new Term("foo", "bar"));//The term won't really be used. just the collector + assert cost > 100; + this.cost = cost; + this.maxDocsToCollect = maxDocsToCollect; + this.scoreMode = scoreMode; + } + + public MockPostFilter(int maxDocsToCollect, int cost) { + this(maxDocsToCollect, cost, null); + } + + @Override + public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { + throw new UnsupportedOperationException("This class is only intended to be used as a PostFilter"); + } + + @Override + public boolean getCache() { + return false; + } + + @Override + public void setCache(boolean cache) {} + + @Override + public int getCost() { + return cost; + } + + @Override + public void setCost(int cost) {} + + @Override + public boolean getCacheSep() { + return false; + } + + @Override + public void setCacheSep(boolean cacheSep) { + } + + @Override + public DelegatingCollector getFilterCollector(IndexSearcher searcher) { + return new DelegatingCollector() { + private int collected = 0; + @Override + public void collect(int doc) throws IOException { + if (++collected <= maxDocsToCollect) { + super.collect(doc); + } + } + + @Override + public ScoreMode scoreMode() { + if (scoreMode != null) { + return scoreMode; + } + return super.scoreMode(); + } + }; + } + + } +} diff --git a/solr/core/src/test/org/apache/solr/search/TestAddFieldRealTimeGet.java b/solr/core/src/test/org/apache/solr/search/TestAddFieldRealTimeGet.java index 34f7cdec6d0b..aaa54406b8f2 100644 --- a/solr/core/src/test/org/apache/solr/search/TestAddFieldRealTimeGet.java +++ b/solr/core/src/test/org/apache/solr/search/TestAddFieldRealTimeGet.java @@ -59,11 +59,13 @@ public void test() throws Exception { String newFieldType = "string"; String newFieldValue = "xyz"; + ignoreException("unknown field"); assertFailedU("Should fail due to unknown field '" + newFieldName + "'", adoc("id", "1", newFieldName, newFieldValue)); + unIgnoreException("unknown field"); IndexSchema schema = h.getCore().getLatestSchema(); - SchemaField newField = schema.newField(newFieldName, newFieldType, Collections.emptyMap()); + 
SchemaField newField = schema.newField(newFieldName, newFieldType, Collections.emptyMap()); IndexSchema newSchema = schema.addField(newField); h.getCore().setLatestSchema(newSchema); @@ -74,7 +76,7 @@ public void test() throws Exception { assertJQ(req("qt","/get", "id","1", "fl","id,"+newFieldName), "=={'doc':{'id':'1'," + newFieldKeyValue + "}}"); assertJQ(req("qt","/get","ids","1", "fl","id,"+newFieldName), - "=={'response':{'numFound':1,'start':0,'docs':[{'id':'1'," + newFieldKeyValue + "}]}}"); + "=={'response':{'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'1'," + newFieldKeyValue + "}]}}"); assertU(commit()); @@ -83,6 +85,6 @@ public void test() throws Exception { assertJQ(req("qt","/get", "id","1", "fl","id,"+newFieldName), "=={'doc':{'id':'1'," + newFieldKeyValue + "}}"); assertJQ(req("qt","/get","ids","1", "fl","id,"+newFieldName), - "=={'response':{'numFound':1,'start':0,'docs':[{'id':'1'," + newFieldKeyValue + "}]}}"); + "=={'response':{'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'1'," + newFieldKeyValue + "}]}}"); } } diff --git a/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java index 09444e1ae88a..cec08b57476c 100644 --- a/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java @@ -283,8 +283,8 @@ public void testNeedsScoreBugFixed() throws Exception { @Test public void testMergeBoost() throws Exception { - Set boosted = new HashSet(); - Set results = new HashSet(); + Set boosted = new HashSet<>(); + Set results = new HashSet<>(); for(int i=0; i<200; i++) { boosted.add(random().nextInt(1000)); @@ -314,7 +314,7 @@ public void testMergeBoost() throws Exception { CollapsingQParserPlugin.MergeBoost mergeBoost = new CollapsingQParserPlugin.MergeBoost(boostedArray); - List boostedResults = new ArrayList(); + List boostedResults = new ArrayList<>(); for(int i=0; i controlResults = new ArrayList(); + List controlResults = new ArrayList<>(); for(int i=0; i h.query(req("uf", "fl=trait*,id", "defType", "edismax"))); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, exception.code()); + assertEquals("dynamic field name must start or end with *", + exception.getMessage()); + } finally { + resetExceptionIgnores(); + } + + // simple test to validate dynamic uf parsing works + assertQ(req("uf", "trait* id", "defType", "edismax")); + } + public void testCyclicAliasing() throws Exception { try { ignoreException(".*Field aliases lead to a cycle.*"); @@ -1220,12 +1238,8 @@ public void testMinShouldMatchOptional() throws Exception { "defType", "edismax") , "*[count(//doc)=4]"); - assertQ("test minShouldMatch (top level optional terms only and sow=false)", - req("q", "stocks oil gold", // +(((text_sw:stock) (text_sw:oil) (text_sw:gold))~1) - "qf", "text_sw", - "mm", "50%", - "sow", sow, - "defType", "edismax") + assertQ("test minShouldMatch (top level optional terms only) local mm=50%", + req("q", "{!edismax qf=text_sw mm=50% sow=" + sow + " v='stocks oil gold'}") , "*[count(//doc)=4]"); assertQ("test minShouldMatch (top level optional and negative terms mm=50%)", @@ -1236,6 +1250,10 @@ public void testMinShouldMatchOptional() throws Exception { "defType", "edismax") , "*[count(//doc)=3]"); + assertQ("test minShouldMatch (top level optional and negative terms local mm=50%)", + req("q", "{!edismax qf=text_sw mm=50% sow=" + sow + " v='stocks oil gold -stockade'}") + , 
"*[count(//doc)=3]"); + assertQ("test minShouldMatch (top level optional and negative terms mm=100%)", req("q", "stocks gold -stockade", // +(((text_sw:stock) (text_sw:oil) (text_sw:gold) -(text_sw:stockad))~2) "qf", "text_sw", @@ -1244,6 +1262,10 @@ public void testMinShouldMatchOptional() throws Exception { "defType", "edismax") , "*[count(//doc)=1]"); + assertQ("test minShouldMatch (top level optional and negative terms local mm=100%)", + req("q", "{!edismax qf=text_sw mm=100% sow=" + sow + " v='stocks gold -stockade'}") + , "*[count(//doc)=1]"); + assertQ("test minShouldMatch (top level required terms only)", req("q", "stocks AND oil", // +(+(text_sw:stock) +(text_sw:oil)) "qf", "text_sw", @@ -1252,6 +1274,10 @@ public void testMinShouldMatchOptional() throws Exception { "defType", "edismax") , "*[count(//doc)=1]"); + assertQ("test minShouldMatch (top level required terms only) local mm=50%)", + req("q", "{!edismax qf=text_sw mm=50% sow=" + sow + " v='stocks AND oil'}") + , "*[count(//doc)=1]"); + assertQ("test minShouldMatch (top level optional and required terms)", req("q", "oil gold +stocks", // +(((text_sw:oil) (text_sw:gold) +(text_sw:stock))~1) "qf", "text_sw", @@ -1260,6 +1286,10 @@ public void testMinShouldMatchOptional() throws Exception { "defType", "edismax") , "*[count(//doc)=3]"); + assertQ("test minShouldMatch (top level optional and required terms) local mm=50%)", + req("q", "{!edismax qf=text_sw mm=50% sow=" + sow + " v='oil gold +stocks'}") + , "*[count(//doc)=3]"); + assertQ("test minShouldMatch (top level optional with explicit OR and parens)", req("q", "(snake OR stocks) oil", "qf", "text_sw", @@ -1268,6 +1298,10 @@ public void testMinShouldMatchOptional() throws Exception { "defType", "edismax") , "*[count(//doc)=2]"); + assertQ("test minShouldMatch (top level optional with explicit OR and parens) local mm=100%)", + req("q", "{!edismax qf=text_sw mm=100% sow=" + sow + " v='(snake OR stocks) oil'}") + , "*[count(//doc)=2]"); + // The results for these two appear odd, but are correct as per BooleanQuery processing. 
// See: http://searchhub.org/2011/12/28/why-not-and-or-and-not/ // Non-parenthesis OR/AND precedence is not true to abstract boolean logic in solr when q.op = AND @@ -1280,6 +1314,11 @@ public void testMinShouldMatchOptional() throws Exception { "sow", sow, "defType", "edismax") , "*[count(//doc)=0]"); + + assertQ("test minShouldMatch (top level optional with explicit OR without parens) local mm=100%)", + req("q", "{!edismax qf=text_sw q.op=OR mm=100% sow=" + sow + " v='snake OR stocks oil'}") + , "*[count(//doc)=0]"); + assertQ("test minShouldMatch (top level optional with explicit OR without parens)", req("q", "snake OR stocks oil", "qf", "text_sw", @@ -1289,6 +1328,10 @@ public void testMinShouldMatchOptional() throws Exception { "defType", "edismax") , "*[count(//doc)=0]"); + assertQ("test minShouldMatch (top level optional with explicit OR without parens) local mm=100%)", + req("q", "{!edismax qf=text_sw q.op=AND mm=100% sow=" + sow + " v='snake OR stocks oil'}") + , "*[count(//doc)=0]"); + // SOLR-9174 assertQ("test minShouldMatch=1<-1 with explicit OR, one impossible clause, and no explicit q.op", req("q", "barbie OR (hair AND nonexistentword)", @@ -1297,6 +1340,10 @@ public void testMinShouldMatchOptional() throws Exception { "sow", sow, "defType", "edismax") , "*[count(//doc)=3]"); + + assertQ("test local minShouldMatch=1<-1 with explicit OR, one impossible clause, and no explicit q.op", + req("q", "{!edismax qf=text_sw mm=1<-1 sow=" + sow + " v='barbie OR (hair AND nonexistentword)'}") + , "*[count(//doc)=3]"); } } @@ -1772,6 +1819,7 @@ public void testOperatorsAndMultiWordSynonyms() throws Exception { , "/response/numFound==1" ); + @SuppressWarnings({"rawtypes"}) Map all = (Map) Utils.fromJSONString(h.query(req("q", "*:*", "rows", "0", "wt", "json"))); int totalDocs = Integer.parseInt(((Map)all.get("response")).get("numFound").toString()); int allDocsExceptOne = totalDocs - 1; diff --git a/solr/core/src/test/org/apache/solr/search/TestHashQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestHashQParserPlugin.java index 6e529dcab942..97b995285f52 100644 --- a/solr/core/src/test/org/apache/solr/search/TestHashQParserPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/TestHashQParserPlugin.java @@ -20,6 +20,7 @@ import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.util.BaseTestHarness; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @@ -65,7 +66,7 @@ public void testManyHashPartitions() throws Exception { params.add("partitionKeys", "a_i,a_s,a_i,a_s"); params.add("wt", "xml"); String response = h.query(req(params)); - h.validateXPath(response, "//*[@numFound='0']"); + BaseTestHarness.validateXPath(response, "//*[@numFound='0']"); params = new ModifiableSolrParams(); params.add("q", "*:*"); @@ -103,7 +104,7 @@ public void testHashPartitionWithEmptyValues() throws Exception { params.add("partitionKeys", "a_s"); params.add("wt", "xml"); String response = h.query(req(params)); - h.validateXPath(response, "//*[@numFound='4']"); + BaseTestHarness.validateXPath(response, "//*[@numFound='4']"); //Test with int hash params = new ModifiableSolrParams(); @@ -112,11 +113,12 @@ public void testHashPartitionWithEmptyValues() throws Exception { params.add("partitionKeys", "a_i"); params.add("wt", "xml"); response = h.query(req(params)); - h.validateXPath(response, "//*[@numFound='4']"); + BaseTestHarness.validateXPath(response, 
"//*[@numFound='4']"); } @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void testHashPartition() throws Exception { @@ -153,7 +155,7 @@ public void testHashPartition() throws Exception { while(it.hasNext()) { String s = it.next(); - String results = h.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); + String results = BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); if(results == null) { set1.add(s); } @@ -172,7 +174,7 @@ public void testHashPartition() throws Exception { while(it.hasNext()) { String s = it.next(); - String results = h.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); + String results = BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); if(results == null) { set2.add(s); } @@ -192,7 +194,7 @@ public void testHashPartition() throws Exception { while(it.hasNext()) { String s = it.next(); - String results = h.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); + String results = BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); if(results == null) { set3.add(s); } @@ -223,7 +225,7 @@ public void testHashPartition() throws Exception { while(it.hasNext()) { String s = it.next(); - String results = h.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); + String results = BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); if(results == null) { set1.add(s); } @@ -242,7 +244,7 @@ public void testHashPartition() throws Exception { while(it.hasNext()) { String s = it.next(); - String results = h.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); + String results = BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); if(results == null) { set2.add(s); } @@ -270,7 +272,7 @@ public void testHashPartition() throws Exception { while(it.hasNext()) { String s = it.next(); - String results = h.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); + String results = BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); if(results == null) { set1.add(s); } @@ -289,7 +291,7 @@ public void testHashPartition() throws Exception { while(it.hasNext()) { String s = it.next(); - String results = h.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); + String results = BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); if(results == null) { set2.add(s); } @@ -302,7 +304,9 @@ public void testHashPartition() throws Exception { } - private void assertNoOverLap(Set setA, Set setB) throws Exception { + private void assertNoOverLap(@SuppressWarnings({"rawtypes"})Set setA, + @SuppressWarnings({"rawtypes"})Set setB) throws Exception { + @SuppressWarnings({"rawtypes"}) Iterator it = setA.iterator(); while(it.hasNext()) { Object o = it.next(); diff --git a/solr/core/src/test/org/apache/solr/search/TestIndexSearcher.java b/solr/core/src/test/org/apache/solr/search/TestIndexSearcher.java index 457129fd9542..a6c3a0e969bf 100644 --- a/solr/core/src/test/org/apache/solr/search/TestIndexSearcher.java +++ b/solr/core/src/test/org/apache/solr/search/TestIndexSearcher.java @@ -83,9 +83,11 @@ public void tearDown() throws Exception { assertU((commit())); } + @SuppressWarnings({"unchecked"}) private String getStringVal(SolrQueryRequest sqr, String field, int doc) throws IOException { SchemaField sf = sqr.getSchema().getField(field); ValueSource vs = 
sf.getType().getValueSource(sf, null); + @SuppressWarnings({"rawtypes"}) Map context = ValueSource.newContext(sqr.getSearcher()); vs.createWeight(context, sqr.getSearcher()); IndexReaderContext topReaderContext = sqr.getSearcher().getTopReaderContext(); @@ -138,6 +140,7 @@ public void testReopen() throws Exception { assertEquals(1, baseRefCount); Map metrics = h.getCore().getCoreMetricManager().getRegistry().getMetrics(); + @SuppressWarnings({"unchecked"}) Gauge g = (Gauge)metrics.get("SEARCHER.searcher.registeredAt"); Date sr3SearcherRegAt = g.getValue(); assertU(commit()); // nothing has changed @@ -247,6 +250,7 @@ private void createCoreAndValidateListeners(int numTimesCalled, int numTimesCall addDummyDoc(newCore); // Open a new searcher, this should call the newSearcherListeners + @SuppressWarnings({"rawtypes"}) Future[] future = new Future[1]; newCore.getSearcher(true, false, future); future[0].get(); @@ -435,7 +439,7 @@ static class MockSearcherListener implements SolrEventListener { static AtomicInteger numberOfTimesCalledFirstSearcher; @Override - public void init(NamedList args) {} + public void init(@SuppressWarnings({"rawtypes"})NamedList args) {} @Override public void postCommit() {} @@ -459,7 +463,7 @@ static class SlowSearcherListener implements SolrEventListener { static CountDownLatch latch; @Override - public void init(NamedList args) {} + public void init(@SuppressWarnings({"rawtypes"})NamedList args) {} @Override public void postCommit() {} diff --git a/solr/core/src/test/org/apache/solr/search/TestMaxScoreQueryParser.java b/solr/core/src/test/org/apache/solr/search/TestMaxScoreQueryParser.java index 1800ddb2f85b..3f99fa641586 100644 --- a/solr/core/src/test/org/apache/solr/search/TestMaxScoreQueryParser.java +++ b/solr/core/src/test/org/apache/solr/search/TestMaxScoreQueryParser.java @@ -48,6 +48,7 @@ public void testFallbackToLucene() { assertEquals(new BoostQuery(new TermQuery(new Term("text", "foo")), 3f), q); q = parse("price:[0 TO 10]"); + @SuppressWarnings({"rawtypes"}) Class expected = LegacyNumericRangeQuery.class; if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) { expected = PointRangeQuery.class; diff --git a/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java index b3e01f278437..96ccee2d41c7 100644 --- a/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java @@ -17,9 +17,12 @@ package org.apache.solr.search; import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.IntStream; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; +import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.metrics.MetricsMap; import org.apache.solr.metrics.SolrMetricManager; @@ -598,10 +601,12 @@ public void testRerankQueryParsingShouldFailWithoutMandatoryReRankQueryParameter params.add("start", "0"); params.add("rows", "2"); + ignoreException("reRankQuery parameter is mandatory"); SolrException se = expectThrows(SolrException.class, "A syntax error should be thrown when "+ReRankQParserPlugin.RERANK_QUERY+" parameter is not specified", () -> h.query(req(params)) ); assertTrue(se.code() == SolrException.ErrorCode.BAD_REQUEST.code); + unIgnoreException("reRankQuery parameter is mandatory"); } @@ -645,5 +650,95 @@ public void testReRankQueriesWithDefType() throws Exception 
{ ); } } + + @Test + public void testMinExactCount() throws Exception { + + assertU(delQ("*:*")); + assertU(commit()); + + int numDocs = 200; + + for (int i = 0 ; i < numDocs ; i ++) { + assertU(adoc( + "id", String.valueOf(i), + "id_p_i", String.valueOf(i), + "field_t", IntStream.range(0, numDocs).mapToObj(val -> Integer.toString(val)).collect(Collectors.joining(" ")))); + } + assertU(commit()); + + ModifiableSolrParams params = new ModifiableSolrParams(); + params.add("q", "field_t:0"); + params.add("start", "0"); + params.add("rows", "10"); + params.add("fl", "id"); + assertQ(req(params), + "*[count(//doc)=10]", + "//result[@numFound='" + numDocs + "']", + "//result[@numFoundExact='true']", + "//result/doc[1]/str[@name='id'][.='0']", + "//result/doc[2]/str[@name='id'][.='1']", + "//result/doc[3]/str[@name='id'][.='2']", + "//result/doc[4]/str[@name='id'][.='3']", + "//result/doc[5]/str[@name='id'][.='4']", + "//result/doc[6]/str[@name='id'][.='5']", + "//result/doc[7]/str[@name='id'][.='6']", + "//result/doc[8]/str[@name='id'][.='7']", + "//result/doc[9]/str[@name='id'][.='8']", + "//result/doc[10]/str[@name='id'][.='9']" + ); + + params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rrq "+ReRankQParserPlugin.RERANK_DOCS+"=20}"); + params.add("rrq", "id:10"); + assertQ(req(params), + "*[count(//doc)=10]", + "//result[@numFound='" + numDocs + "']", + "//result[@numFoundExact='true']", + "//result/doc[1]/str[@name='id'][.='10']", + "//result/doc[2]/str[@name='id'][.='0']", + "//result/doc[3]/str[@name='id'][.='1']", + "//result/doc[4]/str[@name='id'][.='2']", + "//result/doc[5]/str[@name='id'][.='3']", + "//result/doc[6]/str[@name='id'][.='4']", + "//result/doc[7]/str[@name='id'][.='5']", + "//result/doc[8]/str[@name='id'][.='6']", + "//result/doc[9]/str[@name='id'][.='7']", + "//result/doc[10]/str[@name='id'][.='8']" + ); + + params.add(CommonParams.MIN_EXACT_COUNT, "2"); + assertQ(req(params), + "*[count(//doc)=10]", + "//result[@numFound<='" + numDocs + "']", + "//result[@numFoundExact='false']", + "//result/doc[1]/str[@name='id'][.='10']", + "//result/doc[2]/str[@name='id'][.='0']", + "//result/doc[3]/str[@name='id'][.='1']", + "//result/doc[4]/str[@name='id'][.='2']", + "//result/doc[5]/str[@name='id'][.='3']", + "//result/doc[6]/str[@name='id'][.='4']", + "//result/doc[7]/str[@name='id'][.='5']", + "//result/doc[8]/str[@name='id'][.='6']", + "//result/doc[9]/str[@name='id'][.='7']", + "//result/doc[10]/str[@name='id'][.='8']" + ); + + params.add("sort", "score desc, id_p_i asc"); + assertQ(req(params), + "*[count(//doc)=10]", + "//result[@numFound<='" + numDocs + "']", + "//result[@numFoundExact='false']", + "//result/doc[1]/str[@name='id'][.='10']", + "//result/doc[2]/str[@name='id'][.='0']", + "//result/doc[3]/str[@name='id'][.='1']", + "//result/doc[4]/str[@name='id'][.='2']", + "//result/doc[5]/str[@name='id'][.='3']", + "//result/doc[6]/str[@name='id'][.='4']", + "//result/doc[7]/str[@name='id'][.='5']", + "//result/doc[8]/str[@name='id'][.='6']", + "//result/doc[9]/str[@name='id'][.='7']", + "//result/doc[10]/str[@name='id'][.='8']" + ); + } } diff --git a/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java b/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java index 40aea9f0ac4e..30cb450e1762 100644 --- a/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java +++ b/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java @@ -71,7 +71,7 @@ public void testGetRealtime() throws Exception { ); 
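// Aside: the expectation strings below gain a numFoundExact flag; with the
// minExactCount work, Solr result lists state whether numFound is exact or only
// a lower bound. Through SolrJ the flag sits on SolrDocumentList (a sketch,
// assuming a QueryResponse named rsp and org.apache.solr.common.SolrDocumentList):
SolrDocumentList results = rsp.getResults();
long numFound = results.getNumFound();      // exact count, or a lower bound
Boolean exact = results.getNumFoundExact(); // realtime /get responses stay exact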
assertJQ(req("qt","/get","ids","1", "fl","id") ,"=={" + - " 'response':{'numFound':1,'start':0,'docs':[" + + " 'response':{'numFound':1,'start':0,'numFoundExact':true,'docs':[" + " {" + " 'id':'1'}]" + " }}}" @@ -98,7 +98,7 @@ public void testGetRealtime() throws Exception { ); assertJQ(req("qt","/get","ids","1", "fl","id") ,"=={" + - " 'response':{'numFound':1,'start':0,'docs':[" + + " 'response':{'numFound':1,'start':0,'numFoundExact':true,'docs':[" + " {" + " 'id':'1'}]" + " }}}" @@ -113,7 +113,7 @@ public void testGetRealtime() throws Exception { ,"=={'doc':null}" ); assertJQ(req("qt","/get","ids","1") - ,"=={'response':{'numFound':0,'start':0,'docs':[]}}" + ,"=={'response':{'numFound':0,'start':0,'numFoundExact':true,'docs':[]}}" ); @@ -715,7 +715,9 @@ public void run() { } String response = h.query(sreq); + @SuppressWarnings({"rawtypes"}) Map rsp = (Map) Utils.fromJSONString(response); + @SuppressWarnings({"rawtypes"}) List doclist = (List)(((Map)rsp.get("response")).get("docs")); if (doclist.size() == 0) { // there's no info we can get back with a delete, so not much we can check without further synchronization diff --git a/solr/core/src/test/org/apache/solr/search/TestRecovery.java b/solr/core/src/test/org/apache/solr/search/TestRecovery.java index f4df24c94837..4e7e12ec0afc 100644 --- a/solr/core/src/test/org/apache/solr/search/TestRecovery.java +++ b/solr/core/src/test/org/apache/solr/search/TestRecovery.java @@ -234,10 +234,13 @@ public void testLogReplay() throws Exception { assertEquals(UpdateLog.State.REPLAYING, h.getCore().getUpdateHandler().getUpdateLog().getState()); // check metrics + @SuppressWarnings({"unchecked"}) Gauge state = (Gauge)metrics.get("TLOG.state"); assertEquals(UpdateLog.State.REPLAYING.ordinal(), state.getValue().intValue()); + @SuppressWarnings({"unchecked"}) Gauge replayingLogs = (Gauge)metrics.get("TLOG.replay.remaining.logs"); assertTrue(replayingLogs.getValue().intValue() > 0); + @SuppressWarnings({"unchecked"}) Gauge replayingDocs = (Gauge)metrics.get("TLOG.replay.remaining.bytes"); assertTrue(replayingDocs.getValue().longValue() > 0); Meter replayDocs = (Meter)metrics.get("TLOG.replay.ops"); @@ -570,8 +573,10 @@ public void testBuffering() throws Exception { ulog.bufferUpdates(); assertEquals(UpdateLog.State.BUFFERING, ulog.getState()); + @SuppressWarnings({"unchecked"}) Gauge state = (Gauge)metrics.get("TLOG.state"); assertEquals(UpdateLog.State.BUFFERING.ordinal(), state.getValue().intValue()); + @SuppressWarnings({"unchecked"}) Gauge bufferedOps = (Gauge)metrics.get("TLOG.buffered.ops"); int initialOps = bufferedOps.getValue(); Meter applyingBuffered = (Meter)metrics.get("TLOG.applyingBuffered.ops"); @@ -1676,17 +1681,22 @@ void deleteLogs() throws Exception { private static Long getVer(SolrQueryRequest req) throws Exception { String response = JQ(req); + @SuppressWarnings({"rawtypes"}) Map rsp = (Map) Utils.fromJSONString(response); + @SuppressWarnings({"rawtypes"}) Map doc = null; if (rsp.containsKey("doc")) { doc = (Map)rsp.get("doc"); } else if (rsp.containsKey("docs")) { + @SuppressWarnings({"rawtypes"}) List lst = (List)rsp.get("docs"); if (lst.size() > 0) { doc = (Map)lst.get(0); } } else if (rsp.containsKey("response")) { + @SuppressWarnings({"rawtypes"}) Map responseMap = (Map)rsp.get("response"); + @SuppressWarnings({"rawtypes"}) List lst = (List)responseMap.get("docs"); if (lst.size() > 0) { doc = (Map)lst.get(0); diff --git a/solr/core/src/test/org/apache/solr/search/TestRecoveryHdfs.java 
b/solr/core/src/test/org/apache/solr/search/TestRecoveryHdfs.java index fa146ea80edc..2786aca94d7f 100644 --- a/solr/core/src/test/org/apache/solr/search/TestRecoveryHdfs.java +++ b/solr/core/src/test/org/apache/solr/search/TestRecoveryHdfs.java @@ -1001,17 +1001,22 @@ void deleteLogs() throws Exception { private static Long getVer(SolrQueryRequest req) throws Exception { String response = JQ(req); + @SuppressWarnings({"rawtypes"}) Map rsp = (Map) Utils.fromJSONString(response); + @SuppressWarnings({"rawtypes"}) Map doc = null; if (rsp.containsKey("doc")) { doc = (Map)rsp.get("doc"); } else if (rsp.containsKey("docs")) { + @SuppressWarnings({"rawtypes"}) List lst = (List)rsp.get("docs"); if (lst.size() > 0) { doc = (Map)lst.get(0); } } else if (rsp.containsKey("response")) { + @SuppressWarnings({"rawtypes"}) Map responseMap = (Map)rsp.get("response"); + @SuppressWarnings({"rawtypes"}) List lst = (List)responseMap.get("docs"); if (lst.size() > 0) { doc = (Map)lst.get(0); diff --git a/solr/core/src/test/org/apache/solr/search/TestReloadDeadlock.java b/solr/core/src/test/org/apache/solr/search/TestReloadDeadlock.java index 32c91db29bfc..4bcc00467ec0 100644 --- a/solr/core/src/test/org/apache/solr/search/TestReloadDeadlock.java +++ b/solr/core/src/test/org/apache/solr/search/TestReloadDeadlock.java @@ -59,7 +59,7 @@ public static void ifVerbose(Object... args) { sb.append(' '); sb.append(o == null ? "(null)" : o.toString()); } - log.info(sb.toString()); + log.info("{}", sb); } } diff --git a/solr/core/src/test/org/apache/solr/search/TestSmileRequest.java b/solr/core/src/test/org/apache/solr/search/TestSmileRequest.java index 765e9841d120..15b1b328b5b9 100644 --- a/solr/core/src/test/org/apache/solr/search/TestSmileRequest.java +++ b/solr/core/src/test/org/apache/solr/search/TestSmileRequest.java @@ -77,6 +77,7 @@ public void assertJQ(SolrClient client, SolrParams args, String... 
tests) throws query.setPath(path); } NamedList rsp = client.request(query); + @SuppressWarnings({"rawtypes"}) Map m = rsp.asMap(5); String jsonStr = Utils.toJSONString(m); SolrTestCaseHS.matchJSON(jsonStr, tests); @@ -98,6 +99,7 @@ public String getWriterType() { } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public NamedList processResponse(InputStream body, String encoding) { try { Map m = (Map) SmileWriterTest.decodeSmile(body); diff --git a/solr/core/src/test/org/apache/solr/search/TestSolrCachePerf.java b/solr/core/src/test/org/apache/solr/search/TestSolrCachePerf.java index cc9cf736f0e9..f46ba60dc091 100644 --- a/solr/core/src/test/org/apache/solr/search/TestSolrCachePerf.java +++ b/solr/core/src/test/org/apache/solr/search/TestSolrCachePerf.java @@ -37,6 +37,7 @@ @LuceneTestCase.Slow public class TestSolrCachePerf extends SolrTestCaseJ4 { + @SuppressWarnings({"unchecked", "rawtypes"}) private static final Class[] IMPLS = new Class[] { CaffeineCache.class }; @@ -85,9 +86,11 @@ private void assertGreaterThanOrEqual(String message, double greater, double sma static final String VALUE = "foo"; + @SuppressWarnings({"rawtypes"}) private void doTestGetPutCompute(Map ratioStats, Map timeStats, int numThreads, boolean useCompute) throws Exception { for (Class clazz : IMPLS) { SolrMetricManager metricManager = new SolrMetricManager(); + @SuppressWarnings({"unchecked"}) SolrCache cache = clazz.getDeclaredConstructor().newInstance(); Map params = new HashMap<>(); params.put("size", "" + NUM_KEYS); diff --git a/solr/core/src/test/org/apache/solr/search/TestSolrJ.java b/solr/core/src/test/org/apache/solr/search/TestSolrJ.java index 1fd485f92920..ede71b4ebb12 100644 --- a/solr/core/src/test/org/apache/solr/search/TestSolrJ.java +++ b/solr/core/src/test/org/apache/solr/search/TestSolrJ.java @@ -105,6 +105,7 @@ public void run(){ // need a server.close()!!! 
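// Aside on the TestReloadDeadlock change above, log.info(sb.toString()) ->
// log.info("{}", sb): with a "{}" placeholder SLF4J defers calling sb.toString()
// until the event is actually emitted, so a disabled INFO level costs no string
// building. The two forms compared (log and sb as in that test):
log.info("{}", sb);         // sb.toString() runs only if INFO is enabled
// log.info(sb.toString()); // builds the String unconditionally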
} + @SuppressWarnings({"unchecked"}) public static SolrInputDocument getDocument(int docnum) { SolrInputDocument doc = new SolrInputDocument(); doc.setField(idField, docnum ); @@ -120,6 +121,7 @@ public static SolrInputDocument getDocument(int docnum) { int golden = (int)2654435761L; int h = docnum * golden; int n = (h & 0xff) + 1; + @SuppressWarnings({"rawtypes"}) List lst = new ArrayList(n); for (int i=0; i clients, final String sta } + @SuppressWarnings({"unchecked", "rawtypes"}) private void sanityCheckIndividualShards() throws Exception { // sanity check that our expectations about each shard (non-distrib) are correct @@ -184,6 +185,7 @@ private void sanityCheckIndividualShards() throws Exception { } } + @SuppressWarnings({"unchecked", "rawtypes"}) private void checkRefinementAndOverrequesting() throws Exception { // // distributed queries // // @@ -372,6 +374,7 @@ private void checkSubFacetStats() throws Exception { private void checkSubFacetStats(String extraJson) throws Exception { String commonJson = "type: terms, " + extraJson; + @SuppressWarnings({"unchecked", "rawtypes"}) NamedList all_facets = (NamedList) queryServer ( params( "q", "*:*", "shards", getShardsString(), "rows" , "0", "json.facet", "{ foo : { " + commonJson + " field: foo_s, facet: { " + @@ -383,8 +386,10 @@ private void checkSubFacetStats(String extraJson) throws Exception { assertNotNull(all_facets); - List foo_buckets = (List) ((NamedList)all_facets.get("foo")).get("buckets"); + @SuppressWarnings({"unchecked", "rawtypes"}) + List foo_buckets = (List) (all_facets.get("foo")).get("buckets"); + @SuppressWarnings({"rawtypes"}) NamedList aaa0_Bucket = foo_buckets.get(0); assertEquals(ALL_STATS.size() + 3, aaa0_Bucket.size()); // val,count,facet assertEquals("aaa0", aaa0_Bucket.get("val")); @@ -402,6 +407,7 @@ private void checkSubFacetStats(String extraJson) throws Exception { assertEquals(284L, aaa0_Bucket.get("unique")); assertEquals(284L, aaa0_Bucket.get("hll")); + @SuppressWarnings({"rawtypes"}) NamedList tail_Bucket = foo_buckets.get(5); assertEquals(ALL_STATS.size() + 3, tail_Bucket.size()); // val,count,facet assertEquals("tail", tail_Bucket.get("val")); @@ -419,8 +425,10 @@ private void checkSubFacetStats(String extraJson) throws Exception { assertEquals(45L, tail_Bucket.get("unique")); assertEquals(45L, tail_Bucket.get("hll")); + @SuppressWarnings({"unchecked", "rawtypes"}) List tail_bar_buckets = (List) ((NamedList)tail_Bucket.get("bar")).get("buckets"); + @SuppressWarnings({"rawtypes"}) NamedList tailB_Bucket = tail_bar_buckets.get(0); assertEquals(ALL_STATS.size() + 3, tailB_Bucket.size()); // val,count,skg ... 
NO SUB FACETS assertEquals("tailB", tailB_Bucket.get("val")); @@ -439,6 +447,7 @@ private void checkSubFacetStats(String extraJson) throws Exception { assertEquals(6L, tailB_Bucket.get("hll")); // check the SKG stats on our tailB bucket + @SuppressWarnings({"rawtypes"}) NamedList tailB_skg = (NamedList) tailB_Bucket.get("skg"); assertEquals(tailB_skg.toString(), 3, tailB_skg.size()); diff --git a/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java b/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java index f9419de4d92b..f5c75c51df9d 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java +++ b/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java @@ -78,6 +78,7 @@ public class RangeFacetCloudTest extends SolrCloudTestCase { * the array indexes represent values in our numeric field, while the array values * track the mapping from string field terms to facet counts for docs that have that numeric value */ + @SuppressWarnings({"unchecked", "rawtypes"}) private static final Map[] TERM_MODEL = new Map[NUM_RANGE_VALUES]; @BeforeClass @@ -138,7 +139,9 @@ public void testInclude_Lower() throws Exception { final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 4, buckets.size()); @@ -170,7 +173,9 @@ public void testInclude_Lower_Gap2() throws Exception { final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 3, buckets.size()); @@ -201,7 +206,9 @@ public void testInclude_Lower_Gap2_hardend() throws Exception { final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 3, buckets.size()); @@ -241,7 +248,9 @@ public void testInclude_Upper() throws Exception { final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 4, buckets.size()); @@ -269,7 +278,9 @@ public void testInclude_Upper_Gap2() throws Exception { final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 2, buckets.size()); @@ -298,7 +309,9 @@ public void testInclude_Edge() throws Exception { final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 3, buckets.size()); @@ -333,7 +346,9 @@ public void 
testInclude_EdgeLower() throws Exception { final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 3, buckets.size()); @@ -366,7 +381,9 @@ public void testInclude_EdgeUpper() throws Exception { final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 3, buckets.size()); @@ -399,7 +416,9 @@ public void testInclude_EdgeLowerUpper() throws Exception { final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 3, buckets.size()); @@ -434,7 +453,9 @@ public void testInclude_All() throws Exception { final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 3, buckets.size()); @@ -481,7 +502,9 @@ public void testInclude_All_Gap2() throws Exception { QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 2, buckets.size()); @@ -516,7 +539,9 @@ public void testInclude_All_Gap2() throws Exception { rsp = cluster.getSolrClient().query(solrQuery); try { + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); if (null == expected_mincount_bucket_val) { @@ -559,7 +584,9 @@ public void testInclude_All_Gap2_hardend() throws Exception { final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 2, buckets.size()); @@ -591,7 +618,9 @@ public void testRangeWithInterval() throws Exception { final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>) rsp.getResponse().get("facets")).get("foo"); + @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 4, buckets.size()); @@ -621,7 +650,9 @@ public void testRangeWithOldIntervalFormat() throws Exception { final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>) rsp.getResponse().get("facets")).get("foo"); + @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 4, buckets.size()); @@ -654,7 +685,9 @@ public 
void testIntervalWithMincount() throws Exception { QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 2, buckets.size()); @@ -688,7 +721,9 @@ public void testIntervalWithMincount() throws Exception { rsp = cluster.getSolrClient().query(solrQuery); try { + @SuppressWarnings({"unchecked"}) final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); if (null == expected_mincount_bucket_val) { @@ -727,6 +762,7 @@ private static void assertBucket(final String label, assertNotNull("null bucket", actualBucket); assertNotNull("expectedRangeValues", expectedRangeValues); assertTrue("bucket is not a NamedList", actualBucket instanceof NamedList); + @SuppressWarnings({"unchecked"}) final NamedList bucket = (NamedList) actualBucket; if (null != expectedVal) { @@ -751,11 +787,13 @@ private static void assertBucket(final String label, if (null == subFacetLimitUsed || 0 == expectedCount) { assertNull("unexpected subfacets", bucket.get("bar")); } else { + @SuppressWarnings({"unchecked"}) NamedList bar = ((NamedList)bucket.get("bar")); assertNotNull("can't find subfacet 'bar'", bar); final int numBucketsExpected = subFacetLimitUsed < 0 ? expectedTermCounts.size() : Math.min(subFacetLimitUsed, expectedTermCounts.size()); + @SuppressWarnings({"unchecked"}) final List> subBuckets = (List>) bar.get("buckets"); // we should either have filled out the expected limit, or assertEquals("num subfacet buckets", numBucketsExpected, subBuckets.size()); @@ -877,7 +915,7 @@ private static final List> buildListOfFacetRangeOtherOp // // NOTE: Don't mix "ALL" or "NONE" with other options so we don't have to make assertBeforeAfterBetween // overly complicated - ArrayList> results = new ArrayList(5); + ArrayList> results = new ArrayList<>(5); results.add(EnumSet.of(FacetRangeOther.ALL)); results.add(EnumSet.of(FacetRangeOther.BEFORE, FacetRangeOther.AFTER, FacetRangeOther.BETWEEN)); results.add(EnumSet.of(FacetRangeOther.BEFORE, FacetRangeOther.AFTER)); diff --git a/solr/core/src/test/org/apache/solr/search/facet/SpatialHeatmapFacetsTest.java b/solr/core/src/test/org/apache/solr/search/facet/SpatialHeatmapFacetsTest.java index 5af7bb6a0a6a..a1777671c25b 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/SpatialHeatmapFacetsTest.java +++ b/solr/core/src/test/org/apache/solr/search/facet/SpatialHeatmapFacetsTest.java @@ -104,6 +104,7 @@ public void testClassicFacets() throws Exception { // AKA SimpleFacets // ----- Search // this test simply has some 0's, nulls, 1's and a 2 in there. + @SuppressWarnings({"rawtypes"}) NamedList hmObj = getHmObj(query(params(baseParams, FacetParams.FACET_HEATMAP_GEOM, "[\"50 20\" TO \"180 90\"]", FacetParams.FACET_HEATMAP_LEVEL, "4"))); @@ -232,6 +233,7 @@ public void testJsonFacets() throws Exception { // ----- Search // this test simply has some 0's, nulls, 1's and a 2 in there. 
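// A note on the idiom used throughout these hunks: each added @SuppressWarnings is
// scoped to the single declaration performing the raw/unchecked cast, so the rest of
// the method stays fully lint-checked. A minimal sketch (names hypothetical, not from the patch):
//
//   @SuppressWarnings({"rawtypes"})
//   NamedList facets = (NamedList) rsp.getResponse().get("facets"); // only this cast is exempt
//   long numFound = rsp.getResults().getNumFound();                 // still checked as usual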
+ @SuppressWarnings({"rawtypes"}) NamedList hmObj = getHmObj(query(params(baseParams, "json.facet", "{f1:{type:heatmap, f:" + FIELD + " geom:'[\"50 20\" TO \"180 90\"]', gridLevel:4}}"))); List> counts = (List>) hmObj.get("counts_ints2D"); @@ -265,8 +267,10 @@ public void testJsonFacets() throws Exception { "q2:{type:query, q:'id:4', " + jsonHeatmap + " } " + "}")); { + @SuppressWarnings({"rawtypes"}) final NamedList q1Res = (NamedList) response.getResponse().findRecursive("facets", "q1"); assertEquals("1", q1Res.get("count").toString()); + @SuppressWarnings({"rawtypes"}) final NamedList q2Res = (NamedList) response.getResponse().findRecursive("facets", "q2"); assertEquals("1", q2Res.get("count").toString()); // essentially, these will differ only in the heatmap counts but otherwise will be the same @@ -303,6 +307,7 @@ public void testJsonFacets() throws Exception { //good enough for this test method } + @SuppressWarnings({"rawtypes"}) private NamedList getHmObj(QueryResponse response) { // classic faceting final NamedList classicResp = (NamedList) response.getResponse().findRecursive("facet_counts", "facet_heatmaps", FIELD); diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetJoinDomain.java b/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetJoinDomain.java index 70f4ad28b5eb..4c877461e87b 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetJoinDomain.java +++ b/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetJoinDomain.java @@ -208,6 +208,7 @@ public void testMalformedGivesError() throws Exception { SolrException e = expectThrows(SolrException.class, () -> { final SolrParams req = params("q", "*:*", "json.facet", "{ x : { type:terms, field:x_s, domain: { join:"+join+" } } }"); + @SuppressWarnings({"rawtypes"}) final NamedList trash = getRandClient(random()).request(new QueryRequest(req)); }); assertEquals(join + " -> " + e, SolrException.ErrorCode.BAD_REQUEST.code, e.code()); @@ -440,6 +441,7 @@ private void assertFacetCountsAreCorrect(final AtomicInteger maxBucketsToCheck, QueryResponse rsp = null; // JSON Facets not (currently) available from QueryResponse... 
+ @SuppressWarnings({"rawtypes"}) NamedList topNamedList = null; try { rsp = (new QueryRequest(initParams)).process(getRandClient(random())); @@ -451,6 +453,7 @@ private void assertFacetCountsAreCorrect(final AtomicInteger maxBucketsToCheck, e.getMessage(), e); } try { + @SuppressWarnings({"rawtypes"}) final NamedList facetResponse = (NamedList) topNamedList.get("facets"); assertNotNull("null facet results?", facetResponse); assertEquals("numFound mismatch with top count?", @@ -474,13 +477,15 @@ private void assertFacetCountsAreCorrect(final AtomicInteger maxBucketsToCheck, private void assertFacetCountsAreCorrect(final AtomicInteger maxBucketsToCheck, final Map expected, final SolrParams baseParams, - final NamedList actualFacetResponse) throws SolrServerException, IOException { + @SuppressWarnings({"rawtypes"})final NamedList actualFacetResponse) throws SolrServerException, IOException { for (Map.Entry entry : expected.entrySet()) { final String facetKey = entry.getKey(); final TermFacet facet = entry.getValue(); + @SuppressWarnings({"rawtypes"}) final NamedList results = (NamedList) actualFacetResponse.get(facetKey); assertNotNull(facetKey + " key missing from: " + actualFacetResponse, results); + @SuppressWarnings({"unchecked", "rawtypes"}) final List buckets = (List) results.get("buckets"); assertNotNull(facetKey + " has null buckets: " + actualFacetResponse, buckets); @@ -492,7 +497,7 @@ private void assertFacetCountsAreCorrect(final AtomicInteger maxBucketsToCheck, 0, docsWithField); } - for (NamedList bucket : buckets) { + for (@SuppressWarnings({"rawtypes"})NamedList bucket : buckets) { final long count = ((Number) bucket.get("count")).longValue(); final String fieldVal = bucket.get("val").toString(); // int or stringified int diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKG.java b/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKG.java index 32f37080c71c..0992af80ab0e 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKG.java +++ b/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKG.java @@ -21,6 +21,8 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; @@ -46,6 +48,10 @@ import static org.apache.solr.search.facet.RelatednessAgg.computeRelatedness; import static org.apache.solr.search.facet.RelatednessAgg.roundTo5Digits; +import org.noggit.JSONUtil; +import org.noggit.JSONWriter; +import org.noggit.JSONWriter.Writable; + import org.junit.AfterClass; import org.junit.BeforeClass; import org.slf4j.Logger; @@ -73,6 +79,7 @@ *

    * * @see TestCloudJSONFacetJoinDomain + * @see TestCloudJSONFacetSKGEquiv */ @Slow public class TestCloudJSONFacetSKG extends SolrCloudTestCase { @@ -87,9 +94,18 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase { private static final int UNIQUE_FIELD_VALS = 50; /** Multivalued string field suffixes that can be randomized for testing diff facet/join code paths */ - private static final String[] STR_FIELD_SUFFIXES = new String[] { "_ss", "_sds", "_sdsS" }; + private static final String[] MULTI_STR_FIELD_SUFFIXES = new String[] + { "_multi_ss", "_multi_sds", "_multi_sdsS" }; /** Multivalued int field suffixes that can be randomized for testing diff facet/join code paths */ - private static final String[] INT_FIELD_SUFFIXES = new String[] { "_is", "_ids", "_idsS" }; + private static final String[] MULTI_INT_FIELD_SUFFIXES = new String[] + { "_multi_is", "_multi_ids", "_multi_idsS" }; + + /** Single Valued string field suffixes that can be randomized for testing diff facet code paths */ + private static final String[] SOLO_STR_FIELD_SUFFIXES = new String[] + { "_solo_s", "_solo_sd", "_solo_sdS" }; + /** Single Valued int field suffixes that can be randomized for testing diff facet code paths */ + private static final String[] SOLO_INT_FIELD_SUFFIXES = new String[] + { "_solo_i", "_solo_id", "_solo_idS" }; /** A basic client for operations at the cloud level, default collection will be set */ private static CloudSolrClient CLOUD_CLIENT; @@ -100,7 +116,10 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase { private static void createMiniSolrCloudCluster() throws Exception { // sanity check constants assertTrue("bad test constants: some suffixes will never be tested", - (STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) && (INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM)); + (MULTI_STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) && + (MULTI_INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM) && + (SOLO_STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) && + (SOLO_INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM)); // we need DVs on point fields to compute stats & facets if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); @@ -152,9 +171,14 @@ private static void createMiniSolrCloudCluster() throws Exception { for (int v = 0; v < numValsThisDoc; v++) { final String fieldValue = randFieldValue(fieldNum); - // for each fieldNum, there are actaully two fields: one string, and one integer - doc.addField(field(STR_FIELD_SUFFIXES, fieldNum), fieldValue); - doc.addField(field(INT_FIELD_SUFFIXES, fieldNum), fieldValue); + // multi valued: one string, and one integer + doc.addField(multiStrField(fieldNum), fieldValue); + doc.addField(multiIntField(fieldNum), fieldValue); + } + { // single valued: one string, and one integer + final String fieldValue = randFieldValue(fieldNum); + doc.addField(soloStrField(fieldNum), fieldValue); + doc.addField(soloIntField(fieldNum), fieldValue); } } CLOUD_CLIENT.add(doc); @@ -172,8 +196,8 @@ private static void createMiniSolrCloudCluster() throws Exception { * Given a (random) number, and a (static) array of possible suffixes returns a consistent field name that * uses that number and one of hte specified suffixes in it's name. 
* - * @see #STR_FIELD_SUFFIXES - * @see #INT_FIELD_SUFFIXES + * @see #MULTI_STR_FIELD_SUFFIXES + * @see #MULTI_INT_FIELD_SUFFIXES * @see #MAX_FIELD_NUM * @see #randFieldValue */ @@ -183,11 +207,21 @@ private static String field(final String[] suffixes, final int fieldNum) { final String suffix = suffixes[fieldNum % suffixes.length]; return "field_" + fieldNum + suffix; } - private static String strfield(final int fieldNum) { - return field(STR_FIELD_SUFFIXES, fieldNum); + /** Given a (random) number, returns a consistent field name for a multi valued string field */ + private static String multiStrField(final int fieldNum) { + return field(MULTI_STR_FIELD_SUFFIXES, fieldNum); } - private static String intfield(final int fieldNum) { - return field(INT_FIELD_SUFFIXES, fieldNum); + /** Given a (random) number, returns a consistent field name for a multi valued int field */ + private static String multiIntField(final int fieldNum) { + return field(MULTI_INT_FIELD_SUFFIXES, fieldNum); + } + /** Given a (random) number, returns a consistent field name for a single valued string field */ + private static String soloStrField(final int fieldNum) { + return field(SOLO_STR_FIELD_SUFFIXES, fieldNum); + } + /** Given a (random) number, returns a consistent field name for a single valued int field */ + private static String soloIntField(final int fieldNum) { + return field(SOLO_INT_FIELD_SUFFIXES, fieldNum); } /** @@ -224,19 +258,19 @@ private static void afterClass() throws Exception { public void testBespoke() throws Exception { { // trivial single level facet Map facets = new LinkedHashMap<>(); - TermFacet top = new TermFacet(strfield(9), UNIQUE_FIELD_VALS, 0, null); + TermFacet top = new TermFacet(multiStrField(9), UNIQUE_FIELD_VALS, 0, null); facets.put("top1", top); final AtomicInteger maxBuckets = new AtomicInteger(UNIQUE_FIELD_VALS); - assertFacetSKGsAreCorrect(maxBuckets, facets, strfield(7)+":11", strfield(5)+":9", "*:*"); + assertFacetSKGsAreCorrect(maxBuckets, facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); assertTrue("Didn't check a single bucket???", maxBuckets.get() < UNIQUE_FIELD_VALS); } { // trivial single level facet w/sorting on skg Map facets = new LinkedHashMap<>(); - TermFacet top = new TermFacet(strfield(9), UNIQUE_FIELD_VALS, 0, "skg desc"); + TermFacet top = new TermFacet(multiStrField(9), UNIQUE_FIELD_VALS, 0, "skg desc"); facets.put("top2", top); final AtomicInteger maxBuckets = new AtomicInteger(UNIQUE_FIELD_VALS); - assertFacetSKGsAreCorrect(maxBuckets, facets, strfield(7)+":11", strfield(5)+":9", "*:*"); + assertFacetSKGsAreCorrect(maxBuckets, facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); assertTrue("Didn't check a single bucket???", maxBuckets.get() < UNIQUE_FIELD_VALS); } @@ -249,9 +283,20 @@ public void testBespoke() throws Exception { // because it causes FacetField.returnsPartial() to be "true" for (int limit : new int[] { 999999999, -1 }) { Map facets = new LinkedHashMap<>(); - facets.put("top_facet_limit__" + limit, new TermFacet(strfield(9), limit, 0, "skg desc")); + facets.put("top_facet_limit__" + limit, new TermFacet(multiStrField(9), limit, 0, "skg desc")); + final AtomicInteger maxBuckets = new AtomicInteger(UNIQUE_FIELD_VALS); + assertFacetSKGsAreCorrect(maxBuckets, facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); + assertTrue("Didn't check a single bucket???", maxBuckets.get() < UNIQUE_FIELD_VALS); + } + } + { // allBuckets should have no impact... 
+ for (Boolean allBuckets : Arrays.asList( null, false, true )) { + Map facets = new LinkedHashMap<>(); + facets.put("allb__" + allBuckets, new TermFacet(multiStrField(9), + map("allBuckets", allBuckets, + "sort", "skg desc"))); final AtomicInteger maxBuckets = new AtomicInteger(UNIQUE_FIELD_VALS); - assertFacetSKGsAreCorrect(maxBuckets, facets, strfield(7)+":11", strfield(5)+":9", "*:*"); + assertFacetSKGsAreCorrect(maxBuckets, facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); assertTrue("Didn't check a single bucket???", maxBuckets.get() < UNIQUE_FIELD_VALS); } } @@ -294,7 +339,7 @@ private static String buildRandomQuery() { for (int c = 0; c < numClauses; c++) { final int fieldNum = random().nextInt(MAX_FIELD_NUM); // keep queries simple, just use str fields - not point of test - clauses[c] = strfield(fieldNum) + ":" + randFieldValue(fieldNum); + clauses[c] = multiStrField(fieldNum) + ":" + randFieldValue(fieldNum); } return buildORQuery(clauses); } @@ -319,13 +364,14 @@ private void assertFacetSKGsAreCorrect(final AtomicInteger maxBucketsToCheck, final SolrParams baseParams = params("rows","0", "fore", foreQ, "back", backQ); final SolrParams facetParams = params("q", query, - "json.facet", ""+TermFacet.toJSONFacetParamValue(expected,null)); + "json.facet", ""+TermFacet.toJSONFacetParamValue(expected)); final SolrParams initParams = SolrParams.wrapAppended(facetParams, baseParams); log.info("Doing full run: {}", initParams); QueryResponse rsp = null; // JSON Facets not (currently) available from QueryResponse... + @SuppressWarnings({"rawtypes"}) NamedList topNamedList = null; try { rsp = (new QueryRequest(initParams)).process(getRandClient(random())); @@ -337,6 +383,7 @@ private void assertFacetSKGsAreCorrect(final AtomicInteger maxBucketsToCheck, e.getMessage(), e); } try { + @SuppressWarnings({"rawtypes"}) final NamedList facetResponse = (NamedList) topNamedList.get("facets"); assertNotNull("null facet results?", facetResponse); assertEquals("numFound mismatch with top count?", @@ -359,16 +406,29 @@ private void assertFacetSKGsAreCorrect(final AtomicInteger maxBucketsToCheck, * Recursive helper method that walks the actual facet response, comparing the SKG results to * the expected output based on the equivalent filters generated from the original TermFacet. 
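A minimal sketch (not part of the patch; the count variables are hypothetical, and the argument order of the statically imported RelatednessAgg helpers is assumed from their use in this test) of the per-bucket check the recursive walker below performs:

    // given this bucket's intersection sizes with the fore/back sets...
    final double expectedScore = computeRelatedness(foreCount, foreSize,
                                                    backCount, backSize);
    // ...the rounded score reported by json.facet should match exactly:
    assertEquals(roundTo5Digits(expectedScore), skgBucket.get("relatedness"));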
*/ + @SuppressWarnings({"unchecked"}) private void assertFacetSKGsAreCorrect(final AtomicInteger maxBucketsToCheck, final Map expected, final SolrParams baseParams, - final NamedList actualFacetResponse) throws SolrServerException, IOException { + @SuppressWarnings({"rawtypes"})final NamedList actualFacetResponse) throws SolrServerException, IOException { for (Map.Entry entry : expected.entrySet()) { final String facetKey = entry.getKey(); final TermFacet facet = entry.getValue(); + + @SuppressWarnings({"rawtypes"}) final NamedList results = (NamedList) actualFacetResponse.get(facetKey); assertNotNull(facetKey + " key missing from: " + actualFacetResponse, results); + + if (null != results.get("allBuckets")) { + // if the response includes an allBuckets bucket, then there must not be an skg value + + // 'skg' key must not exist in the allBuckets bucket + assertEquals(facetKey + " has skg in allBuckets: " + results.get("allBuckets"), + Collections.emptyList(), + ((NamedList)results.get("allBuckets")).getAll("skg")); + } + @SuppressWarnings({"rawtypes"}) final List buckets = (List) results.get("buckets"); assertNotNull(facetKey + " has null buckets: " + actualFacetResponse, buckets); @@ -386,7 +446,7 @@ private void assertFacetSKGsAreCorrect(final AtomicInteger maxBucketsToCheck, // NOTE: it's important that we do this depth first -- not just because it's the easiest way to do it, // but because it means that our maxBucketsToCheck will ensure we do a lot of deep sub-bucket checking, // not just all the buckets of the top level(s) facet(s) - for (NamedList bucket : buckets) { + for (@SuppressWarnings({"rawtypes"})NamedList bucket : buckets) { final String fieldVal = bucket.get("val").toString(); // int or stringified int verifySKGResults(facetKey, facet, baseParams, fieldVal, bucket); @@ -406,6 +466,7 @@ private void assertFacetSKGsAreCorrect(final AtomicInteger maxBucketsToCheck, { // make sure we don't have any facet keys we don't expect // a little hackish because subfacets have extra keys... + @SuppressWarnings({"rawtypes"}) final LinkedHashSet expectedKeys = new LinkedHashSet(expected.keySet()); expectedKeys.add("count"); if (0 <= actualFacetResponse.indexOf("val",0)) { @@ -429,6 +490,7 @@ private void verifySKGResults(String facetKey, TermFacet facet, SolrParams filte throws SolrServerException, IOException { final String bucketQ = facet.field+":"+fieldVal; + @SuppressWarnings({"unchecked"}) final NamedList skgBucket = (NamedList) bucket.get("skg"); assertNotNull(facetKey + "/bucket:" + bucket.toString(), skgBucket); @@ -464,74 +526,78 @@ private void verifySKGResults(String facetKey, TermFacet facet, SolrParams filte skgBucket.get("relatedness")); } - - + /** * Trivial data structure for modeling a simple terms facet that can be written out as a json.facet param. 
* * Doesn't do any string escaping or quoting, so don't use whitespace or reserved json characters */ - private static final class TermFacet { - public final String field; + private static final class TermFacet implements Writable { + + /** non-skg subfacets for use in verification */ public final Map subFacets = new LinkedHashMap<>(); - public final Integer limit; // may be null - public final Integer overrequest; // may be null - public final String sort; // may be null + + private final Map jsonData = new LinkedHashMap<>(); + + public final String field; + /** + * @param field must be non null + * @param options can set any of the options used in a term facet other than field or (sub) facets + */ + public TermFacet(final String field, final Map options) { + assert null != field; + this.field = field; + + jsonData.putAll(options); + + // we don't allow these to be overridden by options, so set them now... + jsonData.put("type", "terms"); + jsonData.put("field", field); + // see class javadocs for why we always use refine:true & the query:'*:*' domain for this test. + jsonData.put("refine", true); + jsonData.put("domain", map("query","*:*")); + + } + + /** all params except field can be null */ + public TermFacet(String field, Integer limit, Integer overrequest, String sort) { + this(field, map("limit", limit, "overrequest", overrequest, "sort", sort)); + } + /** Simplified constructor asks for limit = # unique vals */ public TermFacet(String field) { this(field, UNIQUE_FIELD_VALS, 0, "skg desc"); } - public TermFacet(String field, Integer limit, Integer overrequest, String sort) { - assert null != field; - this.field = field; - this.limit = limit; - this.overrequest = overrequest; - this.sort = sort; + @Override + public void write(JSONWriter writer) { + // we need to include both our "real" subfacets, along with our SKG stat and 'processEmpty' + // (we don't put these in 'subFacets' to help keep the verification code simpler) + final Map sub = map("processEmpty", true, + "skg", "relatedness($fore,$back)"); + sub.putAll(subFacets); + + final Map out = map("facet", sub); + out.putAll(jsonData); + + writer.write(out); } - /** - * recursively generates the json.facet param value to use for testing this facet - */ - private CharSequence toJSONFacetParamValue() { - final String limitStr = (null == limit) ? "" : (", limit:" + limit); - final String overrequestStr = (null == overrequest) ? "" : (", overrequest:" + overrequest); - final String sortStr = (null == sort) ? "" : (", sort: '" + sort + "'"); - final StringBuilder sb - = new StringBuilder("{ type:terms, field:" + field + limitStr + overrequestStr + sortStr); - - // see class javadocs for why we always use refine:true & the query:'*:*' domain for this test. - sb.append(", refine: true, domain: { query: '*:*' }, facet:"); - sb.append(toJSONFacetParamValue(subFacets, "skg : 'relatedness($fore,$back)'")); - sb.append("}"); - return sb; - } - /** * Given a set of (possibly nested) facets, generates a suitable json.facet param value to * use for testing them against in a solr request. 
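For concreteness, a sketch of what the noggit-based serialization above emits, assuming the LinkedHashMap insertion order visible in the code (values illustrative):

    Map<String,Object> facets = map("top", new TermFacet(multiStrField(9), map("limit", 5)));
    TermFacet.toJSONFacetParamValue(facets);
    // => {"processEmpty":true,
    //     "top":{"facet":{"processEmpty":true,"skg":"relatedness($fore,$back)"},
    //            "limit":5,"type":"terms","field":"field_9_multi_ss",
    //            "refine":true,"domain":{"query":"*:*"}}}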
*/ - public static CharSequence toJSONFacetParamValue(final Map facets, - final String extraJson) { + public static String toJSONFacetParamValue(final Map facets) { assert null != facets; - if (0 == facets.size() && null == extraJson) { - return ""; - } - - StringBuilder sb = new StringBuilder("{ processEmpty: true, "); - for (String key : facets.keySet()) { - sb.append(key).append(" : ").append(facets.get(key).toJSONFacetParamValue()); - sb.append(" ,"); - } - if (null == extraJson) { - sb.setLength(sb.length() - 1); - } else { - sb.append(extraJson); - } - sb.append("}"); - return sb; + assert ! facets.isEmpty(); + + // see class javadocs for why we always want processEmpty + final Map jsonData = map("processEmpty", true); + jsonData.putAll(facets); + + return JSONUtil.toJSON(jsonData, -1); // no newlines } - + /** * Factory method for generating some random facets. * @@ -545,12 +611,84 @@ public static Map buildRandomFacets() { final int maxDepth = TestUtil.nextInt(random(), 0, (usually() ? 2 : 3)); return buildRandomFacets(keyCounter, maxDepth); } + + /** + * picks a random field to facet on. + * + * @see #field + * @return field name, never null + */ + public static String randomFacetField(final Random r) { + final int fieldNum = r.nextInt(MAX_FIELD_NUM); + switch(r.nextInt(4)) { + case 0: return multiStrField(fieldNum); + case 1: return multiIntField(fieldNum); + case 2: return soloStrField(fieldNum); + case 3: return soloIntField(fieldNum); + default: throw new RuntimeException("Broken case statement"); + } + } + + /** + * picks a random value for the "perSeg" param, biased in favor of interesting test cases + * + * @return a Boolean, may be null + */ + public static Boolean randomPerSegParam(final Random r) { + + switch(r.nextInt(4)) { + case 0: return true; + case 1: return false; + case 2: + case 3: return null; + default: throw new RuntimeException("Broken case statement"); + } + } + + /** + * picks a random value for the "prefix" param, biased in favor of interesting test cases + * + * @return a valid prefix value, may be null + */ + public static String randomPrefixParam(final Random r, final String facetField) { + + if (facetField.contains("multi_i") || facetField.contains("solo_i")) { + // never used a prefix on a numeric field + return null; + } + assert (facetField.contains("multi_s") || facetField.contains("solo_s")) + : "possible facet fields have changed, breaking test"; + + switch(r.nextInt(5)) { + case 0: return "2"; + case 1: return "3"; + case 2: + case 3: + case 4: return null; + default: throw new RuntimeException("Broken case statement"); + } + } + + /** + * picks a random value for the "prelim_sort" param, biased in favor of interesting test cases. + * + * @return a sort string (w/direction), or null to specify nothing (trigger default behavior) + * @see #randomSortParam + */ + public static String randomPrelimSortParam(final Random r, final String sort) { + if (null != sort && sort.startsWith("skg") && 1 == TestUtil.nextInt(random(), 0, 3)) { + return "count desc"; + } + return null; + } /** * picks a random value for the "sort" param, biased in favor of interesting test cases * * @return a sort string (w/direction), or null to specify nothing (trigger default behavior) * @see #randomLimitParam + * @see #randomAllBucketsParam + * @see #randomPrelimSortParam */ public static String randomSortParam(Random r) { @@ -618,6 +756,35 @@ public static Integer randomOverrequestParam(Random r) { // else.... 
either leave param unspecified (or redundently specify the -1 default) return r.nextBoolean() ? null : -1; } + + /** + * picks a random value for the "allBuckets" param, biased in favor of interesting test cases. + * This bucket should be ignored by relatedness, but inclusion should not cause any problems + * (or change the results) + * + *

    + * NOTE: allBuckets is meaningless in conjunction with the STREAM processor, so + * this method always returns null if sort is index asc. + *

+ * + * @return a Boolean, may be null + * @see SOLR-14514: allBuckets ignored by method:stream + */ + public static Boolean randomAllBucketsParam(final Random r, final String sort) { + + if ("index asc".equals(sort)) { + return null; + } + + switch(r.nextInt(4)) { + case 0: return true; + case 1: return false; + case 2: + case 3: return null; + default: throw new RuntimeException("Broken case statement"); + } + } /** * recursive helper method for building random facets @@ -632,12 +799,18 @@ private static Map buildRandomFacets(AtomicInteger keyCounter, if (keyCounter.get() < 3) { // a hard limit on the total number of facets (regardless of depth) to reduce OOM risk final String sort = randomSortParam(random()); - final Integer limit = randomLimitParam(random(), sort); - final Integer overrequest = randomOverrequestParam(random()); - final TermFacet facet = new TermFacet(field((random().nextBoolean() - ? STR_FIELD_SUFFIXES : INT_FIELD_SUFFIXES), - random().nextInt(MAX_FIELD_NUM)), - limit, overrequest, sort); + final String facetField = randomFacetField(random()); + final TermFacet facet = new TermFacet(facetField, + map("sort", sort, + "prelim_sort", randomPrelimSortParam(random(), sort), + "limit", randomLimitParam(random(), sort), + "overrequest", randomOverrequestParam(random()), + "prefix", randomPrefixParam(random(), facetField), + "allBuckets", randomAllBucketsParam(random(), sort), + "perSeg", randomPerSegParam(random()))); + + + results.put("facet_" + keyCounter.incrementAndGet(), facet); if (0 < maxDepth) { // if we're going wide, don't go deep @@ -675,5 +848,20 @@ public static void waitForRecoveriesToFinish(CloudSolrClient client) throws Exce client.getZkStateReader(), true, true, 330); } + + /** helper macro: fails on null keys, skips pairs with null values */ + public static Map map(Object... pairs) { + if (0 != pairs.length % 2) throw new IllegalArgumentException("uneven number of arguments"); + final Map map = new LinkedHashMap<>(); + for (int i = 0; i < pairs.length; i+=2) { + final Object key = pairs[i]; + final Object val = pairs[i+1]; + if (null == key) throw new NullPointerException("argument " + i); + if (null == val) continue; + + map.put(key.toString(), val); + } + return map; + } } diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKGEquiv.java b/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKGEquiv.java new file mode 100644 index 000000000000..cab7df4bdcea --- /dev/null +++ b/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKGEquiv.java @@ -0,0 +1,1001 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.search.facet; + +import java.io.IOException; +import java.lang.invoke.MethodHandles; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.ArrayList; +import java.util.Collections; +import java.util.EnumSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Random; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.lucene.util.TestUtil; +import org.apache.solr.BaseDistributedSearchTestCase; +import org.apache.solr.client.solrj.SolrClient; +import org.apache.solr.client.solrj.SolrServerException; +import org.apache.solr.client.solrj.embedded.JettySolrRunner; +import org.apache.solr.client.solrj.impl.CloudSolrClient; +import org.apache.solr.client.solrj.impl.HttpSolrClient; +import org.apache.solr.client.solrj.request.CollectionAdminRequest; +import org.apache.solr.client.solrj.request.QueryRequest; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.cloud.AbstractDistribZkTestBase; +import org.apache.solr.cloud.SolrCloudTestCase; +import org.apache.solr.common.SolrInputDocument; +import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.util.NamedList; +import static org.apache.solr.search.facet.FacetField.FacetMethod; + +import org.noggit.JSONUtil; +import org.noggit.JSONWriter; +import org.noggit.JSONWriter.Writable; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + + +/** + *

+ * A randomized test of nested facets using the relatedness() function that asserts the + * results are consistent and equivalent regardless of which method (ie: FacetFieldProcessor) + * is requested. + *

    + *

    + * This test is based on {@link TestCloudJSONFacetSKG} but does not + * force refine: true nor specify a domain: { 'query':'*:*' } for every facet, + * because this test does not attempt to prove the results with validation requests. + *

    + *

+ * This test only concerns itself with the equivalency of results + *

    + * + * @see TestCloudJSONFacetSKG + */ +public class TestCloudJSONFacetSKGEquiv extends SolrCloudTestCase { + + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + private static final String DEBUG_LABEL = MethodHandles.lookup().lookupClass().getName(); + private static final String COLLECTION_NAME = DEBUG_LABEL + "_collection"; + + private static final int DEFAULT_LIMIT = FacetField.DEFAULT_FACET_LIMIT; + private static final int MAX_FIELD_NUM = 15; + private static final int UNIQUE_FIELD_VALS = 50; + + /** Multi-Valued string field suffixes that can be randomized for testing diff facet code paths */ + private static final String[] MULTI_STR_FIELD_SUFFIXES = new String[] + { "_multi_ss", "_multi_sds", "_multi_sdsS" }; + /** Multi-Valued int field suffixes that can be randomized for testing diff facet code paths */ + private static final String[] MULTI_INT_FIELD_SUFFIXES = new String[] + { "_multi_is", "_multi_ids", "_multi_idsS" }; + + /** Single Valued string field suffixes that can be randomized for testing diff facet code paths */ + private static final String[] SOLO_STR_FIELD_SUFFIXES = new String[] + { "_solo_s", "_solo_sd", "_solo_sdS" }; + /** Single Valued int field suffixes that can be randomized for testing diff facet code paths */ + private static final String[] SOLO_INT_FIELD_SUFFIXES = new String[] + { "_solo_i", "_solo_id", "_solo_idS" }; + + /** A basic client for operations at the cloud level, default collection will be set */ + private static CloudSolrClient CLOUD_CLIENT; + /** One client per node */ + private static final ArrayList CLIENTS = new ArrayList<>(5); + + @BeforeClass + private static void createMiniSolrCloudCluster() throws Exception { + // sanity check constants + assertTrue("bad test constants: some suffixes will never be tested", + (MULTI_STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) && + (MULTI_INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM) && + (SOLO_STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) && + (SOLO_INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM)); + + // we need DVs on point fields to compute stats & facets + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); + + // multi replicas should not matter... + final int repFactor = usually() ? 1 : 2; + // ... but we definitely want to test multiple shards + final int numShards = TestUtil.nextInt(random(), 1, (usually() ? 
2 :3)); + final int numNodes = (numShards * repFactor); + + final String configName = DEBUG_LABEL + "_config-set"; + final Path configDir = Paths.get(TEST_HOME(), "collection1", "conf"); + + configureCluster(numNodes).addConfig(configName, configDir).configure(); + + Map collectionProperties = new LinkedHashMap<>(); + collectionProperties.put("config", "solrconfig-tlog.xml"); + collectionProperties.put("schema", "schema_latest.xml"); + CollectionAdminRequest.createCollection(COLLECTION_NAME, configName, numShards, repFactor) + .setProperties(collectionProperties) + .process(cluster.getSolrClient()); + + CLOUD_CLIENT = cluster.getSolrClient(); + CLOUD_CLIENT.setDefaultCollection(COLLECTION_NAME); + + waitForRecoveriesToFinish(CLOUD_CLIENT); + + for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { + CLIENTS.add(getHttpSolrClient(jetty.getBaseUrl() + "/" + COLLECTION_NAME + "/")); + } + + final int numDocs = atLeast(100); + for (int id = 0; id < numDocs; id++) { + SolrInputDocument doc = sdoc("id", ""+id); + + // NOTE: for each fieldNum, there are actually 4 fields: multi(str+int) + solo(str+int) + for (int fieldNum = 0; fieldNum < MAX_FIELD_NUM; fieldNum++) { + // NOTE: Some docs may not have any value in some fields + final int numValsThisDoc = TestUtil.nextInt(random(), 0, (usually() ? 5 : 10)); + for (int v = 0; v < numValsThisDoc; v++) { + final String fieldValue = randFieldValue(fieldNum); + + // multi valued: one string, and one integer + doc.addField(multiStrField(fieldNum), fieldValue); + doc.addField(multiIntField(fieldNum), fieldValue); + } + if (3 <= numValsThisDoc) { // use num values in multivalue to inform sparseness of single value + final String fieldValue = randFieldValue(fieldNum); + doc.addField(soloStrField(fieldNum), fieldValue); + doc.addField(soloIntField(fieldNum), fieldValue); + } + } + CLOUD_CLIENT.add(doc); + if (random().nextInt(100) < 1) { + CLOUD_CLIENT.commit(); // commit 1% of the time to create new segments + } + if (random().nextInt(100) < 5) { + CLOUD_CLIENT.add(doc); // duplicate the doc 5% of the time to create deleted docs + } + } + CLOUD_CLIENT.commit(); + + log.info("Created {} using numNodes={}, numShards={}, repFactor={}, numDocs={}", + COLLECTION_NAME, numNodes, numShards, repFactor, numDocs); + } + + /** + * Given a (random) number and a (static) array of possible suffixes, returns a consistent field name that + * uses that number and one of the specified suffixes in its name. 
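Worked examples of the naming scheme (these follow directly from the suffix arrays above, since the suffix is chosen by fieldNum % suffixes.length):

    field(MULTI_STR_FIELD_SUFFIXES, 9);  // -> "field_9_multi_ss"   (9 % 3 == 0)
    field(MULTI_INT_FIELD_SUFFIXES, 7);  // -> "field_7_multi_ids"  (7 % 3 == 1)
    field(SOLO_STR_FIELD_SUFFIXES, 5);   // -> "field_5_solo_sdS"   (5 % 3 == 2)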
+ * + * @see #MULTI_STR_FIELD_SUFFIXES + * @see #MULTI_INT_FIELD_SUFFIXES + * @see #MAX_FIELD_NUM + * @see #randFieldValue + */ + private static String field(final String[] suffixes, final int fieldNum) { + assert fieldNum < MAX_FIELD_NUM; + + final String suffix = suffixes[fieldNum % suffixes.length]; + return "field_" + fieldNum + suffix; + } + /** Given a (random) number, returns a consistent field name for a multi valued string field */ + private static String multiStrField(final int fieldNum) { + return field(MULTI_STR_FIELD_SUFFIXES, fieldNum); + } + /** Given a (random) number, returns a consistent field name for a multi valued int field */ + private static String multiIntField(final int fieldNum) { + return field(MULTI_INT_FIELD_SUFFIXES, fieldNum); + } + /** Given a (random) number, returns a consistent field name for a single valued string field */ + private static String soloStrField(final int fieldNum) { + return field(SOLO_STR_FIELD_SUFFIXES, fieldNum); + } + /** Given a (random) number, returns a consistent field name for a single valued int field */ + private static String soloIntField(final int fieldNum) { + return field(SOLO_INT_FIELD_SUFFIXES, fieldNum); + } + + /** + * Given a (random) field number, returns a random (integer based) value for that field. + * NOTE: The number of unique values in each field is constant according to {@link #UNIQUE_FIELD_VALS} + * but the precise range of values will vary for each unique field number, such that cross field joins + * will match fewer documents based on how far apart the field numbers are. + * + * @see #UNIQUE_FIELD_VALS + * @see #field + */ + private static String randFieldValue(final int fieldNum) { + return "" + (fieldNum + TestUtil.nextInt(random(), 1, UNIQUE_FIELD_VALS)); + } + + + @AfterClass + private static void afterClass() throws Exception { + if (null != CLOUD_CLIENT) { + CLOUD_CLIENT.close(); + CLOUD_CLIENT = null; + } + for (HttpSolrClient client : CLIENTS) { + client.close(); + } + CLIENTS.clear(); + } + + /** + * Sanity check that our method of varying the method param + * works and can be verified by inspecting the debug output of basic requests. + */ + public void testWhiteboxSanityMethodProcessorDebug() throws Exception { + // NOTE: json.facet debugging output can be wonky, particularly when dealing with cloud + // so for these queries we keep it simple: + // - only one "top" facet per request + // - no refinement + // even with those constraints in place, a single facet can (may/sometimes?) produce multiple debug + // blocks - apparently due to shard merging? So... + // - only inspect the "first" debug NamedList in the results + // + + // simple individual facet that sorts on an skg stat... + final TermFacet f = new TermFacet(soloStrField(9), 10, 0, "skg desc", null); + final Map facets = new LinkedHashMap<>(); + facets.put("str", f); + + final SolrParams facetParams = params("rows","0", + "debug","true", // SOLR-14451 + // *:* is the only "safe" query for this test, + // to ensure we always have at least one bucket for every facet + // so we can be confident in getting the debug we expect... 
+ "q", "*:*", + "fore", multiStrField(7)+":11", + "back", "*:*", + "json.facet", Facet.toJSONFacetParamValue(facets)); + + { // dv + final SolrParams params = SolrParams.wrapDefaults(params("method_val", "dv"), + facetParams); + final NamedList debug = getFacetDebug(params); + assertEquals(FacetFieldProcessorByArrayDV.class.getSimpleName(), debug.get("processor")); + } + { // dvhash + final SolrParams params = SolrParams.wrapDefaults(params("method_val", "dvhash"), + facetParams); + final NamedList debug = getFacetDebug(params); + assertEquals(FacetFieldProcessorByHashDV.class.getSimpleName(), debug.get("processor")); + } + } + + /** + * returns the FIRST NamedList (under the implicit 'null' FacetQuery) in the "facet-trace" output + * of the request. Should not be used with multiple "top level" facets + * (the output is too confusing in cloud mode to be confident where/qhy each NamedList comes from) + */ + private NamedList getFacetDebug(final SolrParams params) { + try { + final QueryResponse rsp = (new QueryRequest(params)).process(getRandClient(random())); + assertNotNull(params + " is null rsp?", rsp); + @SuppressWarnings({"rawtypes"}) + final NamedList topNamedList = rsp.getResponse(); + assertNotNull(params + " is null topNamedList?", topNamedList); + + // skip past the (implicit) top Facet query to get it's "sub-facets" (the real facets)... + @SuppressWarnings({"unchecked"}) + final List> facetDebug = + (List>) topNamedList.findRecursive("debug", "facet-trace", "sub-facet"); + assertNotNull(topNamedList + " ... null facet debug?", facetDebug); + assertFalse(topNamedList + " ... not even one facet debug?", facetDebug.isEmpty()); + return facetDebug.get(0); + } catch (Exception e) { + throw new RuntimeException("query failed: " + params + ": " + + e.getMessage(), e); + } + + } + + /** + * Test some small, hand crafted, but non-trivial queries that are + * easier to trace/debug then a pure random monstrosity. 
(ie: if something obvious gets broken, this test may fail faster and in a more obvious way than testRandom) + */ + public void testBespoke() throws Exception { + { // two trivial single level facets + Map facets = new LinkedHashMap<>(); + facets.put("str", new TermFacet(multiStrField(9), UNIQUE_FIELD_VALS, 0, null, null)); + facets.put("int", new TermFacet(multiIntField(9), UNIQUE_FIELD_VALS, 0, null, null)); + assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); + } + + { // trivial single level facet w/sorting on skg and refinement explicitly disabled + Map facets = new LinkedHashMap<>(); + facets.put("xxx", new TermFacet(multiStrField(9), UNIQUE_FIELD_VALS, 0, "skg desc", false)); + assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); + } + + { // trivial single level facet w/ perSeg + Map facets = new LinkedHashMap<>(); + facets.put("xxx", new TermFacet(multiStrField(9), + map("perSeg", true))); + + assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); + } + + { // trivial single level facet w/ prefix + Map facets = new LinkedHashMap<>(); + facets.put("xxx", new TermFacet(multiStrField(9), + map("prefix", "2"))); + + + assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); + } + + { // trivial single level facet w/ 2 diff ways to request "limit = (effectively) Infinite" + // to sanity check refinement of buckets missing from other shard in both cases + + // NOTE that these two queries & facets *should* be effectively identical given that the + // very large limit value is big enough that no shard will ever return that many terms, + // but the "limit=-1" case actually triggers slightly different code paths + // because it causes FacetField.returnsPartial() to be "true" + for (int limit : new int[] { 999999999, -1 }) { + Map facets = new LinkedHashMap<>(); + facets.put("top_facet_limit__" + limit, new TermFacet(multiStrField(9), limit, 0, "skg desc", true)); + assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); + } + } + + { // multi-valued facet field w/infinite limit and an extra (non-SKG) stat + final TermFacet xxx = new TermFacet(multiStrField(12), -1, 0, "count asc", false); + xxx.subFacets.put("sum", new SumFacet(multiIntField(4))); + final Map facets = new LinkedHashMap<>(); + facets.put("xxx", xxx); + assertFacetSKGsAreConsistent(facets, + buildORQuery(multiStrField(13) + ":26", + multiStrField(6) + ":33", + multiStrField(9) + ":24"), + buildORQuery(multiStrField(4) + ":27", + multiStrField(12) + ":18", + multiStrField(2) + ":28", + multiStrField(13) + ":50"), + "*:*"); + } + } + + public void testBespokeAllBuckets() throws Exception { + { // single level facet w/sorting on skg and allBuckets + Map facets = new LinkedHashMap<>(); + facets.put("xxx", new TermFacet(multiStrField(9), map("sort", "skg desc", + "allBuckets", true))); + + assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); + } + } + + public void testBespokePrefix() throws Exception { + { // trivial single level facet w/ prefix + Map facets = new LinkedHashMap<>(); + facets.put("xxx", new TermFacet(multiStrField(9), + map("sort", "skg desc", + "limit", -1, + "prefix", "2"))); + + assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); + } + } + + /** + * Given a few explicit "structures" of requests, test many permutations of various params/options. 
+ * This is more complex than {@link #testBespoke} but should still be easier to trace/debug than + * a pure random monstrosity. + */ + public void testBespokeStructures() throws Exception { + // we don't need to test every field, just make sure we test enough fields to hit every suffix... + final int maxFacetFieldNum = Collections.max(Arrays.asList(MULTI_STR_FIELD_SUFFIXES.length, + MULTI_INT_FIELD_SUFFIXES.length, + SOLO_STR_FIELD_SUFFIXES.length, + SOLO_INT_FIELD_SUFFIXES.length)); + + for (int facetFieldNum = 0; facetFieldNum < maxFacetFieldNum; facetFieldNum++) { + for (String facetFieldName : Arrays.asList(soloStrField(facetFieldNum), multiStrField(facetFieldNum))) { + for (int limit : Arrays.asList(10, -1)) { + for (String sort : Arrays.asList("count desc", "skg desc", "index asc")) { + for (Boolean refine : Arrays.asList(false, true)) { + { // 1 additional (non-SKG) stat + final TermFacet xxx = new TermFacet(facetFieldName, map("limit", limit, + "overrequest", 0, + "sort", sort, + "refine", refine)); + xxx.subFacets.put("sum", new SumFacet(soloIntField(3))); + final Map facets = new LinkedHashMap<>(); + facets.put("xxx1", xxx); + assertFacetSKGsAreConsistent(facets, + buildORQuery(multiStrField(11) + ":55", + multiStrField(0) + ":46"), + multiStrField(5)+":9", "*:*"); + } + { // multiple SKGs + final TermFacet xxx = new TermFacet(facetFieldName, map("limit", limit, + "overrequest", 0, + "sort", sort, + "refine", refine)); + xxx.subFacets.put("skg2", new RelatednessFacet(multiStrField(2)+":9", "*:*")); + final Map facets = new LinkedHashMap<>(); + facets.put("xxx2", xxx); + assertFacetSKGsAreConsistent(facets, + buildORQuery(multiStrField(11) + ":55", + multiStrField(0) + ":46"), + multiStrField(5)+":9", "*:*"); + } + { // multiple SKGs and multiple non-SKG stats + final TermFacet xxx = new TermFacet(facetFieldName, map("limit", limit, + "overrequest", 0, + "sort", sort, + "refine", refine)); + xxx.subFacets.put("minAAA", new SumFacet(soloIntField(3))); + xxx.subFacets.put("skg2", new RelatednessFacet(multiStrField(2)+":9", "*:*")); + xxx.subFacets.put("minBBB", new SumFacet(soloIntField(2))); + final Map facets = new LinkedHashMap<>(); + facets.put("xxx3", xxx); + assertFacetSKGsAreConsistent(facets, + buildORQuery(multiStrField(11) + ":55", + multiStrField(0) + ":46"), + multiStrField(5)+":9", "*:*"); + } + } + } + } + } + } + } + + public void testRandom() throws Exception { + + final int numIters = atLeast(10); + for (int iter = 0; iter < numIters; iter++) { + assertFacetSKGsAreConsistent(TermFacet.buildRandomFacets(), + buildRandomQuery(), buildRandomQuery(), buildRandomQuery()); + } + } + + /** + * Generates a random query string across the randomized fields/values in the index + * + * @see #randFieldValue + * @see #field + */ + private static String buildRandomQuery() { + if (0 == TestUtil.nextInt(random(), 0,10)) { + return "*:*"; + } + final int numClauses = TestUtil.nextInt(random(), 3, 10); + return buildRandomORQuery(numClauses); + } + /** The more clauses, the more docs it's likely to match */ + private static String buildRandomORQuery(final int numClauses) { + final String[] clauses = new String[numClauses]; + for (int c = 0; c < numClauses; c++) { + final int fieldNum = random().nextInt(MAX_FIELD_NUM); + // keep queries simple, just use str fields - not point of test + clauses[c] = multiStrField(fieldNum) + ":" + randFieldValue(fieldNum); + } + return buildORQuery(clauses); + } + + private static String buildORQuery(String... 
clauses) { + assert 0 < clauses.length; + return "(" + String.join(" OR ", clauses) + ")"; + } + + + /** + * Given a set of term facets, and top level query strings, asserts that + * the results of these queries are identical even when varying the method_val param + */ + private void assertFacetSKGsAreConsistent(final Map facets, + final String query, + final String foreQ, + final String backQ) throws SolrServerException, IOException { + final SolrParams basicParams = params("rows","0", + "q", query, "fore", foreQ, "back", backQ, + "json.facet", Facet.toJSONFacetParamValue(facets)); + + log.info("Doing full run: {}", basicParams); + try { + + // start by recording the results of the purely "default" behavior... + @SuppressWarnings({"rawtypes"}) + final NamedList expected = getFacetResponse(basicParams); + + // now loop over all processors and compare them to the "default"... + for (FacetMethod method : EnumSet.allOf(FacetMethod.class)) { + ModifiableSolrParams options = params("method_val", method.toString().toLowerCase(Locale.ROOT)); + + @SuppressWarnings({"rawtypes"}) + final NamedList actual = getFacetResponse(SolrParams.wrapAppended(options, basicParams)); + + // we can't rely on a trivial assertEquals() comparison... + // + // the order of the sub-facet keys can change between + // processors. (notably: method:enum vs method:smart when sort:"index asc") + // + // NOTE: this doesn't ignore the order of the buckets, + // it ignores the order of the keys in each bucket... + final String pathToMismatch = BaseDistributedSearchTestCase.compare + (expected, actual, 0, + Collections.singletonMap("buckets", BaseDistributedSearchTestCase.UNORDERED)); + if (null != pathToMismatch) { + log.error("{}: expected = {}", options, expected); + log.error("{}: actual = {}", options, actual); + fail("Mismatch: " + pathToMismatch + " using " + options); + } + } + } catch (AssertionError e) { + throw new AssertionError(basicParams + " ===> " + e.getMessage(), e); + } finally { + log.info("Ending full run"); + } + } + + /** + * We ignore {@link QueryResponse#getJsonFacetingResponse()} because it isn't as useful for + * doing a "deep equals" comparison across requests + */ + @SuppressWarnings({"rawtypes"}) + private NamedList getFacetResponse(final SolrParams params) { + try { + final QueryResponse rsp = (new QueryRequest(params)).process(getRandClient(random())); + assertNotNull(params + " is null rsp?", rsp); + final NamedList topNamedList = rsp.getResponse(); + assertNotNull(params + " is null topNamedList?", topNamedList); + final NamedList facetResponse = (NamedList) topNamedList.get("facets"); + assertNotNull("null facet results?", facetResponse); + assertEquals("numFound mismatch with top count?", + rsp.getResults().getNumFound(), ((Number)facetResponse.get("count")).longValue()); + + return facetResponse; + + } catch (Exception e) { + throw new RuntimeException("query failed: " + params + ": " + + e.getMessage(), e); + } + } + + private static interface Facet { // Mainly just a Marker Interface + + /** + * Given a set of (possibly nested) facets, generates a suitable json.facet param value to + * use for testing them against in a solr request. + */ + public static String toJSONFacetParamValue(final Map facets) { + assert null != facets; + assert ! facets.isEmpty(); + + return JSONUtil.toJSON(facets, -1); // no newlines + } + } + + /** + * trivial facet that is not SKG (and doesn't have any of it's special behavior) for the purposes + * of testing how TermFacet behaves with a mix of sub-facets. 
+ */ + private static final class SumFacet implements Facet { + private final String field; + public SumFacet(final String field) { + this.field = field; + } + @Override + public String toString() { // used in JSON by default + return "sum(" + field + ")"; + } + public static SumFacet buildRandom() { + final int fieldNum = random().nextInt(MAX_FIELD_NUM); + final boolean multi = random().nextBoolean(); + return new SumFacet(multi ? multiIntField(fieldNum) : soloIntField(fieldNum)); + } + } + + /** + * Trivial data structure for modeling a simple relatedness() facet that can be written out as a json.facet param. + * + * Doesn't do any string escaping or quoting, so don't use whitespace or reserved json characters + * + * The specified fore/back queries will be wrapped in localparam syntax in the resulting json, + * unless they are 'null' in which case $fore and $back refs will be used + * in their place, and must be set as request params (this allows "random" facets to still easily + * trigger the "nested facets re-using the same fore/back set for SKG situation) + */ + private static final class RelatednessFacet implements Facet, Writable { + public final Map jsonData = new LinkedHashMap<>(); + + /** Assumes null for fore/back queries w/no options */ + public RelatednessFacet() { + this(null, null, map()); + } + /** Assumes no options */ + public RelatednessFacet(final String foreQ, final String backQ) { + this(foreQ, backQ, map()); + } + public RelatednessFacet(final String foreQ, final String backQ, + final Map options) { + assert null != options; + + final String f = null == foreQ ? "$fore" : "{!v='"+foreQ+"'}"; + final String b = null == backQ ? "$back" : "{!v='"+backQ+"'}"; + + jsonData.putAll(options); + + // we don't allow these to be overridden by options, so set them now... + jsonData.put("type", "func"); + jsonData.put("func", "relatedness("+f+","+b+")"); + + } + @Override + public void write(JSONWriter writer) { + writer.write(jsonData); + } + + public static RelatednessFacet buildRandom() { + + final Map options = new LinkedHashMap<>(); + if (random().nextBoolean()) { + options.put("min_popularity", "0.001"); + } + + // bias this in favor of null fore/back since that's most realistic for typical nested facets + final boolean simple = random().nextBoolean(); + final String fore = simple ? null : buildRandomORQuery(TestUtil.nextInt(random(), 1, 5)); + final String back = simple ? null : buildRandomORQuery(TestUtil.nextInt(random(), 1, 9)); + + return new RelatednessFacet(fore, back, options); + } + } + + /** + * Trivial data structure for modeling a simple terms facet that can be written out as a json.facet param. + * Since the point of this test is SKG, every TermFacet implicitly has one fixed "skg" subFacet, but that + * can be overridden by the caller + * + * Doesn't do any string escaping or quoting, so don't use whitespace or reserved json characters + * + * The resulting facets all specify a method of ${method_val:smart} which may be + * overridden via request params. 
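A sketch of how these helper classes serialize in combination with the TermFacet defined below (key order approximate; the toString() fallback for the non-Writable SumFacet is implied by the "used in JSON by default" comment above):

    TermFacet xxx = new TermFacet(soloStrField(3), map("limit", 5));
    xxx.subFacets.put("sum", new SumFacet(multiIntField(4)));
    Facet.toJSONFacetParamValue(map("xxx", xxx));
    // => {"xxx":{"method":"${method_val:smart}","limit":5,"type":"terms",
    //            "field":"field_3_solo_s",
    //            "facet":{"skg":{"type":"func","func":"relatedness($fore,$back)"},
    //                     "sum":"sum(field_4_multi_ids)"}}}
    // sending this identical facet JSON with params("method_val","dv"), ("method_val","dvhash"),
    // etc. swaps the FacetFieldProcessor without changing anything else -- the point of this test.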
+ */ + private static final class TermFacet implements Facet, Writable { + + public final Map jsonData = new LinkedHashMap<>(); + public final Map subFacets = new LinkedHashMap<>(); + + /** + * @param field must be non null + * @param options can set any of the options used in a term facet other than field or (sub) facets + */ + public TermFacet(final String field, final Map options) { + assert null != field; + + jsonData.put("method", "${method_val:smart}"); + + jsonData.putAll(options); + + // we don't allow these to be overridden by options, so set them now... + jsonData.put("type", "terms"); + jsonData.put("field",field); + jsonData.put("facet", subFacets); + + subFacets.put("skg", new RelatednessFacet()); + } + + /** all params except field can be null */ + public TermFacet(String field, Integer limit, Integer overrequest, String sort, Boolean refine) { + this(field, map("limit", limit, "overrequest", overrequest, "sort", sort, "refine", refine)); + } + + @Override + public void write(JSONWriter writer) { + writer.write(jsonData); + } + + /** + * Generates a random TermFacet that does not contain any random sub-facets + * (beyond a single consistent "skg" stat) + */ + public static TermFacet buildRandom() { + final String sort = randomSortParam(random()); + final String facetField = randomFacetField(random()); + return new TermFacet(facetField, + map("limit", randomLimitParam(random()), + "overrequest", randomOverrequestParam(random()), + "prefix", randomPrefixParam(random(), facetField), + "perSeg", randomPerSegParam(random()), + "sort", sort, + "prelim_sort", randomPrelimSortParam(random(), sort), + "allBuckets", randomAllBucketsParam(random(), sort), + "refine", randomRefineParam(random()))); + } + + /** + * Factory method for generating some random facets. + * + * For simplicity, each facet will have a unique key name. + */ + public static Map buildRandomFacets() { + // for simplicity, use a unique facet key regardless of depth - simplifies verification + // and lets us enforce a hard limit on the total number of facets in a request + AtomicInteger keyCounter = new AtomicInteger(0); + + final int maxDepth = TestUtil.nextInt(random(), 0, (usually() ? 2 : 3)); + return buildRandomFacets(keyCounter, maxDepth); + } + + /** + * picks a random field to facet on. + * + * @see #field + * @return field name, never null + */ + public static String randomFacetField(final Random r) { + final int fieldNum = r.nextInt(MAX_FIELD_NUM); + switch(r.nextInt(4)) { + case 0: return multiStrField(fieldNum); + case 1: return multiIntField(fieldNum); + case 2: return soloStrField(fieldNum); + case 3: return soloIntField(fieldNum); + default: throw new RuntimeException("Broken case statement"); + } + } + + /** + * picks a random value for the "allBuckets" param, biased in favor of interesting test cases. + * This bucket should be ignored by relatedness, but inclusion should not cause any problems + * (or change the results) + * + *

    + * NOTE: allBuckets is meaningless in conjunction with the STREAM processor, so + * this method always returns null if sort is index asc. + *

+ * + * @return a Boolean, may be null + * @see SOLR-14514: allBuckets ignored by method:stream + */ + public static Boolean randomAllBucketsParam(final Random r, final String sort) { + + if ("index asc".equals(sort)) { + return null; + } + + switch(r.nextInt(4)) { + case 0: return true; + case 1: return false; + case 2: + case 3: return null; + default: throw new RuntimeException("Broken case statement"); + } + } + + /** + * picks a random value for the "refine" param, biased in favor of interesting test cases + * + * @return a Boolean, may be null + */ + public static Boolean randomRefineParam(final Random r) { + + switch(r.nextInt(3)) { + case 0: return null; + case 1: return true; + case 2: return false; + default: throw new RuntimeException("Broken case statement"); + } + } + + /** + * picks a random value for the "perSeg" param, biased in favor of interesting test cases + * + * @return a Boolean, may be null + */ + public static Boolean randomPerSegParam(final Random r) { + + switch(r.nextInt(4)) { + case 0: return true; + case 1: return false; + case 2: + case 3: return null; + default: throw new RuntimeException("Broken case statement"); + } + } + + /** + * picks a random value for the "prefix" param, biased in favor of interesting test cases + * + * @return a valid prefix value, may be null + */ + public static String randomPrefixParam(final Random r, final String facetField) { + + if (facetField.contains("multi_i") || facetField.contains("solo_i")) { + // never use a prefix on a numeric field + return null; + } + assert (facetField.contains("multi_s") || facetField.contains("solo_s")) + : "possible facet fields have changed, breaking test"; + + switch(r.nextInt(5)) { + case 0: return "2"; + case 1: return "3"; + case 2: + case 3: + case 4: return null; + default: throw new RuntimeException("Broken case statement"); + } + } + + /** + * picks a random value for the "sort" param, biased in favor of interesting test cases. + * Assumes every TermFacet will have at least one "skg" stat + * + * @return a sort string (w/direction), or null to specify nothing (trigger default behavior) + * @see #randomAllBucketsParam + * @see #randomPrelimSortParam + */ + public static String randomSortParam(final Random r) { + + final String dir = random().nextBoolean() ? "asc" : "desc"; + switch(r.nextInt(4)) { + case 0: return null; + case 1: return "count " + dir; + case 2: return "skg " + dir; + case 3: return "index " + dir; + default: throw new RuntimeException("Broken case statement"); + } + } + /** + * picks a random value for the "prelim_sort" param, biased in favor of interesting test cases.
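+ * Currently the only non-null value generated is a "count desc" pre-sort, and only (some of the time) when the main sort is an skg re-sort, since that is the combination that exercises the two-pass "prelim_sort then re-sort" code path.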
+ * + * @return a sort string (w/direction), or null to specify nothing (trigger default behavior) + * @see #randomSortParam + */ + public static String randomPrelimSortParam(final Random r, final String sort) { + + if (null != sort && sort.startsWith("skg") && 1 == TestUtil.nextInt(random(), 0, 3)) { + return "count desc"; + } + return null; + } + /** + * picks a random value for the "limit" param, biased in favor of interesting test cases + * + * @return a number to specify in the request, or null to specify nothing (trigger default behavior) + * @see #UNIQUE_FIELD_VALS + */ + public static Integer randomLimitParam(final Random r) { + + final int limit = 1 + r.nextInt((int) (UNIQUE_FIELD_VALS * 1.5F)); + + if (1 == TestUtil.nextInt(random(), 0, 3)) { + // bias in favor of just using default + return null; + } + + if (limit >= UNIQUE_FIELD_VALS && r.nextBoolean()) { + return -1; // unlimited + } + + return limit; + } + + /** + * picks a random value for the "overrequest" param, biased in favor of interesting test cases. + * + * @return a number to specify in the request, or null to specify nothing (trigger default behavior) + * @see #UNIQUE_FIELD_VALS + */ + public static Integer randomOverrequestParam(final Random r) { + switch(r.nextInt(10)) { + case 0: + case 1: + case 2: + case 3: + return 0; // 40% of the time, disable overrequest to better stress refinement + case 4: + case 5: + return r.nextInt(UNIQUE_FIELD_VALS); // 20% ask for less than what's needed + case 6: + return r.nextInt(Integer.MAX_VALUE); // 10%: completely random value, statistically more than enough + default: break; + } + // else.... either leave param unspecified (or redundantly specify the -1 default) + return r.nextBoolean() ? null : -1; + } + + /** + * recursive helper method for building random facets + * + * @param keyCounter used to ensure every generated facet has a unique key name + * @param maxDepth max possible depth allowed for the recursion, a lower value may be used depending on how many facets are returned at the current level. + */ + private static Map buildRandomFacets(AtomicInteger keyCounter, int maxDepth) { + final int numFacets = Math.max(1, TestUtil.nextInt(random(), -1, 3)); // 3/5th chance of being '1' + Map results = new LinkedHashMap<>(); + for (int i = 0; i < numFacets; i++) { + if (keyCounter.get() < 3) { // a hard limit on the total number of facets (regardless of depth) to reduce OOM risk + + final TermFacet facet = TermFacet.buildRandom(); + + results.put("facet_" + keyCounter.incrementAndGet(), facet); + if (0 < maxDepth) { + // if we're going wide, don't go deep + final int nextMaxDepth = Math.max(0, maxDepth - numFacets); + facet.subFacets.putAll(buildRandomFacets(keyCounter, TestUtil.nextInt(random(), 0, nextMaxDepth))); + } + + // we get one implicit RelatednessFacet automatically, + // randomly add 1 or 2 more ... 3/5th chance of being '0' + final int numExtraSKGStats = Math.max(0, TestUtil.nextInt(random(), -2, 2)); + for (int skgId = 0; skgId < numExtraSKGStats; skgId++) { + // sometimes we overwrite the trivial default "skg" with this one... + final String key = (0 == skgId && 0 == TestUtil.nextInt(random(), 0, 5)) ? "skg" : "skg" + skgId; + facet.subFacets.put(key, RelatednessFacet.buildRandom()); + } + + if (1 == TestUtil.nextInt(random(), 0, 4)) { + // occasionally add in a non-SKG related stat...
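+ // (uses a fixed 'sum' key, so at most one of these per facet; the point is just to mix a non-relatedness aggregation into the request)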
+ facet.subFacets.put("sum", SumFacet.buildRandom()); + } + } + } + return results; + } + } + + /** + * returns a random SolrClient -- either a CloudSolrClient, or an HttpSolrClient pointed + * at a node in our cluster + */ + public static SolrClient getRandClient(Random rand) { + int numClients = CLIENTS.size(); + int idx = TestUtil.nextInt(rand, 0, numClients); + + return (idx == numClients) ? CLOUD_CLIENT : CLIENTS.get(idx); + } + + /** + * Uses a random SolrClient to execture a request and returns only the numFound + * @see #getRandClient + */ + public static long getNumFound(final SolrParams req) throws SolrServerException, IOException { + return getRandClient(random()).query(req).getResults().getNumFound(); + } + + public static void waitForRecoveriesToFinish(CloudSolrClient client) throws Exception { + assert null != client.getDefaultCollection(); + AbstractDistribZkTestBase.waitForRecoveriesToFinish(client.getDefaultCollection(), + client.getZkStateReader(), + true, true, 330); + } + + /** helper macro: fails on null keys, skips pairs with null values */ + public static Map map(Object... pairs) { + if (0 != pairs.length % 2) throw new IllegalArgumentException("uneven number of arguments"); + final Map map = new LinkedHashMap<>(); + for (int i = 0; i < pairs.length; i+=2) { + final Object key = pairs[i]; + final Object val = pairs[i+1]; + if (null == key) throw new NullPointerException("arguemnt " + i); + if (null == val) continue; + + map.put(key.toString(), val); + } + return map; + } +} diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java index db6095538cc9..1112f0b21a34 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java +++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java @@ -118,11 +118,13 @@ public static Object fromJSON(String json) throws IOException { JSONParser parser = new JSONParser(json); ObjectBuilder ob = new ObjectBuilder(parser) { @Override + @SuppressWarnings({"rawtypes"}) public Object newObject() throws IOException { return new SimpleOrderedMap(); } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void addKeyVal(Object map, Object key, Object val) throws IOException { ((SimpleOrderedMap) map).add(key.toString(), val); } @@ -136,7 +138,8 @@ void doTestRefine(String facet, String... 
responsesAndTests) throws Exception { try { int nShards = responsesAndTests.length / 2; Object jsonFacet = Utils.fromJSONString(facet); - FacetParser parser = new FacetTopParser(req); + @SuppressWarnings({"rawtypes"}) + FacetParser parser = new FacetParser.FacetTopParser(req); FacetRequest facetRequest = parser.parse(jsonFacet); FacetMerger merger = null; @@ -1082,7 +1085,7 @@ public void doBasicRefinement(ModifiableSolrParams p) throws Exception { , "facets=={foo:555}" ); ****/ - for (String method : new String[]{"","dvhash","stream","uif","enum","stream","smart"}) { + for (String method : new String[]{"","dv", "dvhash","stream","uif","enum","stream","smart"}) { if (method.equals("")) { p.remove("terms"); } else { @@ -1212,10 +1215,13 @@ public void doBasicRefinement(ModifiableSolrParams p) throws Exception { "{ type:func, func:'relatedness($fore,$back)', min_popularity:0.2 }")) { client.testJQ(params(p, "rows", "0", "q", "*:*", "fore", "${xy_s}:X", "back", "${num_d}:[0 TO 100]", "json.facet", "{" - + " cat0:{ ${terms} type:terms, field: ${cat_s}, " + + " cat0:{ ${terms} type:terms, field: ${cat_s}, allBuckets:true, " + " sort:'count desc', limit:1, overrequest:0, refine:true, " + " facet:{ s:"+s+"} } }") - , "facets=={ count:8, cat0:{ buckets:[ " + , "facets=={ count:8, cat0:{ " + // 's' key must not exist in the allBuckets bucket + + " allBuckets: { count:8 }" + + " buckets:[ " + " { val:A, count:4, " + " s : { relatedness: 0.00496, " //+ " foreground_count: 3, " @@ -1231,11 +1237,14 @@ public void doBasicRefinement(ModifiableSolrParams p) throws Exception { // same query with a high min_pop should result in a -Infinity relatedness score client.testJQ(params(p, "rows", "0", "q", "*:*", "fore", "${xy_s}:X", "back", "${num_d}:[0 TO 100]", "json.facet", "{" - + " cat0:{ ${terms} type:terms, field: ${cat_s}, " + + " cat0:{ ${terms} type:terms, field: ${cat_s}, allBuckets:true," + " sort:'count desc', limit:1, overrequest:0, refine:true, " + " facet:{ s:{ type:func, func:'relatedness($fore,$back)', " + " min_popularity:0.6 } } } }") - , "facets=={ count:8, cat0:{ buckets:[ " + , "facets=={ count:8, cat0:{ " + // 's' key must not exist in the allBuckets bucket + + " allBuckets: { count:8 }" + + " buckets:[ " + " { val:A, count:4, " + " s : { relatedness: '-Infinity', " //+ " foreground_count: 3, " @@ -1248,6 +1257,22 @@ public void doBasicRefinement(ModifiableSolrParams p) throws Exception { "}" ); + // really special case: allBuckets when there are no regular buckets... 
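+ // (faceting on a field that matches no documents, so the response should contain an empty bucket list and only the allBuckets pseudo-bucket; the relatedness 's' key must be skipped there since relatedness is not computed for allBuckets)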
+ for (String refine : Arrays.asList("", "refine: true,", "refine:false,")) { + client.testJQ(params(p, "rows", "0", "q", "*:*", "fore", "${xy_s}:X", "back", "${num_d}:[0 TO 100]", + "json.facet", "{" + + " cat0:{ ${terms} type:terms, field: bogus_field_s, allBuckets:true, " + + refine + + " facet:{ s:{ type:func, func:'relatedness($fore,$back)' } } } }") + , "facets=={ count:8, cat0:{ " + // 's' key must not exist in the allBuckets bucket + + " allBuckets: { count:0 }" + + " buckets:[ ]" + + "} }" + ); + } + + // SKG under nested facet where some terms only exist on one shard { // sub-bucket order should change as sort direction changes @@ -1362,17 +1387,24 @@ public void doBasicRefinement(ModifiableSolrParams p) throws Exception { ); // test filling in missing "allBuckets" - client.testJQ(params(p, "q", "*:*", + client.testJQ(params(p, "q", "*:*", "json.facet", "{" + - " cat :{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:false, allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:false} } }" + + " cat0:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:false, allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:false} } }" + + ", cat1:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true, allBuckets:true, sort:'min asc', facet:{ min:'min(${num_d})' } }" + ", cat2:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true } } }" + - ", cat3:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true , facet:{f:'sum(${num_d})'} } } }" + + ", cat3:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true , facet:{sum:'sum(${num_d})'} } } }" + + ", cat4:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true , sort:'sum asc', facet:{sum:'sum(${num_d})'} } } }" + + // using overrefine only so we aren't fooled by 'local maximum' and ask all shards for 'B' + ", cat5:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true, overrefine:2, allBuckets:true, sort:'min desc' facet:{ min:'min(${num_d})', xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true, facet:{sum:'sum(${num_d})'} } } }" + "}" ) , "facets=={ count:8" + - ", cat:{ allBuckets:{count:8}, buckets:[ {val:A, count:3, xy:{buckets:[{count:2, val:X}], allBuckets:{count:3}}}] }" + + ",cat0:{ allBuckets:{count:8}, buckets:[ {val:A, count:3, xy:{buckets:[{count:2, val:X}], allBuckets:{count:3}}}] }" + + ",cat1:{ allBuckets:{count:8, min:-19.0 }, buckets:[ {val:A, count:4, min:-19.0 }] }" + ",cat2:{ allBuckets:{count:8}, buckets:[ {val:A, count:4, xy:{buckets:[{count:3, val:X}], allBuckets:{count:4}}}] }" + - ",cat3:{ allBuckets:{count:8}, buckets:[ {val:A, count:4, xy:{buckets:[{count:3, val:X, f:23.0}], allBuckets:{count:4, f:4.0}}}] }" + + ",cat3:{ allBuckets:{count:8}, buckets:[ {val:A, count:4, xy:{buckets:[{count:3, val:X, sum:23.0}], allBuckets:{count:4, sum:4.0}}}] }" + 
+ ",cat4:{ allBuckets:{count:8}, buckets:[ {val:A, count:4, xy:{buckets:[{count:1, val:Y, sum:-19.0}], allBuckets:{count:4, sum:4.0}}}] }" + + ",cat5:{ allBuckets:{count:8, min:-19.0 }, buckets:[ {val:B, count:4, min:-11.0, xy:{buckets:[{count:2, val:X, sum:6.0}], allBuckets:{count:4, sum:-2.0}}}] }" + "}" ); diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java index aea04b3bd259..d12c27fc7c52 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java +++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java @@ -239,6 +239,39 @@ public void indexSimple(Client client) throws Exception { client.commit(); } + public void testMultiValuedBucketReHashing() throws Exception { + Client client = Client.localClient(); + client.deleteByQuery("*:*", null); + // we want a domain with a small number of documents, and more facet (point) values then docs so + // that we force dvhash to increase the number of slots via resize... + // (NOTE: normal resizing won't happen w/o at least 1024 slots, but test static overrides this to '2') + client.add(sdoc("id", "1", + "f_sd", "qqq", + "f_ids", "4", "f_ids", "2", "f_ids", "999", + "x_ids", "3", "x_ids", "5", "x_ids", "7", + "z_ids", "42"), null); + client.add(sdoc("id", "2", + "f_sd", "nnn", + "f_ids", "44", "f_ids", "22", "f_ids", "999", + "x_ids", "33", "x_ids", "55", "x_ids", "77", + "z_ids", "666"), null); + client.add(sdoc("id", "3", + "f_sd", "ggg", + "f_ids", "444", "f_ids", "222", "f_ids", "999", + "x_ids", "333", "x_ids", "555", "x_ids", "777", + "z_ids", "1010101"), null); + client.commit(); + + // faceting on a multivalued point field sorting on a stat... + assertJQ(req("rows", "0", "q", "id:[1 TO 2]", "json.facet" + , "{ f : { type: terms, field: f_ids, limit: 1, sort: 'x desc', " + + " facet: { x : 'sum(x_ids)', z : 'min(z_ids)' } } }") + , "response/numFound==2" + , "facets/count==2" + , "facets/f=={buckets:[{ val:999, count:2, x:180.0, z:42 }]}" + ); + } + public void testBehaviorEquivilenceOfUninvertibleFalse() throws Exception { Client client = Client.localClient(); indexSimple(client); @@ -407,32 +440,117 @@ public void testSimpleSKG() throws Exception { // So all of these re/sort options should produce identical output (since the num buckets is < limit) // - Testing "index" sort allows the randomized use of "stream" processor as default to be tested. 
// - Testing (re)sorts on other stats sanity checks code paths where relatedness() is a "defered" Agg - assertJQ(req("q", "cat_s:[* TO *]", "rows", "0", - "fore", "where_s:NY", "back", "*:*", - "json.facet", "" - + "{x: { type: terms, field: 'cat_s', "+sort+", " - + " facet: { skg: 'relatedness($fore,$back)', y:'sum(num_i)', z:'min(num_i)' } } }") - , "facets=={count:5, x:{ buckets:[" - + " { val:'A', count:2, y:5.0, z:2, " - + " skg : { relatedness: 0.00554, " - //+ " foreground_count: 1, " - //+ " foreground_size: 2, " - //+ " background_count: 2, " - //+ " background_size: 6," - + " foreground_popularity: 0.16667," - + " background_popularity: 0.33333, }," - + " }, " - + " { val:'B', count:3, y:-3.0, z:-5, " - + " skg : { relatedness: 0.0, " // perfectly average and uncorrolated - //+ " foreground_count: 1, " - //+ " foreground_size: 2, " - //+ " background_count: 3, " - //+ " background_size: 6," - + " foreground_popularity: 0.16667," - + " background_popularity: 0.5 }," - + " } ] } } " - ); + for (String limit : Arrays.asList(", ", ", limit:5, ", ", limit:-1, ")) { + // results shouldn't change regardless of our limit param" + assertJQ(req("q", "cat_s:[* TO *]", "rows", "0", + "fore", "where_s:NY", "back", "*:*", + "json.facet", "" + + "{x: { type: terms, field: 'cat_s', "+sort + limit + + " facet: { skg: 'relatedness($fore,$back)', y:'sum(num_i)', z:'min(num_i)' } } }") + , "facets=={count:5, x:{ buckets:[" + + " { val:'A', count:2, y:5.0, z:2, " + + " skg : { relatedness: 0.00554, " + //+ " foreground_count: 1, " + //+ " foreground_size: 2, " + //+ " background_count: 2, " + //+ " background_size: 6," + + " foreground_popularity: 0.16667," + + " background_popularity: 0.33333, }," + + " }, " + + " { val:'B', count:3, y:-3.0, z:-5, " + + " skg : { relatedness: 0.0, " // perfectly average and uncorrelated + //+ " foreground_count: 1, " + //+ " foreground_size: 2, " + //+ " background_count: 3, " + //+ " background_size: 6," + + " foreground_popularity: 0.16667," + + " background_popularity: 0.5 }," + + " } ] } } " + ); + // same query with a prefix of 'B' should produce only a single bucket with exact same results + assertJQ(req("q", "cat_s:[* TO *]", "rows", "0", + "fore", "where_s:NY", "back", "*:*", + "json.facet", "" + + "{x: { type: terms, field: 'cat_s', prefix:'B', "+sort + limit + + " facet: { skg: 'relatedness($fore,$back)', y:'sum(num_i)', z:'min(num_i)' } } }") + , "facets=={count:5, x:{ buckets:[" + + " { val:'B', count:3, y:-3.0, z:-5, " + + " skg : { relatedness: 0.0, " // perfectly average and uncorrelated + //+ " foreground_count: 1, " + //+ " foreground_size: 2, " + //+ " background_count: 3, " + //+ " background_size: 6," + + " foreground_popularity: 0.16667," + + " background_popularity: 0.5 }," + + " } ] } } " + ); + } } + + // relatedness shouldn't be computed for allBuckets, but it also shouldn't cause any problems + // + // NOTE: we can't test this with 'index asc' because STREAM processor + // (which test may randomize as default) doesn't support allBuckets + // see: https://issues.apache.org/jira/browse/SOLR-14514 + // + for (String sort : Arrays.asList("sort:'y desc'", + "sort:'z desc'", + "sort:'skg desc'", + "prelim_sort:'count desc', sort:'skg desc'")) { + // the relatedness score of each of our cat_s values is (conveniently) also alphabetical order, + // (and the same order as 'sum(num_i) desc' & 'min(num_i) desc') + // + // So all of these re/sort options should produce identical output (since the num buckets is < limit) + // - Testing "index" sort
allows the randomized use of "stream" processor as default to be tested. + // - Testing (re)sorts on other stats sanity checks code paths where relatedness() is a "defered" Agg + for (String limit : Arrays.asList(", ", ", limit:5, ", ", limit:-1, ")) { + // results shouldn't change regardless of our limit param" + assertJQ(req("q", "cat_s:[* TO *]", "rows", "0", + "fore", "where_s:NY", "back", "*:*", + "json.facet", "" + + "{x: { type: terms, field: 'cat_s', allBuckets:true, "+sort + limit + + " facet: { skg: 'relatedness($fore,$back)', y:'sum(num_i)', z:'min(num_i)' } } }") + , "facets=={count:5, x:{ " + // 'skg' key must not exist in th allBuckets bucket + + " allBuckets: { count:5, y:2.0, z:-5 }," + + "buckets:[" + + " { val:'A', count:2, y:5.0, z:2, " + + " skg : { relatedness: 0.00554, " + //+ " foreground_count: 1, " + //+ " foreground_size: 2, " + //+ " background_count: 2, " + //+ " background_size: 6," + + " foreground_popularity: 0.16667," + + " background_popularity: 0.33333, }," + + " }, " + + " { val:'B', count:3, y:-3.0, z:-5, " + + " skg : { relatedness: 0.0, " // perfectly average and uncorrolated + //+ " foreground_count: 1, " + //+ " foreground_size: 2, " + //+ " background_count: 3, " + //+ " background_size: 6," + + " foreground_popularity: 0.16667," + + " background_popularity: 0.5 }," + + " } ] } } " + ); + + // really special case: allBuckets when there are no regular buckets... + assertJQ(req("q", "cat_s:[* TO *]", "rows", "0", + "fore", "where_s:NY", "back", "*:*", + "json.facet", "" + + "{x: { type: terms, field: 'bogus_field_s', allBuckets:true, "+sort + limit + + " facet: { skg: 'relatedness($fore,$back)', y:'sum(num_i)', z:'min(num_i)' } } }") + , "facets=={count:5, x:{ " + // 'skg' key (as well as 'z' since it's a min) must not exist in the allBuckets bucket + + " allBuckets: { count:0, y:0.0 }," + + "buckets:[ ]" + + " } } " + ); + + + } + } + // trivial sanity check that we can (re)sort on SKG after pre-sorting on count... 
// ...and it's only computed for the top N buckets (based on our pre-sort) @@ -3131,7 +3249,7 @@ public void testUniquesForMethod() throws Exception { " }" + "}" ) - , "response=={numFound:10,start:0,docs:[]}" + , "response=={numFound:10,start:0,numFoundExact:true,docs:[]}" , "facets=={ count:10," + "types:{" + " buckets:[ {val:page, count:10, in_books:2, via_field:2, via_query:2 } ]}" + diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetsStatsParsing.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetsStatsParsing.java index 886ad022458a..24ca10f0e935 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetsStatsParsing.java +++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetsStatsParsing.java @@ -58,6 +58,7 @@ public void testEquality() throws IOException { // NOTE: we don't bother trying to test 'min(foo_i)' because of SOLR-12559 // ...once that bug is fixed, several assertions below will need to change + @SuppressWarnings({"unchecked"}) final FacetRequest fr = FacetRequest.parse (req, (Map) Utils.fromJSONString ("{ " + @@ -98,6 +99,7 @@ public void testEquality() throws IOException { } } + @SuppressWarnings({"unchecked"}) public void testVerboseSyntaxWithLocalParams() throws IOException { // some parsers may choose to use "global" req params as defaults/shadows for // local params, but DebugAgg does not -- so use these to test that the diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetsWithNestedObjects.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetsWithNestedObjects.java index 6e05491bb34b..1e5b20357356 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetsWithNestedObjects.java +++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetsWithNestedObjects.java @@ -130,7 +130,7 @@ public void testFacetingOnParents() throws Exception { " }" + "}" ) - , "response=={numFound:2,start:0,docs:[" + + , "response=={numFound:2,start:0,'numFoundExact':true,docs:[" + " {id:book1_c1," + " comment_t:\"A great start to what looks like an epic series!\"}," + " {id:book2_c1," + @@ -167,7 +167,7 @@ public void testFacetingOnChildren() throws Exception { " }" + "}" ) - , "response=={numFound:2,start:0,docs:[" + + , "response=={numFound:2,start:0,'numFoundExact':true,docs:[" + " {id:book1," + " title_t:\"The Way of Kings\"}," + " {id:book2," + @@ -213,7 +213,7 @@ public void testExplicitFilterExclusions() throws Exception { " facet: {" + " in_books: \"unique(_root_)\" }}}}}" ) - , "response=={numFound:2,start:0,docs:[" + + , "response=={numFound:2,start:0,'numFoundExact':true,docs:[" + " {id:book1," + " title_t:\"The Way of Kings\"}," + " {id:book2," + @@ -267,7 +267,7 @@ public void testChildLevelFilterExclusions() throws Exception { " facet: {" + " in_books: \"unique(_root_)\" }}}}}" ) - , "response=={numFound:2,start:0,docs:[" + + , "response=={numFound:2,start:0,'numFoundExact':true,docs:[" + " {id:book1," + " title_t:\"The Way of Kings\"}," + " {id:book2," + @@ -324,7 +324,7 @@ public void testDomainFilterExclusionsInFilters() throws Exception { " in_books: \"unique(_root_)\" }}"+ "}" ) - , "response=={numFound:0,start:0,docs:[]}" + , "response=={numFound:0,start:0,'numFoundExact':true,docs:[]}" , "facets=={ count:0," + "comments_for_author:{" + " buckets:[ {val:mary, count:1, in_books:1} ]}," + @@ -364,7 +364,7 @@ public void testUniqueBlock() throws Exception { " }" + "}" ) - , "response=={numFound:2,start:0,docs:[]}" + , 
"response=={numFound:2,start:0,'numFoundExact':true,docs:[]}" , "facets=={ count:2," + "types:{" + " buckets:[ {val:review, count:5, in_books1:2, in_books2:2, " diff --git a/solr/core/src/test/org/apache/solr/search/function/NvlValueSourceParser.java b/solr/core/src/test/org/apache/solr/search/function/NvlValueSourceParser.java index 72e2cc68c984..e9d945323423 100644 --- a/solr/core/src/test/org/apache/solr/search/function/NvlValueSourceParser.java +++ b/solr/core/src/test/org/apache/solr/search/function/NvlValueSourceParser.java @@ -70,7 +70,7 @@ protected float func(int doc, FunctionValues vals) throws IOException { } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { /* initialize the value to consider as null */ Float nvlFloatValueArg = (Float) args.get("nvlFloatValue"); if (nvlFloatValueArg != null) { diff --git a/solr/core/src/test/org/apache/solr/search/function/TestMinMaxOnMultiValuedField.java b/solr/core/src/test/org/apache/solr/search/function/TestMinMaxOnMultiValuedField.java index f273ba598522..8993b257c7d0 100644 --- a/solr/core/src/test/org/apache/solr/search/function/TestMinMaxOnMultiValuedField.java +++ b/solr/core/src/test/org/apache/solr/search/function/TestMinMaxOnMultiValuedField.java @@ -416,6 +416,7 @@ public void testBadRequests() { public void testRandom() throws Exception { + @SuppressWarnings({"rawtypes"}) Comparable[] vals = new Comparable[TestUtil.nextInt(random(), 1, 17)]; // random ints @@ -532,17 +533,20 @@ protected void testSimpleDouble(final String fieldname) { } /** Tests a single doc with a few explicit values, as well as testing exists with and w/o values */ - protected void testSimpleValues(final String fieldname, final Class clazz, final Comparable... vals) { + @SuppressWarnings({"unchecked"}) + protected void testSimpleValues(final String fieldname, final Class clazz, + @SuppressWarnings({"rawtypes"})final Comparable... 
vals) { clearIndex(); assert 0 < vals.length; - + @SuppressWarnings({"rawtypes"}) Comparable min = vals[0]; + @SuppressWarnings({"rawtypes"}) Comparable max = vals[0]; final String type = clazz.getName(); final SolrInputDocument doc1 = sdoc("id", "1"); - for (Comparable v : vals) { + for (@SuppressWarnings({"rawtypes"})Comparable v : vals) { doc1.addField(fieldname, v); if (0 < min.compareTo(v)) { min = v; @@ -610,7 +614,8 @@ protected void testSimpleValues(final String fieldname, final Class clazz, fi * @param positive a "positive" value for this field (ie: in a function context, is more then the "0") */ protected void testSimpleSort(final String fieldname, - final Comparable negative, final Comparable positive) { + @SuppressWarnings({"rawtypes"})final Comparable negative, + @SuppressWarnings({"rawtypes"})final Comparable positive) { clearIndex(); int numDocsExpected = 1; diff --git a/solr/core/src/test/org/apache/solr/search/join/TestCloudNestedDocsSort.java b/solr/core/src/test/org/apache/solr/search/join/TestCloudNestedDocsSort.java index 04b8ed5b632d..8ecd19867b4d 100644 --- a/solr/core/src/test/org/apache/solr/search/join/TestCloudNestedDocsSort.java +++ b/solr/core/src/test/org/apache/solr/search/join/TestCloudNestedDocsSort.java @@ -107,8 +107,8 @@ public static void setupCluster() throws Exception { final boolean canPickMatchingChild = !chVals.isEmpty() && !parentFilter.isEmpty(); final boolean haveNtPickedMatchingChild = matchingParent==null ||matchingChild==null; if (canPickMatchingChild && haveNtPickedMatchingChild && usually()) { - matchingParent = (String) parentFilter.iterator().next(); - matchingChild = (String) chVals.iterator().next(); + matchingParent = parentFilter.iterator().next(); + matchingChild = chVals.iterator().next(); } } maxDocs += parent.getChildDocumentCount()+1; diff --git a/solr/core/src/test/org/apache/solr/search/join/TestNestedDocsSort.java b/solr/core/src/test/org/apache/solr/search/join/TestNestedDocsSort.java index b8c853d60362..c224fde55860 100644 --- a/solr/core/src/test/org/apache/solr/search/join/TestNestedDocsSort.java +++ b/solr/core/src/test/org/apache/solr/search/join/TestNestedDocsSort.java @@ -123,6 +123,7 @@ private SortField parse(String a) { public void testCachehits(){ final SolrQueryRequest req = req(); try { + @SuppressWarnings({"rawtypes"}) final SolrCache cache = req.getSearcher().getCache("perSegFilter"); assertNotNull(cache); final Map state = cache.getSolrMetricsContext().getMetricsSnapshot(); diff --git a/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPNoScore.java b/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPNoScore.java index afa45b9243f8..59c481bae22e 100644 --- a/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPNoScore.java +++ b/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPNoScore.java @@ -78,41 +78,41 @@ public void testJoin() throws Exception { // ); assertJQ(req("q","{!join from=dept_ss to=dept_id_s"+whateverScore()+"}title_s:MTS", "fl","id") - ,"/response=={'numFound':3,'start':0,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}" + ,"/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}" ); // empty from assertJQ(req("q","{!join from=noexist_s to=dept_id_s"+whateverScore()+"}*:*", "fl","id") - ,"/response=={'numFound':0,'start':0,'docs':[]}" + ,"/response=={'numFound':0,'start':0,'numFoundExact':true,'docs':[]}" ); // empty to assertJQ(req("q","{!join from=dept_ss to=noexist_s"+whateverScore()+"}*:*", "fl","id") - 
,"/response=={'numFound':0,'start':0,'docs':[]}" + ,"/response=={'numFound':0,'start':0,'numFoundExact':true,'docs':[]}" ); // self join... return everyone with she same title as Dave assertJQ(req("q","{!join from=title_s to=title_s"+whateverScore()+"}name_s:dave", "fl","id") - ,"/response=={'numFound':2,'start':0,'docs':[{'id':'3'},{'id':'4'}]}" + ,"/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'3'},{'id':'4'}]}" ); // find people that develop stuff assertJQ(req("q","{!join from=dept_id_s to=dept_ss"+whateverScore()+"}text_t:develop", "fl","id") - ,"/response=={'numFound':3,'start':0,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}" + ,"/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}" ); // self join on multivalued text_t field assertJQ(req("q","{!join from=title_s to=title_s"+whateverScore()+"}name_s:dave", "fl","id") - ,"/response=={'numFound':2,'start':0,'docs':[{'id':'3'},{'id':'4'}]}" + ,"/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'3'},{'id':'4'}]}" ); assertJQ(req("q","{!join from=dept_ss to=dept_id_s"+whateverScore()+"}title_s:MTS", "fl","id", "debugQuery","true") - ,"/response=={'numFound':3,'start':0,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}" + ,"/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}" ); // expected outcome for a sub query matching dave joined against departments final String davesDepartments = - "/response=={'numFound':2,'start':0,'docs':[{'id':'10'},{'id':'13'}]}"; + "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'13'}]}"; // straight forward query assertJQ(req("q","{!join from=dept_ss to=dept_id_s"+whateverScore()+"}name_s:dave", @@ -144,17 +144,17 @@ public void testJoin() throws Exception { // find people that develop stuff - but limit via filter query to a name of "john" // this tests filters being pushed down to queries (SOLR-3062) assertJQ(req("q","{!join from=dept_id_s to=dept_ss"+whateverScore()+"}text_t:develop", "fl","id", "fq", "name_s:john") - ,"/response=={'numFound':1,'start':0,'docs':[{'id':'1'}]}" + ,"/response=={'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'1'}]}" ); assertJQ(req("q","{!join from=dept_ss to=dept_id_s"+whateverScore()+"}title_s:MTS", "fl","id" ) - ,"/response=={'numFound':3,'start':0,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}"); + ,"/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}"); // find people that develop stuff, even if it's requested as single value assertJQ(req("q","{!join from=dept_id_s to=dept_ss"+whateverScore()+"}text_t:develop", "fl","id") - ,"/response=={'numFound':3,'start':0,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}" + ,"/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}" ); } @@ -278,6 +278,7 @@ public void testRandomJoin() throws Exception { Map resultSet = new LinkedHashMap(); resultSet.put("numFound", docList.size()); resultSet.put("start", 0); + resultSet.put("numFoundExact", true); resultSet.put("docs", sortedDocs); // todo: use different join queries for better coverage @@ -308,13 +309,13 @@ public void testRandomJoin() throws Exception { , "sort", "_docid_ asc" ,"rows","0" ); - log.error("faceting on from field: "+h.query(f)); + log.error("faceting on from field: {}", h.query(f)); } { final Map ps = ((MapSolrParams)req.getParams()).getMap(); final String q = ps.get("q"); ps.put("q", q.replaceAll("join 
score=none", "join")); - log.error("plain join: "+h.query(req)); + log.error("plain join: {}", h.query(req)); ps.put("q", q); } diff --git a/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPScore.java b/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPScore.java index 0dc0ccf2f72c..8813b64ce49c 100644 --- a/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPScore.java +++ b/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPScore.java @@ -88,7 +88,7 @@ public void testSimple() throws Exception { // Search for product assertJQ(req("q", "{!join from=" + idField + " to=" + toField + " score=None}name:name2", "fl", "id") - , "/response=={'numFound':2,'start':0,'docs':[{'id':'5'},{'id':'6'}]}"); + , "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'5'},{'id':'6'}]}"); /*Query joinQuery = JoinUtil.createJoinQuery(idField, false, toField, new TermQuery(new Term("name", "name2")), indexSearcher, ScoreMode.None); @@ -99,7 +99,7 @@ public void testSimple() throws Exception { assertEquals(5, result.scoreDocs[1].doc); */ assertJQ(req("q", "{!join from=" + idField + " to=" + toField + " score=None}name:name1", "fl", "id") - , "/response=={'numFound':2,'start':0,'docs':[{'id':'2'},{'id':'3'}]}"); + , "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'2'},{'id':'3'}]}"); /*joinQuery = JoinUtil.createJoinQuery(idField, false, toField, new TermQuery(new Term("name", "name1")), indexSearcher, ScoreMode.None); result = indexSearcher.search(joinQuery, 10); @@ -109,7 +109,7 @@ public void testSimple() throws Exception { // Search for offer assertJQ(req("q", "{!join from=" + toField + " to=" + idField + " score=None}id:5", "fl", "id") - , "/response=={'numFound':1,'start':0,'docs':[{'id':'4'}]}"); + , "/response=={'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'4'}]}"); /*joinQuery = JoinUtil.createJoinQuery(toField, false, idField, new TermQuery(new Term("id", "5")), indexSearcher, ScoreMode.None); result = indexSearcher.search(joinQuery, 10); assertEquals(1, result.totalHits); @@ -122,10 +122,10 @@ public void testSimple() throws Exception { public void testDeleteByScoreJoinQuery() throws Exception { indexDataForScorring(); String joinQuery = "{!join from=" + toField + " to=" + idField + " score=Max}title:random"; - assertJQ(req("q", joinQuery, "fl", "id"), "/response=={'numFound':2,'start':0,'docs':[{'id':'1'},{'id':'4'}]}"); + assertJQ(req("q", joinQuery, "fl", "id"), "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'}]}"); assertU(delQ(joinQuery)); assertU(commit()); - assertJQ(req("q", joinQuery, "fl", "id"), "/response=={'numFound':0,'start':0,'docs':[]}"); + assertJQ(req("q", joinQuery, "fl", "id"), "/response=={'numFound':0,'start':0,'numFoundExact':true,'docs':[]}"); } public void testSimpleWithScoring() throws Exception { @@ -133,7 +133,7 @@ public void testSimpleWithScoring() throws Exception { // Search for movie via subtitle assertJQ(req("q", "{!join from=" + toField + " to=" + idField + " score=Max}title:random", "fl", "id") - , "/response=={'numFound':2,'start':0,'docs':[{'id':'1'},{'id':'4'}]}"); + , "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'}]}"); //dump(req("q","{!scorejoin from="+toField+" to="+idField+" score=Max}title:random", "fl","id,score", "debug", "true")); /* Query joinQuery = @@ -149,7 +149,7 @@ public void testSimpleWithScoring() throws Exception { // dump(req("q","title:movie", "fl","id,score", "debug", 
"true")); assertJQ(req("q", "{!join from=" + toField + " to=" + idField + " score=Max}title:movie", "fl", "id") - , "/response=={'numFound':2,'start':0,'docs':[{'id':'4'},{'id':'1'}]}"); + , "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'4'},{'id':'1'}]}"); /*joinQuery = JoinUtil.createJoinQuery(toField, false, idField, new TermQuery(new Term("title", "movie")), indexSearcher, ScoreMode.Max); result = indexSearcher.search(joinQuery, 10); @@ -159,7 +159,7 @@ public void testSimpleWithScoring() throws Exception { // Score mode total assertJQ(req("q", "{!join from=" + toField + " to=" + idField + " score=Total}title:movie", "fl", "id") - , "/response=={'numFound':2,'start':0,'docs':[{'id':'1'},{'id':'4'}]}"); + , "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'}]}"); /* joinQuery = JoinUtil.createJoinQuery(toField, false, idField, new TermQuery(new Term("title", "movie")), indexSearcher, ScoreMode.Total); result = indexSearcher.search(joinQuery, 10); assertEquals(2, result.totalHits); @@ -168,7 +168,7 @@ public void testSimpleWithScoring() throws Exception { */ //Score mode avg assertJQ(req("q", "{!join from=" + toField + " to=" + idField + " score=Avg}title:movie", "fl", "id") - , "/response=={'numFound':2,'start':0,'docs':[{'id':'4'},{'id':'1'}]}"); + , "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'4'},{'id':'1'}]}"); /* joinQuery = JoinUtil.createJoinQuery(toField, false, idField, new TermQuery(new Term("title", "movie")), indexSearcher, ScoreMode.Avg); result = indexSearcher.search(joinQuery, 10); diff --git a/solr/core/src/test/org/apache/solr/search/join/another/BJQFilterAccessibleTest.java b/solr/core/src/test/org/apache/solr/search/join/another/BJQFilterAccessibleTest.java index 96ac205a9b32..d5044e6c548e 100644 --- a/solr/core/src/test/org/apache/solr/search/join/another/BJQFilterAccessibleTest.java +++ b/solr/core/src/test/org/apache/solr/search/join/another/BJQFilterAccessibleTest.java @@ -46,7 +46,7 @@ public void testAbilityToCreateBJQfromAnotherPackage() throws IOException { TermQuery childQuery = new TermQuery(new Term("child_s", "l")); Query parentQuery = new WildcardQuery(new Term("parent_s", "*")); ToParentBlockJoinQuery tpbjq = new ToParentBlockJoinQuery(childQuery, - BlockJoinParentQParser.getCachedFilter(req,parentQuery).getFilter(), ScoreMode.Max); + BlockJoinParentQParser.getCachedBitSetProducer(req,parentQuery), ScoreMode.Max); Assert.assertEquals(6, req.getSearcher().search(tpbjq,10).totalHits.value); } } diff --git a/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java b/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java index 2ab19e98ac34..420730599b43 100644 --- a/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java +++ b/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java @@ -24,6 +24,7 @@ import org.apache.solr.SolrTestCaseHS; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.CommonParams; +import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.search.CaffeineCache; import org.apache.solr.search.DocSet; @@ -31,6 +32,8 @@ import org.junit.BeforeClass; import org.junit.Test; +import static org.hamcrest.core.StringContains.containsString; + @LuceneTestCase.SuppressCodecs({"Lucene3x","Lucene40","Lucene41","Lucene42","Lucene45","Appending"}) public class TestJsonRequest extends SolrTestCaseHS { @@ -83,6 +86,8 @@ 
public void testDistribJsonRequest() throws Exception { public static void doJsonRequest(Client client, boolean isDistrib) throws Exception { addDocs(client); + ignoreException("Expected JSON"); + // test json param client.testJQ( params("json","{query:'cat_s:A'}") , "response/numFound==2" @@ -91,6 +96,7 @@ public static void doJsonRequest(Client client, boolean isDistrib) throws Except // invalid value SolrException ex = expectThrows(SolrException.class, () -> client.testJQ(params("q", "*:*", "json", "5"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); + assertThat(ex.getMessage(), containsString("Expected JSON Object but got Long=5")); // this is to verify other json params are not affected client.testJQ( params("q", "cat_s:A", "json.limit", "1"), @@ -179,7 +185,7 @@ public static void doJsonRequest(Client client, boolean isDistrib) throws Except , "response/docs==[{id:'5', x:5.5},{id:'4', x:5.5}]" ); - + doParamRefDslTest(client); // test templating before parsing JSON client.testJQ( params("json","${OPENBRACE} query:'cat_s:A' ${CLOSEBRACE}", "json","${OPENBRACE} filter:'where_s:NY'${CLOSEBRACE}", "OPENBRACE","{", "CLOSEBRACE","}") @@ -388,23 +394,55 @@ public static void doJsonRequest(Client client, boolean isDistrib) throws Except , "response/numFound==3", isDistrib? "" : "response/docs==[{id:'4'},{id:'1'},{id:'5'}]" ); - try { - client.testJQ(params("json", "{query:{'lucene':'foo_s:ignore_exception'}}")); // TODO: this seems like a reasonable capability that we would want to support in the future. It should be OK to make this pass. - fail(); - } catch (Exception e) { - assertTrue(e.getMessage().contains("foo_s")); - } + // TODO: this seems like a reasonable capability that we would want to support in the future. It should be OK to make this pass. + Exception e = expectThrows(Exception.class, () -> { + client.testJQ(params("json", "{query:{'lucene':'foo_s:ignore_exception'}}")); + }); + assertThat(e.getMessage(), containsString("foo_s")); - try { - // test failure on unknown parameter - client.testJQ(params("json", "{query:'cat_s:A', foobar_ignore_exception:5}") - , "response/numFound==2" - ); - fail(); - } catch (Exception e) { - assertTrue(e.getMessage().contains("foobar")); - } + // test failure on unknown parameter + e = expectThrows(Exception.class, () -> { + client.testJQ(params("json", "{query:'cat_s:A', foobar_ignore_exception:5}"), "response/numFound==2"); + }); + assertThat(e.getMessage(), containsString("foobar")); + resetExceptionIgnores(); + } + + private static void doParamRefDslTest(Client client) throws Exception { + // referencing in dsl //nestedqp + client.testJQ( params("json","{query: {query: {param:'ref1'}}}", "ref1","{!field f=cat_s}A") + , "response/numFound==2" + ); + // referencing json string param + client.testJQ( params("json", random().nextBoolean() ? + "{query:{query:{param:'ref1'}}}" // nestedqp + : "{query: {query: {query:{param:'ref1'}}}}", // nestedqp, v local param + "json",random().nextBoolean() + ? 
"{params:{ref1:'{!field f=cat_s}A'}}" // string param + : "{queries:{ref1:{field:{f:cat_s,query:A}}}}" ) // qdsl + , "response/numFound==2" + ); + { // shortest top level ref + final ModifiableSolrParams params = params("json","{query:{param:'ref1'}}"); + if (random().nextBoolean()) { + params.add("ref1","cat_s:A"); // either to plain string + } else { + params.add("json","{queries:{ref1:{field:{f:cat_s,query:A}}}}");// or to qdsl + } + client.testJQ( params, "response/numFound==2"); + } // ref in bool must + client.testJQ( params("json","{query:{bool: {must:[{param:fq1},{param:fq2}]}}}", + "json","{params:{fq1:'cat_s:A', fq2:'where_s:NY'}}", "json.fields", "id") + , "response/docs==[{id:'1'}]" + );// referencing dsl&strings from filters objs&array + client.testJQ( params("json.filter","{param:fq1}","json.filter","{param:fq2}", + "json", random().nextBoolean() ? + "{queries:{fq1:{lucene:{query:'cat_s:A'}}, fq2:{lucene:{query:'where_s:NY'}}}}" : + "{params:{fq1:'cat_s:A', fq2:'where_s:NY'}}", + "json.fields", "id", "q", "*:*") + , "response/docs==[{id:'1'}]" + ); } private static void testFilterCachingLocally(Client client) throws Exception { diff --git a/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java b/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java index f6968c5af77a..3de80490779a 100644 --- a/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java +++ b/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java @@ -162,6 +162,7 @@ public void testBoost() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testMinDF() throws Exception { QueryResponse queryResponse = cluster.getSolrClient().query(COLLECTION, diff --git a/solr/core/src/test/org/apache/solr/security/AuditLoggerIntegrationTest.java b/solr/core/src/test/org/apache/solr/security/AuditLoggerIntegrationTest.java index c116bbf760b0..1d538e2cb934 100644 --- a/solr/core/src/test/org/apache/solr/security/AuditLoggerIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/security/AuditLoggerIntegrationTest.java @@ -162,7 +162,7 @@ public void testAsyncQueueDrain() throws Exception { gate.release(preShutdownEventsAllowed); runThreeTestAdminCommands(); - final List events = new ArrayList + final List events = new ArrayList<> (harness.receiver.waitForAuditEvents(preShutdownEventsAllowed)); assertEquals(preShutdownEventsAllowed, events.size()); @@ -435,6 +435,10 @@ private void setupCluster(boolean async, String semaphoreName, boolean enableAut /** * Listening for socket callbacks in background thread from the custom CallbackAuditLoggerPlugin */ + // we don't really care about the InterruptedException that could be thrown from close in test code + // This all goes back to MiniSolrCloudCluster.close, which really _can_ throw + // an InterruptedException + @SuppressWarnings({"try"}) private class CallbackReceiver implements Runnable, AutoCloseable { private final ServerSocket serverSocket; private BlockingQueue queue = new LinkedBlockingDeque<>(); @@ -450,7 +454,9 @@ public int getPort() { @Override public void run() { try { - log.info("Listening for audit callbacks on on port {}", serverSocket.getLocalPort()); + if (log.isInfoEnabled()) { + log.info("Listening for audit callbacks on on port {}", serverSocket.getLocalPort()); + } Socket socket = serverSocket.accept(); BufferedReader reader = new BufferedReader(new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8)); while (!Thread.currentThread().isInterrupted()) { @@ -489,6 
+495,10 @@ public List waitForAuditEvents(final int expected) throws Interrupte } } + // we don't really care about the InterruptedException that could be thrown from close in test code + // This all goes back to MiniSolrCloudCluster.close, which really _can_ throw + // an InterruptedException + @SuppressWarnings({"try"}) private class AuditTestHarness implements AutoCloseable { CallbackReceiver receiver; int callbackPort; diff --git a/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java b/solr/core/src/test/org/apache/solr/security/BaseTestRuleBasedAuthorizationPlugin.java similarity index 79% rename from solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java rename to solr/core/src/test/org/apache/solr/security/BaseTestRuleBasedAuthorizationPlugin.java index 4e1e9aad3e71..e52b09a695d1 100644 --- a/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java +++ b/solr/core/src/test/org/apache/solr/security/BaseTestRuleBasedAuthorizationPlugin.java @@ -26,11 +26,13 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import org.apache.http.auth.BasicUserPrincipal; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.MapSolrParams; import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.util.CommandOperation; import org.apache.solr.common.util.Utils; import org.apache.solr.handler.DumpRequestHandler; import org.apache.solr.handler.ReplicationHandler; @@ -43,7 +45,6 @@ import org.apache.solr.request.SolrRequestHandler; import org.apache.solr.security.AuthorizationContext.CollectionRequest; import org.apache.solr.security.AuthorizationContext.RequestType; -import org.apache.solr.common.util.CommandOperation; import org.hamcrest.core.IsInstanceOf; import org.hamcrest.core.IsNot; import org.junit.Test; @@ -51,39 +52,55 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; import static java.util.Collections.singletonMap; +import static org.apache.solr.common.util.CommandOperation.captureErrors; import static org.apache.solr.common.util.Utils.getObjectByPath; import static org.apache.solr.common.util.Utils.makeMap; -import static org.apache.solr.common.util.CommandOperation.captureErrors; - -public class TestRuleBasedAuthorizationPlugin extends SolrTestCaseJ4 { - private static final int STATUS_OK = 200; - private static final int FORBIDDEN = 403; - private static final int PROMPT_FOR_CREDENTIALS = 401; - - String permissions = "{" + - " user-role : {" + - " steve: [dev,user]," + - " tim: [dev,admin]," + - " joe: [user]," + - " noble:[dev,user]" + - " }," + - " permissions : [" + - " {name:'schema-edit'," + - " role:admin}," + - " {name:'collection-admin-read'," + - " role:null}," + - " {name:collection-admin-edit ," + - " role:admin}," + - " {name:mycoll_update," + - " collection:mycoll," + - " path:'/update/*'," + - " role:[dev,admin]" + - " }," + - "{name:read , role:dev }," + - "{name:freeforall, path:'/foo', role:'*'}]}"; +/** + * Base class for testing RBAC. 
This will test the {@link RuleBasedAuthorizationPlugin} implementation + * but also serves as a base class for testing other sub classes + */ +@SuppressWarnings("unchecked") +public class BaseTestRuleBasedAuthorizationPlugin extends SolrTestCaseJ4 { + @SuppressWarnings({"rawtypes"}) + protected Map rules; + + final int STATUS_OK = 200; + final int FORBIDDEN = 403; + final int PROMPT_FOR_CREDENTIALS = 401; + + @Override + public void setUp() throws Exception { + super.setUp(); + resetPermissionsAndRoles(); + } + protected void resetPermissionsAndRoles() { + String permissions = "{" + + " user-role : {" + + " steve: [dev,user]," + + " tim: [dev,admin]," + + " joe: [user]," + + " noble:[dev,user]" + + " }," + + " permissions : [" + + " {name:'schema-edit'," + + " role:admin}," + + " {name:'collection-admin-read'," + + " role:null}," + + " {name:collection-admin-edit ," + + " role:admin}," + + " {name:mycoll_update," + + " collection:mycoll," + + " path:'/update/*'," + + " role:[dev,admin]" + + " }," + + "{name:read, role:dev }," + + "{name:freeforall, path:'/foo', role:'*'}]}"; + rules = (Map) Utils.fromJSONString(permissions); + } + @Test public void testBasicPermissions() { checkRules(makeMap("resource", "/update/json/docs", "httpMethod", "POST", @@ -99,7 +116,6 @@ public void testBasicPermissions() { "handler", new UpdateRequestHandler()) , STATUS_OK); - checkRules(makeMap("resource", "/update/json/docs", "httpMethod", "POST", "collectionRequests", "mycoll", @@ -117,8 +133,7 @@ public void testBasicPermissions() { "userPrincipal", "somebody", "collectionRequests", "mycoll", "httpMethod", "GET", - "handler", new SchemaHandler() - ) + "handler", new SchemaHandler()) , STATUS_OK); checkRules(makeMap("resource", "/schema/fields", @@ -169,7 +184,6 @@ public void testBasicPermissions() { "params", new MapSolrParams(singletonMap("action", "RELOAD"))) , PROMPT_FOR_CREDENTIALS); - checkRules(makeMap("resource", "/admin/collections", "userPrincipal", "somebody", "requestType", RequestType.ADMIN, @@ -193,24 +207,22 @@ public void testBasicPermissions() { "userPrincipal", "joe") , FORBIDDEN); - - Map rules = (Map) Utils.fromJSONString(permissions); - ((Map)rules.get("user-role")).put("cio","su"); - ((List)rules.get("permissions")).add( makeMap("name", "all", "role", "su")); + setUserRole("cio", "su"); + addPermission("all", "su"); checkRules(makeMap("resource", ReplicationHandler.PATH, "httpMethod", "POST", "userPrincipal", "tim", "handler", new ReplicationHandler(), "collectionRequests", singletonList(new CollectionRequest("mycoll")) ) - , FORBIDDEN, rules); + , FORBIDDEN); checkRules(makeMap("resource", ReplicationHandler.PATH, "httpMethod", "POST", "userPrincipal", "cio", "handler", new ReplicationHandler(), "collectionRequests", singletonList(new CollectionRequest("mycoll")) ) - , STATUS_OK, rules); + , STATUS_OK); checkRules(makeMap("resource", "/admin/collections", "userPrincipal", "tim", @@ -218,14 +230,13 @@ public void testBasicPermissions() { "collectionRequests", null, "handler", new CollectionsHandler(), "params", new MapSolrParams(singletonMap("action", "CREATE"))) - , STATUS_OK, rules); + , STATUS_OK); - rules = (Map) Utils.fromJSONString(permissions); - ((List)rules.get("permissions")).add( makeMap("name", "core-admin-edit", "role", "su")); - ((List)rules.get("permissions")).add( makeMap("name", "core-admin-read", "role", "user")); - ((Map)rules.get("user-role")).put("cio","su"); - ((List)rules.get("permissions")).add( makeMap("name", "all", "role", "su")); - permissions = 
Utils.toJSONString(rules); + resetPermissionsAndRoles(); + addPermission("core-admin-edit", "su"); + addPermission("core-admin-read", "user"); + setUserRole("cio", "su"); + addPermission("all", "su"); checkRules(makeMap("resource", "/admin/cores", "userPrincipal", null, @@ -243,7 +254,7 @@ public void testBasicPermissions() { "params", new MapSolrParams(singletonMap("action", "CREATE"))) , FORBIDDEN); - checkRules(makeMap("resource", "/admin/cores", + checkRules(makeMap("resource", "/admin/cores", "userPrincipal", "joe", "requestType", RequestType.ADMIN, "collectionRequests", null, @@ -257,14 +268,10 @@ public void testBasicPermissions() { "collectionRequests", null, "handler", new CoreAdminHandler(null), "params", new MapSolrParams(singletonMap("action", "CREATE"))) - ,STATUS_OK ); + ,STATUS_OK); - rules = (Map) Utils.fromJSONString(permissions); - List permissions = (List) rules.get("permissions"); - permissions.remove(permissions.size() -1);//remove the 'all' permission - permissions.add(makeMap("name", "test-params", "role", "admin", "path", "/x", "params", - makeMap("key", Arrays.asList("REGEX:(?i)val1", "VAL2")))); - this.permissions = Utils.toJSONString(rules); + resetPermissionsAndRoles(); + addPermission("test-params", "admin", "/x", makeMap("key", Arrays.asList("REGEX:(?i)val1", "VAL2"))); checkRules(makeMap("resource", "/x", "userPrincipal", null, @@ -289,6 +296,7 @@ public void testBasicPermissions() { "handler", new DumpRequestHandler(), "params", new MapSolrParams(singletonMap("key", "Val1"))) , PROMPT_FOR_CREDENTIALS); + checkRules(makeMap("resource", "/x", "userPrincipal", "joe", "requestType", RequestType.UNKNOWN, @@ -304,6 +312,7 @@ public void testBasicPermissions() { "handler", new DumpRequestHandler(), "params", new MapSolrParams(singletonMap("key", "Val2"))) , STATUS_OK); + checkRules(makeMap("resource", "/x", "userPrincipal", "joe", "requestType", RequestType.UNKNOWN, @@ -312,20 +321,24 @@ public void testBasicPermissions() { "params", new MapSolrParams(singletonMap("key", "VAL2"))) , FORBIDDEN); + Map customRules = (Map) Utils.fromJSONString( + "{permissions:[" + + " {name:update, role:[admin_role,update_role]}," + + " {name:read, role:[admin_role,update_role,read_role]}" + + "]}"); + + clearUserRoles(); + setUserRole("admin", "admin_role"); + setUserRole("update", "update_role"); + setUserRole("solr", "read_role"); + checkRules(makeMap("resource", "/update", "userPrincipal", "solr", "requestType", RequestType.UNKNOWN, "collectionRequests", "go", "handler", new UpdateRequestHandler(), "params", new MapSolrParams(singletonMap("key", "VAL2"))) - , FORBIDDEN, (Map) Utils.fromJSONString( "{user-role:{" + - " admin:[admin_role]," + - " update:[update_role]," + - " solr:[read_role]}," + - " permissions:[" + - " {name:update, role:[admin_role,update_role]}," + - " {name:read, role:[admin_role,update_role,read_role]}" + - "]}")); + , FORBIDDEN, customRules); } /* @@ -337,19 +350,16 @@ public void testBasicPermissions() { public void testAllPermissionAllowsActionsWhenUserHasCorrectRole() { SolrRequestHandler handler = new UpdateRequestHandler(); assertThat(handler, new IsInstanceOf(PermissionNameProvider.class)); + setUserRole("dev", "dev"); + setUserRole("admin", "admin"); + addPermission("all", "dev", "admin"); checkRules(makeMap("resource", "/update", "userPrincipal", "dev", "requestType", RequestType.UNKNOWN, "collectionRequests", "go", "handler", handler, "params", new MapSolrParams(singletonMap("key", "VAL2"))) - , STATUS_OK, (Map) Utils.fromJSONString( "{" + - " 
user-role:{" + - " dev:[dev_role]," + - " admin:[admin_role]}," + - " permissions:[" + - " {name:all, role:[dev_role, admin_role]}" + - "]}")); + , STATUS_OK); handler = new PropertiesRequestHandler(); assertThat(handler, new IsNot<>(new IsInstanceOf(PermissionNameProvider.class))); @@ -359,13 +369,7 @@ public void testAllPermissionAllowsActionsWhenUserHasCorrectRole() { "collectionRequests", "go", "handler", handler, "params", new MapSolrParams(emptyMap())) - , STATUS_OK, (Map) Utils.fromJSONString( "{" + - " user-role:{" + - " dev:[dev_role]," + - " admin:[admin_role]}," + - " permissions:[" + - " {name:all, role:[dev_role, admin_role]}" + - "]}")); + , STATUS_OK); } @@ -378,19 +382,16 @@ public void testAllPermissionAllowsActionsWhenUserHasCorrectRole() { public void testAllPermissionAllowsActionsWhenAssociatedRoleIsWildcard() { SolrRequestHandler handler = new UpdateRequestHandler(); assertThat(handler, new IsInstanceOf(PermissionNameProvider.class)); + setUserRole("dev", "dev"); + setUserRole("admin", "admin"); + addPermission("all", "*"); checkRules(makeMap("resource", "/update", "userPrincipal", "dev", "requestType", RequestType.UNKNOWN, "collectionRequests", "go", "handler", new UpdateRequestHandler(), "params", new MapSolrParams(singletonMap("key", "VAL2"))) - , STATUS_OK, (Map) Utils.fromJSONString( "{" + - " user-role:{" + - " dev:[dev_role]," + - " admin:[admin_role]}," + - " permissions:[" + - " {name:all, role:'*'}" + - "]}")); + , STATUS_OK); handler = new PropertiesRequestHandler(); assertThat(handler, new IsNot<>(new IsInstanceOf(PermissionNameProvider.class))); @@ -400,13 +401,7 @@ public void testAllPermissionAllowsActionsWhenAssociatedRoleIsWildcard() { "collectionRequests", "go", "handler", handler, "params", new MapSolrParams(emptyMap())) - , STATUS_OK, (Map) Utils.fromJSONString( "{" + - " user-role:{" + - " dev:[dev_role]," + - " admin:[admin_role]}," + - " permissions:[" + - " {name:all, role:'*'}" + - "]}")); + , STATUS_OK); } /* @@ -418,19 +413,16 @@ public void testAllPermissionAllowsActionsWhenAssociatedRoleIsWildcard() { public void testAllPermissionDeniesActionsWhenUserIsNotCorrectRole() { SolrRequestHandler handler = new UpdateRequestHandler(); assertThat(handler, new IsInstanceOf(PermissionNameProvider.class)); + setUserRole("dev", "dev"); + setUserRole("admin", "admin"); + addPermission("all", "admin"); checkRules(makeMap("resource", "/update", "userPrincipal", "dev", "requestType", RequestType.UNKNOWN, "collectionRequests", "go", "handler", new UpdateRequestHandler(), "params", new MapSolrParams(singletonMap("key", "VAL2"))) - , FORBIDDEN, (Map) Utils.fromJSONString( "{" + - " user-role:{" + - " dev:[dev_role]," + - " admin:[admin_role]}," + - " permissions:[" + - " {name:all, role:'admin_role'}" + - "]}")); + , FORBIDDEN); handler = new PropertiesRequestHandler(); assertThat(handler, new IsNot<>(new IsInstanceOf(PermissionNameProvider.class))); @@ -440,13 +432,29 @@ public void testAllPermissionDeniesActionsWhenUserIsNotCorrectRole() { "collectionRequests", "go", "handler", handler, "params", new MapSolrParams(emptyMap())) - , FORBIDDEN, (Map) Utils.fromJSONString( "{" + - " user-role:{" + - " dev:[dev_role]," + - " admin:[admin_role]}," + - " permissions:[" + - " {name:all, role:'admin_role'}" + - "]}")); + , FORBIDDEN); + } + + void addPermission(String permissionName, String role, String path, Map params) { + ((List)rules.get("permissions")).add( makeMap("name", permissionName, "role", role, "path", path, "params", params)); + } + + void 
removePermission(String name) { + List<Map<String, Object>> oldPerm = ((List) rules.get("permissions")); + List<Map<String, Object>> newPerm = oldPerm.stream().filter(p -> !p.get("name").equals(name)).collect(Collectors.toList()); + rules.put("permissions", newPerm); + } + + protected void addPermission(String permissionName, String... roles) { + ((List)rules.get("permissions")).add( makeMap("name", permissionName, "role", Arrays.asList(roles))); + } + + void clearUserRoles() { + rules.put("user-role", new HashMap()); + } + + protected void setUserRole(String user, String role) { + ((Map)rules.get("user-role")).put(user, role); } public void testEditRules() throws IOException { @@ -457,6 +465,7 @@ public void testEditRules() throws IOException { assertEquals("admin", perms.getVal("permissions[0]/role")); perms.runCmd("{set-permission : {name: config-edit, role: [admin, dev], index:2 } }", false); perms.runCmd("{set-permission : {name: config-edit, role: [admin, dev], index:1}}", true); + @SuppressWarnings({"rawtypes"}) Collection roles = (Collection) perms.getVal("permissions[0]/role"); assertEquals(2, roles.size()); assertTrue(roles.contains("admin")); @@ -477,15 +486,18 @@ } static class Perms { + @SuppressWarnings({"rawtypes"}) Map conf = new HashMap<>(); ConfigEditablePlugin plugin = new RuleBasedAuthorizationPlugin(); List parsedCommands; public void runCmd(String cmds, boolean failOnError) throws IOException { parsedCommands = CommandOperation.parse(new StringReader(cmds)); + @SuppressWarnings({"rawtypes"}) LinkedList ll = new LinkedList(); Map edited = plugin.edit(conf, parsedCommands); if(edited!= null) conf = edited; + @SuppressWarnings({"rawtypes"}) List maps = captureErrors(parsedCommands); if(failOnError){ assertTrue("unexpected error ,"+maps , maps.isEmpty()); @@ -498,13 +510,13 @@ public Object getVal(String path){ } } - private void checkRules(Map values, int expected) { - checkRules(values,expected,(Map) Utils.fromJSONString(permissions)); + void checkRules(Map values, int expected) { + checkRules(values, expected, rules); } - private void checkRules(Map values, int expected, Map permissions) { - AuthorizationContext context = new MockAuthorizationContext(values); - try (RuleBasedAuthorizationPlugin plugin = new RuleBasedAuthorizationPlugin()) { + void checkRules(Map values, int expected, Map permissions) { + AuthorizationContext context = getMockContext(values); + try (RuleBasedAuthorizationPluginBase plugin = createPlugin()) { plugin.init(permissions); AuthorizationResponse authResp = plugin.authorize(context); assertEquals(expected, authResp.statusCode); @@ -513,23 +525,31 @@ private void checkRules(Map values, int expected, Map permissions) { + AuthorizationContext getMockContext(Map values) { + return new MockAuthorizationContext(values) { + @Override + public Principal getUserPrincipal() { + Object userPrincipal = values.get("userPrincipal"); + return userPrincipal == null ? 
null : new BasicUserPrincipal(String.valueOf(userPrincipal)); + return params == null ? new MapSolrParams(new HashMap<>()) : params; } @Override @@ -538,6 +558,7 @@ public String getHttpHeader(String header) { } @Override + @SuppressWarnings({"rawtypes"}) public Enumeration getHeaderNames() { return null; } diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java index 09e2c0a2f092..9b977eeb55bf 100644 --- a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java @@ -139,6 +139,7 @@ public void testBasicAuth() throws Exception { "'set-user': {'harry':'HarryIsCool'}\n" + "}"; + @SuppressWarnings({"rawtypes"}) final SolrRequest genericReq; if (isUseV2Api) { genericReq = new V2Request.Builder("/cluster/security/authentication").withMethod(SolrRequest.METHOD.POST).build(); @@ -256,14 +257,15 @@ public void testBasicAuth() throws Exception { try { System.setProperty("basicauth", "harry:HarryIsUberCool"); tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), toolArgs)); + @SuppressWarnings({"rawtypes"}) Map obj = (Map) Utils.fromJSON(new ByteArrayInputStream(baos.toByteArray())); assertTrue(obj.containsKey("version")); assertTrue(obj.containsKey("startTime")); assertTrue(obj.containsKey("uptime")); assertTrue(obj.containsKey("memory")); } catch (Exception e) { - log.error("RunExampleTool failed due to: " + e + - "; stdout from tool prior to failure: " + baos.toString(StandardCharsets.UTF_8.name())); + log.error("RunExampleTool failed due to: {}; stdout from tool prior to failure: {}" + , e, baos.toString(StandardCharsets.UTF_8.name())); // logOk } SolrParams params = new MapSolrParams(Collections.singletonMap("q", "*:*")); @@ -320,6 +322,7 @@ private void assertNumberOfMetrics(int num) { } private QueryResponse executeQuery(ModifiableSolrParams params, String user, String pass) throws IOException, SolrServerException { + @SuppressWarnings({"rawtypes"}) SolrRequest req = new QueryRequest(params); req.setBasicAuthCredentials(user, pass); QueryResponse resp = (QueryResponse) req.process(cluster.getSolrClient(), COLLECTION); diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthOnSingleNodeTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthOnSingleNodeTest.java index bcfe60862b5a..766e09268046 100644 --- a/solr/core/src/test/org/apache/solr/security/BasicAuthOnSingleNodeTest.java +++ b/solr/core/src/test/org/apache/solr/security/BasicAuthOnSingleNodeTest.java @@ -19,11 +19,11 @@ import java.lang.invoke.MethodHandles; -import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.impl.Http2SolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.cloud.SolrCloudAuthTestCase; +import org.junit.After; import org.junit.Before; import org.junit.Test; import org.slf4j.Logger; @@ -45,13 +45,13 @@ public void setupCluster() throws Exception { .setBasicAuthCredentials("solr", "solr") .process(cluster.getSolrClient()); cluster.waitForActiveCollection(COLLECTION, 4, 4); + } - JettySolrRunner jetty = cluster.getJettySolrRunner(0); - jetty.stop(); - cluster.waitForJettyToStop(jetty); - jetty.start(); - cluster.waitForAllNodes(30); - cluster.waitForActiveCollection(COLLECTION, 4, 4); + @Override + @After + 
public void tearDown() throws Exception { + cluster.shutdown(); + super.tearDown(); } @Test @@ -68,6 +68,36 @@ public void basicTest() throws Exception { } } + @Test + public void testDeleteSecurityJsonZnode() throws Exception { + try (Http2SolrClient client = new Http2SolrClient.Builder(cluster.getJettySolrRunner(0).getBaseUrl().toString()) + .build()){ + try { + new QueryRequest(params("q", "*:*")).process(client, COLLECTION); + fail("Should throw exception due to authentication needed"); + } catch (Exception e) { /* Ignore */ } + + // Deleting security.json will disable security - before SOLR-9679 it would instead cause an exception + cluster.getZkClient().delete("/security.json", -1, false); + + int count = 0; + boolean done = false; + // Assert that security is turned off. This is async, so we retry up to 5s before failing the test + while (!done) { + try { + Thread.sleep(500); + count += 1; + new QueryRequest(params("q", "*:*")).process(client, COLLECTION); + done = true; + } catch (Exception e) { + if (count >= 10) { + fail("Failed 10 times to query without credentials after removing security.json"); + } + } + } + } + } + protected static final String STD_CONF = "{\n" + " \"authentication\":{\n" + " \"blockUnknown\": true,\n" + diff --git a/solr/core/src/test/org/apache/solr/security/CallbackAuditLoggerPlugin.java b/solr/core/src/test/org/apache/solr/security/CallbackAuditLoggerPlugin.java index c61a4c611285..cc6ad98c0e6f 100644 --- a/solr/core/src/test/org/apache/solr/security/CallbackAuditLoggerPlugin.java +++ b/solr/core/src/test/org/apache/solr/security/CallbackAuditLoggerPlugin.java @@ -60,7 +60,9 @@ public void audit(AuditEvent event) { if (! out.checkError()) { log.error("Output stream has an ERROR!"); } - log.info("Sent audit callback {} to localhost:{}", formatter.formatEvent(event), callbackPort); + if (log.isInfoEnabled()) { + log.info("Sent audit callback {} to localhost:{}", formatter.formatEvent(event), callbackPort); + } } @Override diff --git a/solr/core/src/test/org/apache/solr/security/CertAuthPluginTest.java b/solr/core/src/test/org/apache/solr/security/CertAuthPluginTest.java new file mode 100644 index 000000000000..fb32a217cea5 --- /dev/null +++ b/solr/core/src/test/org/apache/solr/security/CertAuthPluginTest.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
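The retry loop in testDeleteSecurityJsonZnode above is a plain poll-until-success pattern; a minimal standalone sketch of it follows, assuming a generic Callable probe (the helper name, attempt budget, and sleep interval are illustrative, not part of the patch):

import java.util.concurrent.Callable;

final class PollUntil {
  // Re-runs the probe until it stops throwing, sleeping between attempts;
  // gives up with an AssertionError after maxAttempts tries.
  static <T> T success(int maxAttempts, long sleepMs, Callable<T> probe) throws InterruptedException {
    Exception last = null;
    for (int attempt = 0; attempt < maxAttempts; attempt++) {
      try {
        return probe.call();      // condition met, e.g. the query finally succeeded
      } catch (Exception e) {
        last = e;                 // not ready yet, e.g. still rejected with a 401
        Thread.sleep(sleepMs);
      }
    }
    throw new AssertionError("condition not met after " + maxAttempts + " attempts", last);
  }
}

With such a helper, the loop above would reduce to a single call along the lines of PollUntil.success(10, 500, () -> new QueryRequest(params("q", "*:*")).process(client, COLLECTION)).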
+ */ +package org.apache.solr.security; + +import org.apache.solr.SolrTestCaseJ4; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import javax.security.auth.x500.X500Principal; +import javax.servlet.FilterChain; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import java.security.cert.X509Certificate; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class CertAuthPluginTest extends SolrTestCaseJ4 { + private CertAuthPlugin plugin; + + @BeforeClass + public static void setupMockito() { + SolrTestCaseJ4.assumeWorkingMockito(); + } + + @Before + public void setUp() throws Exception { + super.setUp(); + plugin = new CertAuthPlugin(); + } + + @Test + public void testAuthenticateOk() throws Exception { + X500Principal principal = new X500Principal("CN=NAME"); + X509Certificate certificate = mock(X509Certificate.class); + HttpServletRequest request = mock(HttpServletRequest.class); + + when(certificate.getSubjectX500Principal()).thenReturn(principal); + when(request.getAttribute(any())).thenReturn(new X509Certificate[] { certificate }); + + FilterChain chain = (req, rsp) -> assertEquals(principal, ((HttpServletRequest) req).getUserPrincipal()); + assertTrue(plugin.doAuthenticate(request, null, chain)); + + assertEquals(1, plugin.numAuthenticated.getCount()); + } + + @Test + public void testAuthenticateMissing() throws Exception { + HttpServletRequest request = mock(HttpServletRequest.class); + when(request.getAttribute(any())).thenReturn(null); + + HttpServletResponse response = mock(HttpServletResponse.class); + + assertFalse(plugin.doAuthenticate(request, response, null)); + verify(response).sendError(eq(401), anyString()); + + assertEquals(1, plugin.numMissingCredentials.getCount()); + } +} diff --git a/solr/core/src/test/org/apache/solr/security/HttpParamDelegationTokenPlugin.java b/solr/core/src/test/org/apache/solr/security/HttpParamDelegationTokenPlugin.java index bd1818e0e957..30badf1e47c4 100644 --- a/solr/core/src/test/org/apache/solr/security/HttpParamDelegationTokenPlugin.java +++ b/solr/core/src/test/org/apache/solr/security/HttpParamDelegationTokenPlugin.java @@ -191,7 +191,7 @@ public HttpParamDelegationTokenAuthenticationHandler() { @Override public void init(Properties config) throws ServletException { Properties conf = new Properties(); - for (Map.Entry entry : config.entrySet()) { + for (@SuppressWarnings({"rawtypes"})Map.Entry entry : config.entrySet()) { conf.setProperty((String) entry.getKey(), (String) entry.getValue()); } conf.setProperty(TOKEN_KIND, KerberosPlugin.DELEGATION_TOKEN_TYPE_DEFAULT); diff --git a/solr/core/src/test/org/apache/solr/security/JWTAuthPluginTest.java b/solr/core/src/test/org/apache/solr/security/JWTAuthPluginTest.java index 5ed1032c2f34..7b04c95daefe 100644 --- a/solr/core/src/test/org/apache/solr/security/JWTAuthPluginTest.java +++ b/solr/core/src/test/org/apache/solr/security/JWTAuthPluginTest.java @@ -113,8 +113,8 @@ protected static JwtClaims generateClaims() { claims.setClaim("claim1", "foo"); // additional claims/attributes about the subject can be added claims.setClaim("claim2", "bar"); // additional claims/attributes about the subject can be added claims.setClaim("claim3", "foo"); // additional claims/attributes about 
the subject can be added - List groups = Arrays.asList("group-one", "other-group", "group-three"); - claims.setStringListClaim("groups", groups); // multi-valued claims work too and will end up as a JSON array + List roles = Arrays.asList("group-one", "other-group", "group-three"); + claims.setStringListClaim("roles", roles); // multi-valued claims work too and will end up as a JSON array return claims; } @@ -325,6 +325,7 @@ public void scope() { JWTAuthPlugin.JWTAuthenticationResponse resp = plugin.authenticate(testHeader); assertTrue(resp.getErrorMessage(), resp.isAuthenticated()); + // When 'rolesClaim' is not defined in config, then all scopes are registered as roles Principal principal = resp.getPrincipal(); assertTrue(principal instanceof VerifiedUserRoles); Set roles = ((VerifiedUserRoles)principal).getVerifiedRoles(); @@ -332,6 +333,23 @@ public void scope() { assertTrue(roles.contains("solr:read")); } + @Test + public void roles() { + testConfig.put("rolesClaim", "roles"); + plugin.init(testConfig); + JWTAuthPlugin.JWTAuthenticationResponse resp = plugin.authenticate(testHeader); + assertTrue(resp.getErrorMessage(), resp.isAuthenticated()); + + // When 'rolesClaim' is defined in config, then roles from that claim are used instead of scopes + Principal principal = resp.getPrincipal(); + assertTrue(principal instanceof VerifiedUserRoles); + Set roles = ((VerifiedUserRoles)principal).getVerifiedRoles(); + assertEquals(3, roles.size()); + assertTrue(roles.contains("group-one")); + assertTrue(roles.contains("other-group")); + assertTrue(roles.contains("group-three")); + } + @Test public void wrongScope() { testConfig.put("scope", "wrong"); diff --git a/solr/core/src/test/org/apache/solr/security/JWTVerificationkeyResolverTest.java b/solr/core/src/test/org/apache/solr/security/JWTVerificationkeyResolverTest.java index 4b88787b0ea2..af87d59ac973 100644 --- a/solr/core/src/test/org/apache/solr/security/JWTVerificationkeyResolverTest.java +++ b/solr/core/src/test/org/apache/solr/security/JWTVerificationkeyResolverTest.java @@ -65,9 +65,11 @@ public class JWTVerificationkeyResolverTest extends SolrTestCaseJ4 { private KeyHolder k4; private KeyHolder k5; private List keysToReturnFromSecondJwk; + @SuppressWarnings({"rawtypes"}) private Iterator refreshSequenceForSecondJwk; @Before + @SuppressWarnings({"unchecked"}) public void setUp() throws Exception { super.setUp(); k1 = new KeyHolder("k1"); @@ -90,7 +92,7 @@ public void setUp() throws Exception { when(httpsJwksFactory.createList(anyList())).thenReturn(asList(firstJwkList, secondJwkList)); JWTIssuerConfig issuerConfig = new JWTIssuerConfig("primary").setIss("foo").setJwksUrl(asList("url1", "url2")); - issuerConfig.setHttpsJwksFactory(httpsJwksFactory); + JWTIssuerConfig.setHttpsJwksFactory(httpsJwksFactory); resolver = new JWTVerificationkeyResolver(Arrays.asList(issuerConfig), true); assumeWorkingMockito(); diff --git a/solr/core/src/test/org/apache/solr/security/MockAuditLoggerPlugin.java b/solr/core/src/test/org/apache/solr/security/MockAuditLoggerPlugin.java index f1c7abb05cce..db995a9d7337 100644 --- a/solr/core/src/test/org/apache/solr/security/MockAuditLoggerPlugin.java +++ b/solr/core/src/test/org/apache/solr/security/MockAuditLoggerPlugin.java @@ -39,7 +39,9 @@ public class MockAuditLoggerPlugin extends AuditLoggerPlugin { public void audit(AuditEvent event) { events.add(event); incrementType(event.getEventType().name()); - log.info("#{} - {}", events.size(), typeCounts); + if (log.isInfoEnabled()) { + log.info("#{} - {}", 
events.size(), typeCounts); + } } private void incrementType(String type) { diff --git a/solr/core/src/test/org/apache/solr/security/MockAuthorizationPlugin.java b/solr/core/src/test/org/apache/solr/security/MockAuthorizationPlugin.java index 1062f60fcfb2..d58a4993f292 100644 --- a/solr/core/src/test/org/apache/solr/security/MockAuthorizationPlugin.java +++ b/solr/core/src/test/org/apache/solr/security/MockAuthorizationPlugin.java @@ -48,7 +48,7 @@ public AuthorizationResponse authorize(AuthorizationContext context) { return new AuthorizationResponse(200); } if (uname == null) uname = context.getParams().get("uname"); - log.info("User request: " + uname); + log.info("User request: {}", uname); if (uname == null || denyUsers.contains(uname)) return new AuthorizationResponse(403); else diff --git a/solr/core/src/test/org/apache/solr/security/PrincipalWithUserRoles.java b/solr/core/src/test/org/apache/solr/security/PrincipalWithUserRoles.java new file mode 100644 index 000000000000..8d27d0b00d53 --- /dev/null +++ b/solr/core/src/test/org/apache/solr/security/PrincipalWithUserRoles.java @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.security; + +import java.security.Principal; +import java.util.Objects; +import java.util.Set; + +/** + * Type of Principal object that can also contain a list of roles the user has. + * One use case is to keep track of user-role mappings in an Identity Server + * external to Solr and pass the information to Solr in a signed JWT token or in + * another secure manner. The role information can then be used to authorize + * requests without the need to maintain or look up what roles each user belongs to. + */ +public class PrincipalWithUserRoles implements Principal, VerifiedUserRoles { + private final String username; + + private final Set<String> roles; + + /** + * User principal with a user name as well as one or more roles that they belong to + * @param username string with user name for user + * @param roles a set of roles that we know this user belongs to, or an empty set for no roles + */ + public PrincipalWithUserRoles(final String username, Set<String> roles) { + super(); + Objects.requireNonNull(username, "User name was null"); + Objects.requireNonNull(roles, "User roles was null"); + this.username = username; + this.roles = roles; + } + + /** + * Returns the name of this principal. + * + * @return the name of this principal. 
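A hedged usage sketch for this class (the user name and roles are invented for illustration, and Set.of assumes Java 9+):
 * <pre>
 *   Principal p = new PrincipalWithUserRoles("alice", Set.of("dev", "admin"));
 *   // Roles travel with the already-verified principal, so no user-role lookup is needed:
 *   Set&lt;String&gt; roles = ((VerifiedUserRoles) p).getVerifiedRoles();
 * </pre>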
+ */ + @Override + public String getName() { + return this.username; + } + + /** + * Gets the set of roles + */ + @Override + public Set<String> getVerifiedRoles() { + return roles; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + PrincipalWithUserRoles that = (PrincipalWithUserRoles) o; + + if (!username.equals(that.username)) return false; + return roles.equals(that.roles); + } + + @Override + public int hashCode() { + int result = username.hashCode(); + result = 31 * result + roles.hashCode(); + return result; + } + + @Override + public String toString() { + return "PrincipalWithUserRoles{" + + "username='" + username + '\'' + + ", roles=" + roles + + '}'; + } +} diff --git a/solr/core/src/test/org/apache/solr/security/TestAuthorizationFramework.java b/solr/core/src/test/org/apache/solr/security/TestAuthorizationFramework.java index 086dd64dce1a..81c07e39a692 100644 --- a/solr/core/src/test/org/apache/solr/security/TestAuthorizationFramework.java +++ b/solr/core/src/test/org/apache/solr/security/TestAuthorizationFramework.java @@ -89,6 +89,7 @@ public void distribTearDown() throws Exception { } + @SuppressWarnings({"unchecked"}) public static void verifySecurityStatus(HttpClient cl, String url, String objPath, Object expected, int count) throws Exception { boolean success = false; String s = null; @@ -96,10 +97,12 @@ public static void verifySecurityStatus(HttpClient cl, String url, String objPat for (int i = 0; i < count; i++) { HttpGet get = new HttpGet(url); s = EntityUtils.toString(cl.execute(get, HttpClientUtil.createNewHttpClientRequestContext()).getEntity()); + @SuppressWarnings({"rawtypes"}) Map m = (Map) Utils.fromJSONString(s); Object actual = Utils.getObjectByPath(m, true, hierarchy); if (expected instanceof Predicate) { + @SuppressWarnings({"rawtypes"}) Predicate predicate = (Predicate) expected; if (predicate.test(actual)) { success = true; diff --git a/solr/core/src/test/org/apache/solr/security/TestExternalRoleRuleBasedAuthorizationPlugin.java b/solr/core/src/test/org/apache/solr/security/TestExternalRoleRuleBasedAuthorizationPlugin.java new file mode 100644 index 000000000000..c36cc255307f --- /dev/null +++ b/solr/core/src/test/org/apache/solr/security/TestExternalRoleRuleBasedAuthorizationPlugin.java @@ -0,0 +1,78 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
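Before the new test class below, a sketch of the security.json shape this plugin is meant to be driven by. This is an assumption for illustration: the same permission syntax as the rule-based plugin, but with no user-role section, because roles arrive on the authenticated principal rather than from configuration.

// Assumed configuration, illustrative only (not part of the patch).
static final String EXTERNAL_ROLE_SECURITY_JSON = "{" +
    " 'authorization': {" +
    "   'class': 'solr.ExternalRoleRuleBasedAuthorizationPlugin'," +
    "   'permissions': [" +
    "     {'name': 'read', 'role': ['dev', 'user']}," +
    "     {'name': 'all', 'role': 'admin'}" +
    "   ]" +
    " }" +
    "}";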
+ */ + +package org.apache.solr.security; + +import java.security.Principal; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; + +import org.apache.http.auth.BasicUserPrincipal; + +/** + * Tests {@link ExternalRoleRuleBasedAuthorizationPlugin} through simulating principals with roles attached + */ +public class TestExternalRoleRuleBasedAuthorizationPlugin extends BaseTestRuleBasedAuthorizationPlugin { + private HashMap principals; + + @Override + public void setUp() throws Exception { + super.setUp(); + + principals = new HashMap<>(); + setUserRoles("steve", "dev", "user"); + setUserRoles("tim", "dev", "admin"); + setUserRoles("joe", "user"); + setUserRoles("noble", "dev", "user"); + } + + protected void setUserRoles(String user, String... roles) { + principals.put(user, new PrincipalWithUserRoles(user, new HashSet<>(Arrays.asList(roles)))); + } + + @Override + protected void setUserRole(String user, String role) { + principals.put(user, new PrincipalWithUserRoles(user, Collections.singleton(role))); + } + + @Override + AuthorizationContext getMockContext(Map values) { + return new MockAuthorizationContext(values) { + @Override + public Principal getUserPrincipal() { + String userPrincipal = (String) values.get("userPrincipal"); + return userPrincipal == null ? null : + principals.get(userPrincipal) != null ? principals.get(userPrincipal) : + new BasicUserPrincipal(userPrincipal); + } + }; + } + + @Override + protected RuleBasedAuthorizationPluginBase createPlugin() { + return new ExternalRoleRuleBasedAuthorizationPlugin(); + } + + @Override + protected void resetPermissionsAndRoles() { + super.resetPermissionsAndRoles(); + rules.remove("user-role"); + } +} diff --git a/solr/core/src/test/org/apache/solr/security/TestSha256AuthenticationProvider.java b/solr/core/src/test/org/apache/solr/security/TestSha256AuthenticationProvider.java index c62354b58318..2d1752bd3819 100644 --- a/solr/core/src/test/org/apache/solr/security/TestSha256AuthenticationProvider.java +++ b/solr/core/src/test/org/apache/solr/security/TestSha256AuthenticationProvider.java @@ -35,8 +35,10 @@ public void testAuthenticate(){ String pwd = "Friendly"; String user = "marcus"; + @SuppressWarnings({"rawtypes"}) Map latestConf = createConfigMap(user, pwd); Map params = singletonMap(user, pwd); + @SuppressWarnings({"unchecked"}) Map result = zkAuthenticationProvider.edit(latestConf, Collections.singletonList(new CommandOperation("set-user",params ))); zkAuthenticationProvider = new Sha256AuthenticationProvider(); @@ -48,10 +50,12 @@ public void testAuthenticate(){ } + @SuppressWarnings({"unchecked"}) public void testBasicAuthCommands() throws IOException { try (BasicAuthPlugin basicAuthPlugin = new BasicAuthPlugin()) { basicAuthPlugin.init(createConfigMap("ignore", "me")); + @SuppressWarnings({"rawtypes"}) Map latestConf = createConfigMap("solr", "SolrRocks"); CommandOperation blockUnknown = new CommandOperation("set-property", singletonMap("blockUnknown", true)); diff --git a/solr/core/src/test/org/apache/solr/security/hadoop/ImpersonationUtil.java b/solr/core/src/test/org/apache/solr/security/hadoop/ImpersonationUtil.java index 00c2b714c493..f1ffe1d2b36d 100644 --- a/solr/core/src/test/org/apache/solr/security/hadoop/ImpersonationUtil.java +++ b/solr/core/src/test/org/apache/solr/security/hadoop/ImpersonationUtil.java @@ -50,6 +50,7 @@ static String getUsersFirstGroup() throws Exception { return group; } + @SuppressWarnings({"rawtypes"}) 
static SolrRequest getProxyRequest(String user, String doAs) { return new CollectionAdminRequest.List() { @Override diff --git a/solr/core/src/test/org/apache/solr/security/hadoop/TestDelegationWithHadoopAuth.java b/solr/core/src/test/org/apache/solr/security/hadoop/TestDelegationWithHadoopAuth.java index 9851710f817e..401bcd97f219 100644 --- a/solr/core/src/test/org/apache/solr/security/hadoop/TestDelegationWithHadoopAuth.java +++ b/solr/core/src/test/org/apache/solr/security/hadoop/TestDelegationWithHadoopAuth.java @@ -151,6 +151,7 @@ private void doSolrRequest(String token, int expectedStatusCode, HttpSolrClient assertEquals("Did not receieve excepted status code", expectedStatusCode, lastStatusCode); } + @SuppressWarnings({"rawtypes"}) private SolrRequest getAdminRequest(final SolrParams params) { return new CollectionAdminRequest.List() { @Override @@ -162,6 +163,7 @@ public SolrParams getParams() { }; } + @SuppressWarnings({"unchecked"}) private int getStatusCode(String token, final String user, final String op, HttpSolrClient client) throws Exception { SolrClient delegationTokenClient; @@ -182,6 +184,7 @@ private int getStatusCode(String token, final String user, final String op, Http ModifiableSolrParams p = new ModifiableSolrParams(); if (user != null) p.set(PseudoAuthenticator.USER_NAME, user); if (op != null) p.set("op", op); + @SuppressWarnings({"rawtypes"}) SolrRequest req = getAdminRequest(p); if (user != null || op != null) { Set queryParams = new HashSet<>(); @@ -200,7 +203,8 @@ private int getStatusCode(String token, final String user, final String op, Http } } - private void doSolrRequest(SolrClient client, SolrRequest request, + private void doSolrRequest(SolrClient client, + @SuppressWarnings({"rawtypes"})SolrRequest request, int expectedStatusCode) throws Exception { try { client.request(request); @@ -368,6 +372,7 @@ public void testDelegationTokenSolrClient() throws Exception { String token = getDelegationToken(null, USER_1, primarySolrClient); assertNotNull(token); + @SuppressWarnings({"rawtypes"}) SolrRequest request = getAdminRequest(new ModifiableSolrParams()); // test without token diff --git a/solr/core/src/test/org/apache/solr/security/hadoop/TestZkAclsWithHadoopAuth.java b/solr/core/src/test/org/apache/solr/security/hadoop/TestZkAclsWithHadoopAuth.java index 884665254063..481e0d88bef5 100644 --- a/solr/core/src/test/org/apache/solr/security/hadoop/TestZkAclsWithHadoopAuth.java +++ b/solr/core/src/test/org/apache/solr/security/hadoop/TestZkAclsWithHadoopAuth.java @@ -85,6 +85,7 @@ public static void tearDownClass() { } @Test + @SuppressWarnings({"try"}) public void testZkAcls() throws Exception { try (ZooKeeper keeper = new ZooKeeper(cluster.getZkServer().getZkAddress(), (int) TimeUnit.MINUTES.toMillis(1), arg0 -> {/* Do nothing */})) { diff --git a/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java b/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java index bc2aa198a06b..3a76690eaa7f 100644 --- a/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java +++ b/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java @@ -126,6 +126,7 @@ public void testStreamBody() throws Exception } @Test + @SuppressWarnings({"try"}) public void testStreamURL() throws Exception { URL url = getClass().getResource("/README"); @@ -149,6 +150,7 @@ public void testStreamURL() throws Exception } @Test + @SuppressWarnings({"try"}) public void testStreamFile() throws Exception { File file = getFile("README"); diff --git 
a/solr/core/src/test/org/apache/solr/spelling/DirectSolrSpellCheckerTest.java b/solr/core/src/test/org/apache/solr/spelling/DirectSolrSpellCheckerTest.java index 6106fb4bb78a..972f00b3751e 100644 --- a/solr/core/src/test/org/apache/solr/spelling/DirectSolrSpellCheckerTest.java +++ b/solr/core/src/test/org/apache/solr/spelling/DirectSolrSpellCheckerTest.java @@ -32,6 +32,7 @@ * Simple tests for {@link DirectSolrSpellChecker} */ @SuppressTempFileChecks(bugUrl = "https://issues.apache.org/jira/browse/SOLR-1877 Spellcheck IndexReader leak bug?") +@SuppressWarnings({"rawtypes"}) public class DirectSolrSpellCheckerTest extends SolrTestCaseJ4 { private static SpellingQueryConverter queryConverter; @@ -51,8 +52,10 @@ public static void beforeClass() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void test() throws Exception { DirectSolrSpellChecker checker = new DirectSolrSpellChecker(); + @SuppressWarnings({"rawtypes"}) NamedList spellchecker = new NamedList(); spellchecker.add("classname", DirectSolrSpellChecker.class.getName()); spellchecker.add(SolrSpellChecker.FIELD, "teststop"); diff --git a/solr/core/src/test/org/apache/solr/spelling/FileBasedSpellCheckerTest.java b/solr/core/src/test/org/apache/solr/spelling/FileBasedSpellCheckerTest.java index b52e411a4421..8fd0b4b903e3 100644 --- a/solr/core/src/test/org/apache/solr/spelling/FileBasedSpellCheckerTest.java +++ b/solr/core/src/test/org/apache/solr/spelling/FileBasedSpellCheckerTest.java @@ -39,6 +39,7 @@ public class FileBasedSpellCheckerTest extends SolrTestCaseJ4 { private static SpellingQueryConverter queryConverter; @BeforeClass + @SuppressWarnings({"rawtypes"}) public static void beforeClass() throws Exception { initCore("solrconfig.xml","schema.xml"); //Index something with a title @@ -57,8 +58,10 @@ public static void afterClass() { } @Test + @SuppressWarnings({"unchecked"}) public void test() throws Exception { FileBasedSpellChecker checker = new FileBasedSpellChecker(); + @SuppressWarnings({"rawtypes"}) NamedList spellchecker = new NamedList(); spellchecker.add("classname", FileBasedSpellChecker.class.getName()); @@ -94,8 +97,10 @@ public void test() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testFieldType() throws Exception { FileBasedSpellChecker checker = new FileBasedSpellChecker(); + @SuppressWarnings({"rawtypes"}) NamedList spellchecker = new NamedList(); spellchecker.add("classname", FileBasedSpellChecker.class.getName()); spellchecker.add(SolrSpellChecker.DICTIONARY_NAME, "external"); @@ -138,8 +143,10 @@ public void testFieldType() throws Exception { * No indexDir location set */ @Test + @SuppressWarnings({"unchecked"}) public void testRAMDirectory() throws Exception { FileBasedSpellChecker checker = new FileBasedSpellChecker(); + @SuppressWarnings({"rawtypes"}) NamedList spellchecker = new NamedList(); spellchecker.add("classname", FileBasedSpellChecker.class.getName()); diff --git a/solr/core/src/test/org/apache/solr/spelling/IndexBasedSpellCheckerTest.java b/solr/core/src/test/org/apache/solr/spelling/IndexBasedSpellCheckerTest.java index 328d8630038d..1e6a864479de 100644 --- a/solr/core/src/test/org/apache/solr/spelling/IndexBasedSpellCheckerTest.java +++ b/solr/core/src/test/org/apache/solr/spelling/IndexBasedSpellCheckerTest.java @@ -101,9 +101,11 @@ public void testComparator() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testSpelling() throws Exception { IndexBasedSpellChecker checker = new IndexBasedSpellChecker(); + 
@SuppressWarnings({"rawtypes"}) NamedList spellchecker = new NamedList(); spellchecker.add("classname", IndexBasedSpellChecker.class.getName()); @@ -175,8 +177,10 @@ public void testSpelling() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testExtendedResults() throws Exception { IndexBasedSpellChecker checker = new IndexBasedSpellChecker(); + @SuppressWarnings({"rawtypes"}) NamedList spellchecker = new NamedList(); spellchecker.add("classname", IndexBasedSpellChecker.class.getName()); @@ -229,8 +233,10 @@ public SpellChecker getSpellChecker(){ } @Test + @SuppressWarnings({"unchecked"}) public void testAlternateDistance() throws Exception { TestSpellChecker checker = new TestSpellChecker(); + @SuppressWarnings({"rawtypes"}) NamedList spellchecker = new NamedList(); spellchecker.add("classname", IndexBasedSpellChecker.class.getName()); @@ -256,6 +262,7 @@ public void testAlternateDistance() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testAlternateLocation() throws Exception { String[] ALT_DOCS = new String[]{ "jumpin jack flash", @@ -268,6 +275,7 @@ public void testAlternateLocation() throws Exception { }; IndexBasedSpellChecker checker = new IndexBasedSpellChecker(); + @SuppressWarnings({"rawtypes"}) NamedList spellchecker = new NamedList(); spellchecker.add("classname", IndexBasedSpellChecker.class.getName()); diff --git a/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorTest.java b/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorTest.java index efa7df231701..ebab43231855 100644 --- a/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorTest.java +++ b/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorTest.java @@ -100,6 +100,7 @@ public static void beforeClass() throws Exception { } @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void testCollationWithRangeQuery() throws Exception { SolrCore core = h.getCore(); @@ -132,6 +133,7 @@ public void testCollationWithRangeQuery() throws Exception } @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void testCollationWithHypens() throws Exception { SolrCore core = h.getCore(); @@ -222,6 +224,7 @@ public void testCollateWithOverride() throws Exception } @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void testCollateWithFilter() throws Exception { SolrCore core = h.getCore(); @@ -257,6 +260,7 @@ public void testCollateWithFilter() throws Exception } @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void testCollateWithMultipleRequestHandlers() throws Exception { SolrCore core = h.getCore(); @@ -304,6 +308,7 @@ public void testCollateWithMultipleRequestHandlers() throws Exception } @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void testExtendedCollate() throws Exception { SolrCore core = h.getCore(); SearchComponent speller = core.getSearchComponent("spellcheck"); @@ -323,6 +328,7 @@ public void testExtendedCollate() throws Exception { // All words are "correct" per the dictionary, but this collation would // return no results if tried. 
SolrRequestHandler handler = core.getRequestHandler("/spellCheckCompRH"); + @SuppressWarnings({"rawtypes"}) SolrQueryResponse rsp = new SolrQueryResponse(); rsp.addResponseHeader(new SimpleOrderedMap()); SolrQueryRequest req = new LocalSolrQueryRequest(core, params); @@ -410,6 +416,7 @@ public void testExtendedCollate() throws Exception { } @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void testCollateWithGrouping() throws Exception { SolrCore core = h.getCore(); @@ -590,6 +597,7 @@ public void testEstimatedHitCounts() throws Exception { } @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void testZeroTries() throws Exception { SolrCore core = h.getCore(); @@ -617,6 +625,7 @@ public void testZeroTries() throws Exception assertTrue(collations.size() == 2); } @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void testWithCursorMark() throws Exception { SolrCore core = h.getCore(); diff --git a/solr/core/src/test/org/apache/solr/spelling/SpellingQueryConverterTest.java b/solr/core/src/test/org/apache/solr/spelling/SpellingQueryConverterTest.java index 04e1da9cc90e..d86f7c9156dd 100644 --- a/solr/core/src/test/org/apache/solr/spelling/SpellingQueryConverterTest.java +++ b/solr/core/src/test/org/apache/solr/spelling/SpellingQueryConverterTest.java @@ -35,6 +35,7 @@ public class SpellingQueryConverterTest extends SolrTestCase { @Test + @SuppressWarnings({"rawtypes"}) public void test() throws Exception { SpellingQueryConverter converter = new SpellingQueryConverter(); converter.init(new NamedList()); @@ -45,6 +46,7 @@ public void test() throws Exception { } @Test + @SuppressWarnings({"rawtypes"}) public void testNumeric() throws Exception { SpellingQueryConverter converter = new SpellingQueryConverter(); converter.init(new NamedList()); @@ -59,6 +61,7 @@ public void testNumeric() throws Exception { } @Test + @SuppressWarnings({"rawtypes"}) public void testSpecialChars() { SpellingQueryConverter converter = new SpellingQueryConverter(); converter.init(new NamedList()); @@ -115,6 +118,7 @@ private boolean isOffsetCorrect(String s, Collection tokens) { } @Test + @SuppressWarnings({"rawtypes"}) public void testUnicode() { SpellingQueryConverter converter = new SpellingQueryConverter(); converter.init(new NamedList()); @@ -135,6 +139,7 @@ public void testUnicode() { } @Test + @SuppressWarnings({"rawtypes"}) public void testMultipleClauses() { SpellingQueryConverter converter = new SpellingQueryConverter(); converter.init(new NamedList()); @@ -152,6 +157,7 @@ public void testMultipleClauses() { } @Test + @SuppressWarnings({"rawtypes"}) public void testRequiredOrProhibitedFlags() { SpellingQueryConverter converter = new SpellingQueryConverter(); converter.init(new NamedList()); diff --git a/solr/core/src/test/org/apache/solr/spelling/suggest/RandomTestDictionaryFactory.java b/solr/core/src/test/org/apache/solr/spelling/suggest/RandomTestDictionaryFactory.java index 6e3172d050f3..f692d42b6e23 100644 --- a/solr/core/src/test/org/apache/solr/spelling/suggest/RandomTestDictionaryFactory.java +++ b/solr/core/src/test/org/apache/solr/spelling/suggest/RandomTestDictionaryFactory.java @@ -99,15 +99,15 @@ public BytesRef next() throws IOException { ++emittedItems; next = new BytesRef(TestUtil.randomUnicodeString(LuceneTestCase.random(), MAX_LENGTH)); if (emittedItems % 1000 == 0) { - log.info(enabledSysProp + " emitted " + emittedItems + " items."); + log.info("{} emitted {} items", enabledSysProp, emittedItems); } } else { - log.info(enabledSysProp + " disabled after emitting 
" + emittedItems + " items."); + log.info("{} disabled after emitting {} items", enabledSysProp, emittedItems); System.clearProperty(enabledSysProp); // disable once maxItems has been reached emittedItems = 0L; } } else { - log.warn(enabledSysProp + " invoked when disabled"); + log.warn("{} invoked when disabled", enabledSysProp); emittedItems = 0L; } return next; diff --git a/solr/core/src/test/org/apache/solr/spelling/suggest/SuggesterTest.java b/solr/core/src/test/org/apache/solr/spelling/suggest/SuggesterTest.java index 9168741e520e..dcfbe9e7313d 100644 --- a/solr/core/src/test/org/apache/solr/spelling/suggest/SuggesterTest.java +++ b/solr/core/src/test/org/apache/solr/spelling/suggest/SuggesterTest.java @@ -103,8 +103,10 @@ public void testRebuild() throws Exception { } // SOLR-2726 + @SuppressWarnings({"unchecked"}) public void testAnalyzer() throws Exception { Suggester suggester = new Suggester(); + @SuppressWarnings({"rawtypes"}) NamedList params = new NamedList(); params.add("field", "test_field"); params.add("lookupImpl", "org.apache.solr.spelling.suggest.tst.TSTLookupFactory"); diff --git a/solr/core/src/test/org/apache/solr/store/blockcache/BlockCacheTest.java b/solr/core/src/test/org/apache/solr/store/blockcache/BlockCacheTest.java index 2ea3bf078fd8..ea831d20cf05 100644 --- a/solr/core/src/test/org/apache/solr/store/blockcache/BlockCacheTest.java +++ b/solr/core/src/test/org/apache/solr/store/blockcache/BlockCacheTest.java @@ -269,7 +269,7 @@ public void testCacheConcurrent() throws Exception { return; } } - assertEquals("cache key differs from value's key", (Long) k, (Long) v.key); + assertEquals("cache key differs from value's key", k, (Long) v.key); if (!v.live.compareAndSet(true, false)) { throw new RuntimeException("listener called more than once! 
k=" + k + " v=" + v + " removalCause=" + removalCause); // return; // use this variant if listeners may be called more than once @@ -339,7 +339,7 @@ public void test() { Val v = cache.getIfPresent(k); if (v != null) { hits.incrementAndGet(); - assertEquals("cache key differs from value's key", (Long) k, (Long) v.key); + assertEquals("cache key differs from value's key", k, (Long) v.key); } if (v == null || odds(updateAnywayOdds)) { diff --git a/solr/core/src/test/org/apache/solr/store/hdfs/HdfsDirectoryTest.java b/solr/core/src/test/org/apache/solr/store/hdfs/HdfsDirectoryTest.java index baa328e88f05..093ba7bf0e9f 100644 --- a/solr/core/src/test/org/apache/solr/store/hdfs/HdfsDirectoryTest.java +++ b/solr/core/src/test/org/apache/solr/store/hdfs/HdfsDirectoryTest.java @@ -113,7 +113,7 @@ public void testWritingAndReadingAFile() throws IOException { IndexInput input1 = directory.openInput("testing.test", new IOContext()); - IndexInput input2 = (IndexInput) input1.clone(); + IndexInput input2 = input1.clone(); assertEquals(12345, input2.readInt()); input2.close(); diff --git a/solr/core/src/test/org/apache/solr/update/CdcrUpdateLogTest.java b/solr/core/src/test/org/apache/solr/update/CdcrUpdateLogTest.java index 53d142d92f6a..c1a97317cbb7 100644 --- a/solr/core/src/test/org/apache/solr/update/CdcrUpdateLogTest.java +++ b/solr/core/src/test/org/apache/solr/update/CdcrUpdateLogTest.java @@ -94,17 +94,22 @@ private void addDocs(int nDocs, int start, LinkedList versions) throws Exc } private static Long getVer(SolrQueryRequest req) throws Exception { + @SuppressWarnings({"rawtypes"}) Map rsp = (Map) fromJSONString(JQ(req)); + @SuppressWarnings({"rawtypes"}) Map doc = null; if (rsp.containsKey("doc")) { doc = (Map) rsp.get("doc"); } else if (rsp.containsKey("docs")) { + @SuppressWarnings({"rawtypes"}) List lst = (List) rsp.get("docs"); if (lst.size() > 0) { doc = (Map) lst.get(0); } } else if (rsp.containsKey("response")) { + @SuppressWarnings({"rawtypes"}) Map responseMap = (Map) rsp.get("response"); + @SuppressWarnings({"rawtypes"}) List lst = (List) responseMap.get("docs"); if (lst.size() > 0) { doc = (Map) lst.get(0); @@ -140,6 +145,7 @@ public void testLogReaderNext() throws Exception { Object o = reader.next(); assertNotNull(o); + @SuppressWarnings({"rawtypes"}) List entry = (List) o; int opAndFlags = (Integer) entry.get(0); assertEquals(UpdateLog.COMMIT, opAndFlags & UpdateLog.OPERATION_MASK); @@ -203,6 +209,7 @@ public void testLogReaderSeek() throws Exception { assertTrue(reader1.seek(targetVersion)); Object o = reader1.next(); assertNotNull(o); + @SuppressWarnings({"rawtypes"}) List entry = (List) o; long version = (Long) entry.get(1); @@ -579,6 +586,7 @@ public void testSubReader() throws Exception { subReader.close(); // After fast forward, the parent reader should be position on the doc15 + @SuppressWarnings({"rawtypes"}) List o = (List) reader.next(); assertNotNull(o); assertTrue("Expected SolrInputDocument but got" + o.toString() ,o.get(3) instanceof SolrInputDocument); diff --git a/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java b/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java index 76d9acee6707..b3d3a5c916ef 100644 --- a/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java @@ -100,6 +100,7 @@ public void testRequireUniqueKey() throws Exception { @Test + @SuppressWarnings({"unchecked"}) public void testBasics() throws Exception { // get 
initial metrics @@ -386,15 +387,21 @@ public void testPrepareCommit() throws Exception { DirectoryReader r = sr.getSearcher().getIndexReader(); Directory d = r.directory(); - log.info("FILES before addDoc="+ Arrays.asList(d.listAll())); + if (log.isInfoEnabled()) { + log.info("FILES before addDoc={}", Arrays.asList(d.listAll())); + } assertU(adoc("id", "1")); int nFiles = d.listAll().length; - log.info("FILES before prepareCommit="+ Arrays.asList(d.listAll())); + if (log.isInfoEnabled()) { + log.info("FILES before prepareCommit={}", Arrays.asList(d.listAll())); + } updateJ("", params("prepareCommit", "true")); - log.info("FILES after prepareCommit="+Arrays.asList(d.listAll())); + if (log.isInfoEnabled()) { + log.info("FILES after prepareCommit={}", Arrays.asList(d.listAll())); + } assertTrue( d.listAll().length > nFiles); // make sure new index files were actually written assertJQ(req("q", "id:1") @@ -460,7 +467,7 @@ public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher current } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { } } diff --git a/solr/core/src/test/org/apache/solr/update/MaxSizeAutoCommitTest.java b/solr/core/src/test/org/apache/solr/update/MaxSizeAutoCommitTest.java index 30e9141f6d09..0449224976e5 100644 --- a/solr/core/src/test/org/apache/solr/update/MaxSizeAutoCommitTest.java +++ b/solr/core/src/test/org/apache/solr/update/MaxSizeAutoCommitTest.java @@ -247,7 +247,7 @@ public MockEventListener() { private StringBuffer fail = new StringBuffer(); @Override - public void init(NamedList args) {} + public void init(@SuppressWarnings({"rawtypes"})NamedList args) {} @Override public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher currentSearcher) { diff --git a/solr/core/src/test/org/apache/solr/update/MockStreamingSolrClients.java b/solr/core/src/test/org/apache/solr/update/MockStreamingSolrClients.java index c269c9efb461..bfa62218a095 100644 --- a/solr/core/src/test/org/apache/solr/update/MockStreamingSolrClients.java +++ b/solr/core/src/test/org/apache/solr/update/MockStreamingSolrClients.java @@ -69,7 +69,7 @@ public MockSolrClient(SolrClient solrClient) { } @Override - public NamedList request(SolrRequest request, String collection) + public NamedList request(@SuppressWarnings({"rawtypes"})SolrRequest request, String collection) throws SolrServerException, IOException { if (exp != null) { Exception e = exception(); diff --git a/solr/core/src/test/org/apache/solr/update/MockingHttp2SolrClient.java b/solr/core/src/test/org/apache/solr/update/MockingHttp2SolrClient.java index c9aded7cf4b6..0046c12839da 100644 --- a/solr/core/src/test/org/apache/solr/update/MockingHttp2SolrClient.java +++ b/solr/core/src/test/org/apache/solr/update/MockingHttp2SolrClient.java @@ -37,6 +37,7 @@ public enum Exp {CONNECT_EXCEPTION, SOCKET_EXCEPTION, BAD_REQUEST}; private volatile Exp exp = null; private boolean oneExpPerReq; + @SuppressWarnings({"rawtypes"}) private Set reqGotException; public MockingHttp2SolrClient(String baseSolrUrl, Builder builder) { @@ -85,7 +86,8 @@ private Exception exception() { } @Override - public NamedList request(SolrRequest request, String collection) + public NamedList request(@SuppressWarnings({"rawtypes"})SolrRequest request, + String collection) throws SolrServerException, IOException { if (request instanceof UpdateRequest) { UpdateRequest ur = (UpdateRequest) request; @@ -118,7 +120,8 @@ public NamedList request(SolrRequest request, String collection) return 
super.request(request, collection); } - public NamedList request(SolrRequest request, String collection, OnComplete onComplete) + public NamedList request(@SuppressWarnings({"rawtypes"})SolrRequest request, + String collection, OnComplete onComplete) throws SolrServerException, IOException { if (request instanceof UpdateRequest) { UpdateRequest ur = (UpdateRequest) request; diff --git a/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java b/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java index 9e6fd603133d..54e816ca6c19 100644 --- a/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java +++ b/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java @@ -338,6 +338,7 @@ protected void validateDocs(Set docsAdded, SolrClient client0, SolrClie void assertSync(SolrClient client, int numVersions, boolean expectedResult, String... syncWith) throws IOException, SolrServerException { QueryRequest qr = new QueryRequest(params("qt","/get", "getVersions",Integer.toString(numVersions), "sync", StrUtils.join(Arrays.asList(syncWith), ','))); + @SuppressWarnings({"rawtypes"}) NamedList rsp = client.request(qr); assertEquals(expectedResult, (Boolean) rsp.get("sync")); } diff --git a/solr/core/src/test/org/apache/solr/update/PeerSyncWithBufferUpdatesTest.java b/solr/core/src/test/org/apache/solr/update/PeerSyncWithBufferUpdatesTest.java index eff120d3a53a..f836816c1416 100644 --- a/solr/core/src/test/org/apache/solr/update/PeerSyncWithBufferUpdatesTest.java +++ b/solr/core/src/test/org/apache/solr/update/PeerSyncWithBufferUpdatesTest.java @@ -216,6 +216,7 @@ void validateQACResponse(Set docsAdded, QueryResponse qacResponse) { void assertSync(SolrClient client, int numVersions, boolean expectedResult, String syncWith) throws IOException, SolrServerException { QueryRequest qr = new QueryRequest(params("qt","/get", "getVersions",Integer.toString(numVersions), "syncWithLeader", syncWith)); + @SuppressWarnings({"rawtypes"}) NamedList rsp = client.request(qr); assertEquals(expectedResult, (Boolean) rsp.get("syncWithLeader")); } diff --git a/solr/core/src/test/org/apache/solr/update/PeerSyncWithIndexFingerprintCachingTest.java b/solr/core/src/test/org/apache/solr/update/PeerSyncWithIndexFingerprintCachingTest.java index 9617ff2a356c..9bece149d3e6 100644 --- a/solr/core/src/test/org/apache/solr/update/PeerSyncWithIndexFingerprintCachingTest.java +++ b/solr/core/src/test/org/apache/solr/update/PeerSyncWithIndexFingerprintCachingTest.java @@ -95,12 +95,14 @@ public void test() throws Exception { IndexFingerprint getFingerprint(SolrClient client, long maxVersion) throws IOException, SolrServerException { QueryRequest qr = new QueryRequest(params("qt","/get", "getFingerprint",Long.toString(maxVersion))); + @SuppressWarnings({"rawtypes"}) NamedList rsp = client.request(qr); return IndexFingerprint.fromObject(rsp.get("fingerprint")); } void assertSync(SolrClient client, int numVersions, boolean expectedResult, String... 
syncWith) throws IOException, SolrServerException { QueryRequest qr = new QueryRequest(params("qt","/get", "getVersions",Integer.toString(numVersions), "sync", StrUtils.join(Arrays.asList(syncWith), ','))); + @SuppressWarnings({"rawtypes"}) NamedList rsp = client.request(qr); assertEquals(expectedResult, (Boolean) rsp.get("sync")); } diff --git a/solr/core/src/test/org/apache/solr/update/PeerSyncWithLeaderAndIndexFingerprintCachingTest.java b/solr/core/src/test/org/apache/solr/update/PeerSyncWithLeaderAndIndexFingerprintCachingTest.java index aa668187d4fb..3b386f4fd226 100644 --- a/solr/core/src/test/org/apache/solr/update/PeerSyncWithLeaderAndIndexFingerprintCachingTest.java +++ b/solr/core/src/test/org/apache/solr/update/PeerSyncWithLeaderAndIndexFingerprintCachingTest.java @@ -30,6 +30,7 @@ public class PeerSyncWithLeaderAndIndexFingerprintCachingTest extends PeerSyncWi @Override void assertSync(SolrClient client, int numVersions, boolean expectedResult, String... syncWith) throws IOException, SolrServerException { QueryRequest qr = new QueryRequest(params("qt","/get", "getVersions",Integer.toString(numVersions), "syncWithLeader", StrUtils.join(Arrays.asList(syncWith), ','))); + @SuppressWarnings({"rawtypes"}) NamedList rsp = client.request(qr); assertEquals(expectedResult, (Boolean) rsp.get("syncWithLeader")); } diff --git a/solr/core/src/test/org/apache/solr/update/PeerSyncWithLeaderTest.java b/solr/core/src/test/org/apache/solr/update/PeerSyncWithLeaderTest.java index f1c7f696ad9d..414d0437da40 100644 --- a/solr/core/src/test/org/apache/solr/update/PeerSyncWithLeaderTest.java +++ b/solr/core/src/test/org/apache/solr/update/PeerSyncWithLeaderTest.java @@ -51,6 +51,7 @@ protected void testOverlap(Set docsAdded, SolrClient client0, SolrClien @Override void assertSync(SolrClient client, int numVersions, boolean expectedResult, String... 
syncWith) throws IOException, SolrServerException { QueryRequest qr = new QueryRequest(params("qt","/get", "getVersions",Integer.toString(numVersions), "syncWithLeader", StrUtils.join(Arrays.asList(syncWith), ','))); + @SuppressWarnings({"rawtypes"}) NamedList rsp = client.request(qr); assertEquals(expectedResult, (Boolean) rsp.get("syncWithLeader")); } diff --git a/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java b/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java index 59d50480acc7..9c0f84740418 100644 --- a/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java +++ b/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java @@ -554,7 +554,7 @@ class MockEventListener implements SolrEventListener { public MockEventListener() { /* NOOP */ } @Override - public void init(NamedList args) {} + public void init(@SuppressWarnings({"rawtypes"})NamedList args) {} @Override public void newSearcher(SolrIndexSearcher newSearcher, diff --git a/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java b/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java index 36c511f4f8a6..77f5a89935cc 100644 --- a/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java +++ b/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java @@ -302,7 +302,7 @@ public void test() throws Exception { try (SolrCore core = cores.getCore("collection1")) { core.getUpdateHandler().registerCommitCallback(new SolrEventListener() { @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { } @Override diff --git a/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java b/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java index 0bcc851b6163..fb6498505dec 100644 --- a/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java +++ b/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java @@ -21,6 +21,7 @@ import java.lang.invoke.MethodHandles; import java.nio.charset.StandardCharsets; import java.util.List; +import java.util.Set; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; @@ -55,6 +56,7 @@ public static void beforeClass() throws Exception { // System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_ System.setProperty("solr.directoryFactory", "solr.NRTCachingDirectoryFactory"); System.setProperty("solr.tests.lockType", DirectoryFactory.LOCK_TYPE_SIMPLE); + initCore("solrconfig.xml", "schema15.xml"); } @@ -67,6 +69,7 @@ public void setUp() throws Exception { indexDir1 = createTempDir("_testSplit1").toFile(); indexDir2 = createTempDir("_testSplit2").toFile(); indexDir3 = createTempDir("_testSplit3").toFile(); + h.getCoreContainer().getAllowPaths().addAll(Set.of(indexDir1.toPath(), indexDir2.toPath(), indexDir3.toPath())); } @Test diff --git a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java index 418aead206df..cd3f874099b3 100644 --- a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java +++ b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java @@ -290,7 +290,9 @@ private void reorderedDBQsSimpleTest() throws Exception { // order the updates correctly for NONLEADER 1 for (UpdateRequest update : updates) { - log.info("Issuing well ordered update: " + update.getDocuments()); + if (log.isInfoEnabled()) { + 
log.info("Issuing well ordered update: {}", update.getDocuments()); + } NONLEADERS.get(1).request(update); } @@ -383,7 +385,7 @@ private void docValuesUpdateTest() throws Exception { // number of docs we're testing (0 <= id), index may contain additional random docs (id < 0) int numDocs = atLeast(100); if (onlyLeaderIndexes) numDocs = TestUtil.nextInt(random(), 10, 50); - log.info("Trying num docs = " + numDocs); + log.info("Trying num docs = {}", numDocs); final List ids = new ArrayList(numDocs); for (int id = 0; id < numDocs; id++) { ids.add(id); @@ -400,7 +402,7 @@ private void docValuesUpdateTest() throws Exception { luceneDocids.add((Integer) doc.get("[docid]")); valuesList.add((Float) doc.get("inplace_updatable_float")); } - log.info("Initial results: "+results); + log.info("Initial results: {}", results); // before we do any atomic operations, sanity check our results against all clients assertDocIdsAndValuesAgainstAllClients("sanitycheck", params, luceneDocids, "inplace_updatable_float", valuesList); @@ -415,7 +417,7 @@ private void docValuesUpdateTest() throws Exception { assert -5.0F <= value && value <= 5.0F; valuesList.set(id, value); } - log.info("inplace_updatable_float: " + valuesList); + log.info("inplace_updatable_float: {}", valuesList); // update doc w/ set Collections.shuffle(ids, r); // so updates aren't applied in index order @@ -646,7 +648,7 @@ private void ensureRtgWorksWithPartialUpdatesTest() throws Exception { assertTrue("Earlier: "+docids+", now: "+getInternalDocIds("100"), docids.equals(getInternalDocIds("100"))); SolrDocument sdoc = LEADER.getById("100"); // RTG straight from the index - assertEquals(sdoc.toString(), (float) inplace_updatable_float, sdoc.get("inplace_updatable_float")); + assertEquals(sdoc.toString(), inplace_updatable_float, sdoc.get("inplace_updatable_float")); assertEquals(sdoc.toString(), title, sdoc.get("title_s")); assertEquals(sdoc.toString(), version, sdoc.get("_version_")); @@ -657,7 +659,7 @@ private void ensureRtgWorksWithPartialUpdatesTest() throws Exception { version = currentVersion; sdoc = LEADER.getById("100"); // RTG from the tlog - assertEquals(sdoc.toString(), (float) inplace_updatable_float, sdoc.get("inplace_updatable_float")); + assertEquals(sdoc.toString(), inplace_updatable_float, sdoc.get("inplace_updatable_float")); assertEquals(sdoc.toString(), title, sdoc.get("title_s")); assertEquals(sdoc.toString(), version, sdoc.get("_version_")); @@ -687,8 +689,8 @@ private void ensureRtgWorksWithPartialUpdatesTest() throws Exception { final String clientDebug = client.toString() + (LEADER.equals(client) ? 
" (leader)" : " (not leader)"); sdoc = client.getById("100", params("distrib", "false")); - assertEquals(clientDebug + " => "+ sdoc, (int) 100, sdoc.get("inplace_updatable_int")); - assertEquals(clientDebug + " => "+ sdoc, (float) inplace_updatable_float, sdoc.get("inplace_updatable_float")); + assertEquals(clientDebug + " => "+ sdoc, 100, sdoc.get("inplace_updatable_int")); + assertEquals(clientDebug + " => "+ sdoc, inplace_updatable_float, sdoc.get("inplace_updatable_float")); assertEquals(clientDebug + " => "+ sdoc, title, sdoc.get("title_s")); assertEquals(clientDebug + " => "+ sdoc, version, sdoc.get("_version_")); } @@ -743,7 +745,9 @@ private void outOfOrderUpdatesIndividualReplicaTest() throws Exception { // order the updates correctly for NONLEADER 1 for (UpdateRequest update : updates) { - log.info("Issuing well ordered update: " + update.getDocuments()); + if (log.isInfoEnabled()) { + log.info("Issuing well ordered update: {}", update.getDocuments()); + } NONLEADERS.get(1).request(update); } @@ -774,7 +778,9 @@ private void outOfOrderUpdatesIndividualReplicaTest() throws Exception { // assert both replicas have same effect for (SolrClient client : NONLEADERS) { // 0th is re-ordered replica, 1st is well-ordered replica - log.info("Testing client: " + ((HttpSolrClient)client).getBaseURL()); + if (log.isInfoEnabled()) { + log.info("Testing client: {}", ((HttpSolrClient) client).getBaseURL()); + } assertReplicaValue(client, 0, "inplace_updatable_float", (newinplace_updatable_float + (float)(updates.size() - 1)), "inplace_updatable_float didn't match for replica at client: " + ((HttpSolrClient)client).getBaseURL()); assertReplicaValue(client, 0, "title_s", "title0_new", @@ -812,7 +818,9 @@ private void reorderedDeletesTest() throws Exception { // order the updates correctly for NONLEADER 1 for (UpdateRequest update : updates) { - log.info("Issuing well ordered update: " + update.getDocuments()); + if (log.isInfoEnabled()) { + log.info("Issuing well ordered update: {}", update.getDocuments()); + } NONLEADERS.get(1).request(update); } @@ -884,7 +892,9 @@ private void reorderedDBQsResurrectionTest() throws Exception { // order the updates correctly for NONLEADER 1 for (UpdateRequest update : updates) { - log.info("Issuing well ordered update: " + update.getDocuments()); + if (log.isInfoEnabled()) { + log.info("Issuing well ordered update: {}", update.getDocuments()); + } NONLEADERS.get(1).request(update); } @@ -932,15 +942,17 @@ private void reorderedDBQsResurrectionTest() throws Exception { } // All should succeed, i.e. 
no LIR assertEquals(updateResponses.size(), successful); - - log.info("Non leader 0: "+((HttpSolrClient)NONLEADERS.get(0)).getBaseURL()); - log.info("Non leader 1: "+((HttpSolrClient)NONLEADERS.get(1)).getBaseURL()); + + if (log.isInfoEnabled()) { + log.info("Non leader 0: {}", ((HttpSolrClient) NONLEADERS.get(0)).getBaseURL()); + log.info("Non leader 1: {}", ((HttpSolrClient) NONLEADERS.get(1)).getBaseURL()); // logOk + } SolrDocument doc0 = NONLEADERS.get(0).getById(String.valueOf(0), params("distrib", "false")); SolrDocument doc1 = NONLEADERS.get(1).getById(String.valueOf(0), params("distrib", "false")); - log.info("Doc in both replica 0: "+doc0); - log.info("Doc in both replica 1: "+doc1); + log.info("Doc in both replica 0: {}", doc0); + log.info("Doc in both replica 1: {}", doc1); // assert both replicas have same effect for (int i=0; i model = new LinkedHashMap<>(); diff --git a/solr/core/src/test/org/apache/solr/update/TestIndexingPerformance.java b/solr/core/src/test/org/apache/solr/update/TestIndexingPerformance.java index 802090b62635..52230c1ce3bb 100644 --- a/solr/core/src/test/org/apache/solr/update/TestIndexingPerformance.java +++ b/solr/core/src/test/org/apache/solr/update/TestIndexingPerformance.java @@ -116,9 +116,13 @@ public void testIndexingPerf() throws IOException { } updateHandler.addDoc(add); } - log.info("doc="+ Arrays.toString(fields)); + if (log.isInfoEnabled()) { + log.info("doc={}", Arrays.toString(fields)); + } double elapsed = timer.getTime(); - log.info("iter="+iter +" time=" + elapsed + " throughput=" + ((long)iter*1000)/elapsed); + if (log.isInfoEnabled()) { + log.info("iter={} time={} throughput={}", iter, elapsed, ((long) iter * 1000) / elapsed); + } //discard all the changes updateHandler.rollback(new RollbackUpdateCommand(req)); diff --git a/solr/core/src/test/org/apache/solr/update/TestNestedUpdateProcessor.java b/solr/core/src/test/org/apache/solr/update/TestNestedUpdateProcessor.java index 438489596c2f..49177a9fe117 100644 --- a/solr/core/src/test/org/apache/solr/update/TestNestedUpdateProcessor.java +++ b/solr/core/src/test/org/apache/solr/update/TestNestedUpdateProcessor.java @@ -163,6 +163,7 @@ public void testDeeplyNestedURPSanity() throws Exception { nestedUpdate.processAdd(cmd); cmd.clear(); + @SuppressWarnings({"rawtypes"}) List children = (List) docHierarchy.get("children").getValues(); SolrInputDocument firstChild = (SolrInputDocument) children.get(0); @@ -189,6 +190,7 @@ public void testDeeplyNestedURPChildrenWoId() throws Exception { cmd.solrDoc = noIdChildren; nestedUpdate.processAdd(cmd); cmd.clear(); + @SuppressWarnings({"rawtypes"}) List children = (List) noIdChildren.get("children").getValues(); SolrInputDocument idLessChild = (SolrInputDocument)((SolrInputDocument) children.get(1)).get(childKey).getValue(); assertTrue("Id less child did not get an Id", idLessChild.containsKey("id")); diff --git a/solr/core/src/test/org/apache/solr/update/TestUpdate.java b/solr/core/src/test/org/apache/solr/update/TestUpdate.java index 24181697448c..d93b844a0ec1 100644 --- a/solr/core/src/test/org/apache/solr/update/TestUpdate.java +++ b/solr/core/src/test/org/apache/solr/update/TestUpdate.java @@ -48,7 +48,7 @@ public void testUpdatableDocs() throws Exception { } - public void doUpdateTest(Callable afterUpdate) throws Exception { + public void doUpdateTest(@SuppressWarnings({"rawtypes"})Callable afterUpdate) throws Exception { clearIndex(); afterUpdate.call(); diff --git a/solr/core/src/test/org/apache/solr/update/UpdateLogTest.java 
b/solr/core/src/test/org/apache/solr/update/UpdateLogTest.java index e66b05a933b6..fb4ec29a42d3 100644 --- a/solr/core/src/test/org/apache/solr/update/UpdateLogTest.java +++ b/solr/core/src/test/org/apache/solr/update/UpdateLogTest.java @@ -183,6 +183,7 @@ public void testApplyPartialUpdatesWithDelete() throws Exception { // verify that the document is deleted, by doing an RTG call assertJQ(req("qt","/get", "id","1"), "=={'doc':null}"); } else { // dbi + @SuppressWarnings({"rawtypes"}) List entry = ((List)ulog.lookup(DOC_1_INDEXED_ID)); assertEquals(UpdateLog.DELETE, (int)entry.get(UpdateLog.FLAGS_IDX) & UpdateLog.OPERATION_MASK); } diff --git a/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java b/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java index 48c76b778128..5a15e10daece 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java @@ -989,6 +989,7 @@ public void testAddDistinct() throws Exception { SolrInputDocument doc = new SolrInputDocument(); doc.setField("id", "3"); doc.setField("cat", new String[]{"aaa", "ccc"}); + doc.setField("atomic_is", 10); assertU(adoc(doc)); doc = new SolrInputDocument(); @@ -1005,22 +1006,30 @@ public void testAddDistinct() throws Exception { doc = new SolrInputDocument(); doc.setField("id", "3"); doc.setField("cat", ImmutableMap.of("add-distinct", "bbb")); + doc.setField("atomic_is", ImmutableMap.of("add-distinct", 10)); assertU(adoc(doc)); assertU(commit()); assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '2']"); assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '1']"); - assertQ(req("q", "cat:bbb", "indent", "true"), "//doc/arr[@name='cat'][count(str)=3]"); + assertQ(req("q", "cat:bbb", "indent", "true"), + "//doc/arr[@name='cat'][count(str)=3]", + "//doc/arr[@name='atomic_is'][count(int)=1]" + ); doc = new SolrInputDocument(); doc.setField("id", "3"); - doc.setField("cat", ImmutableMap.of("add-distinct", Arrays.asList(new String[]{"bbb", "bbb"}))); + doc.setField("cat", ImmutableMap.of("add-distinct", Arrays.asList("bbb", "bbb"))); + doc.setField("atomic_is", ImmutableMap.of("add-distinct", Arrays.asList(10, 34))); assertU(adoc(doc)); assertU(commit()); assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '2']"); assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '1']"); - assertQ(req("q", "cat:bbb", "indent", "true"), "//doc/arr[@name='cat'][count(str)=3]"); //'bbb' already present will not be added again + assertQ(req("q", "cat:bbb", "indent", "true"), + "//doc/arr[@name='cat'][count(str)=3]", //'bbb' already present will not be added again + "//doc/arr[@name='atomic_is'][count(int)=2]" + ); doc = new SolrInputDocument(); doc.setField("id", "5"); diff --git a/solr/core/src/test/org/apache/solr/update/processor/CategoryRoutedAliasUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/CategoryRoutedAliasUpdateProcessorTest.java index b885f28d2eb7..56f6ed06b762 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/CategoryRoutedAliasUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/CategoryRoutedAliasUpdateProcessorTest.java @@ -74,8 +74,10 @@ public void doBefore() throws Exception { configureCluster(1).configure(); solrClient = getCloudSolrClient(cluster); //log this to help debug potential causes of problems - log.info("SolrClient: {}", solrClient); - 
log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); + if (log.isInfoEnabled()) { + log.info("SolrClient: {}", solrClient); + log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); // logOk + } } @After diff --git a/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorFactoryTest.java index ad39b6ff71be..8a05ca58e130 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorFactoryTest.java @@ -32,9 +32,11 @@ */ public class ClassificationUpdateProcessorFactoryTest extends SolrTestCaseJ4 { private ClassificationUpdateProcessorFactory cFactoryToTest = new ClassificationUpdateProcessorFactory(); + @SuppressWarnings({"rawtypes"}) private NamedList args = new NamedList(); @Before + @SuppressWarnings({"unchecked"}) public void initArgs() { args.add("inputFields", "inputField1,inputField2"); args.add("classField", "classField1"); @@ -92,6 +94,7 @@ public void init_emptyPredictedClassField_shouldDefaultToTrainingClassField() { } @Test + @SuppressWarnings({"unchecked"}) public void init_unsupportedAlgorithm_shouldThrowExceptionWithDetailedMessage() { args.removeAll("algorithm"); args.add("algorithm", "unsupported"); @@ -103,6 +106,7 @@ public void init_unsupportedAlgorithm_shouldThrowExceptionWithDetailedMessage() } @Test + @SuppressWarnings({"unchecked"}) public void init_unsupportedFilterQuery_shouldThrowExceptionWithDetailedMessage() { assumeWorkingMockito(); diff --git a/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorIntegrationTest.java b/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorIntegrationTest.java index 3aee1be4d462..3a80f8f50560 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorIntegrationTest.java @@ -25,7 +25,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; -import org.apache.solr.common.util.NamedList; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.search.SolrIndexSearcher; import org.junit.Before; @@ -49,7 +48,6 @@ public class ClassificationUpdateProcessorIntegrationTest extends SolrTestCaseJ4 private static final String BROKEN_CHAIN_FILTER_QUERY = "classification-unsupported-filterQuery"; private ClassificationUpdateProcessorFactory cFactoryToTest = new ClassificationUpdateProcessorFactory(); - private NamedList args = new NamedList(); @BeforeClass public static void beforeClass() throws Exception { diff --git a/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorTest.java index b6d4fc653309..19752bddb1fd 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorTest.java @@ -236,6 +236,7 @@ public void knnClassification_maxOutputClassesGreaterThanAvailable_shouldAssignC updateProcessorToTest=new ClassificationUpdateProcessor(params,mockProcessor,reader,req().getSchema()); 
updateProcessorToTest.processAdd(update); + @SuppressWarnings({"unchecked"}) ArrayList assignedClasses = (ArrayList)unseenDocument1.getFieldValues(TRAINING_CLASS); assertThat(assignedClasses.get(0),is("class2")); assertThat(assignedClasses.get(1),is("class1")); @@ -259,6 +260,7 @@ public void knnMultiClass_maxOutputClasses2_shouldAssignMax2Classes() throws Exc updateProcessorToTest=new ClassificationUpdateProcessor(params,mockProcessor,reader,req().getSchema()); updateProcessorToTest.processAdd(update); + @SuppressWarnings({"unchecked"}) ArrayList assignedClasses = (ArrayList)unseenDocument1.getFieldValues(TRAINING_CLASS); assertThat(assignedClasses.size(),is(2)); assertThat(assignedClasses.get(0),is("class2")); @@ -283,6 +285,7 @@ public void bayesMultiClass_maxOutputClasses2_shouldAssignMax2Classes() throws E updateProcessorToTest=new ClassificationUpdateProcessor(params,mockProcessor,reader,req().getSchema()); updateProcessorToTest.processAdd(update); + @SuppressWarnings({"unchecked"}) ArrayList assignedClasses = (ArrayList)unseenDocument1.getFieldValues(TRAINING_CLASS); assertThat(assignedClasses.size(),is(2)); assertThat(assignedClasses.get(0),is("class2")); @@ -309,6 +312,7 @@ public void knnMultiClass_boostFieldsMaxOutputClasses2_shouldAssignMax2Classes() updateProcessorToTest.processAdd(update); + @SuppressWarnings({"unchecked"}) ArrayList assignedClasses = (ArrayList)unseenDocument1.getFieldValues(TRAINING_CLASS); assertThat(assignedClasses.size(),is(2)); assertThat(assignedClasses.get(0),is("class4")); @@ -335,6 +339,7 @@ public void bayesMultiClass_boostFieldsMaxOutputClasses2_shouldAssignMax2Classes updateProcessorToTest.processAdd(update); + @SuppressWarnings({"unchecked"}) ArrayList assignedClasses = (ArrayList)unseenDocument1.getFieldValues(TRAINING_CLASS); assertThat(assignedClasses.size(),is(2)); assertThat(assignedClasses.get(0),is("class4")); diff --git a/solr/core/src/test/org/apache/solr/update/processor/CustomUpdateRequestProcessorFactory.java b/solr/core/src/test/org/apache/solr/update/processor/CustomUpdateRequestProcessorFactory.java index 8fb0924bd3f5..dc1640cc6d03 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/CustomUpdateRequestProcessorFactory.java +++ b/solr/core/src/test/org/apache/solr/update/processor/CustomUpdateRequestProcessorFactory.java @@ -26,10 +26,11 @@ */ public class CustomUpdateRequestProcessorFactory extends UpdateRequestProcessorFactory { + @SuppressWarnings({"rawtypes"}) public NamedList args = null; @Override - public void init( NamedList args ) + public void init( @SuppressWarnings({"rawtypes"})NamedList args ) { this.args = args; } diff --git a/solr/core/src/test/org/apache/solr/update/processor/DimensionalRoutedAliasUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/DimensionalRoutedAliasUpdateProcessorTest.java index f48f1f5b40f8..f69745da04e0 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/DimensionalRoutedAliasUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/DimensionalRoutedAliasUpdateProcessorTest.java @@ -72,8 +72,10 @@ public void doBefore() throws Exception { configureCluster(4).configure(); solrClient = getCloudSolrClient(cluster); //log this to help debug potential causes of problems - log.info("SolrClient: {}", solrClient); - log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); + if (log.isInfoEnabled()) { + log.info("SolrClient: {}", solrClient); + log.info("ClusterStateProvider {}", 
solrClient.getClusterStateProvider()); // logOk + } } @After diff --git a/solr/core/src/test/org/apache/solr/update/processor/IgnoreLargeDocumentProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/IgnoreLargeDocumentProcessorFactoryTest.java index 89d3314b5e82..98b1f436e8a8 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/IgnoreLargeDocumentProcessorFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/IgnoreLargeDocumentProcessorFactoryTest.java @@ -37,6 +37,7 @@ public class IgnoreLargeDocumentProcessorFactoryTest extends SolrTestCase { @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void testProcessor() throws IOException { NamedList args = new NamedList(); args.add(IgnoreLargeDocumentProcessorFactory.LIMIT_SIZE_PARAM, 1); diff --git a/solr/core/src/test/org/apache/solr/update/processor/NestedAtomicUpdateTest.java b/solr/core/src/test/org/apache/solr/update/processor/NestedAtomicUpdateTest.java index 155f0c791af5..d01ce72bf97a 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/NestedAtomicUpdateTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/NestedAtomicUpdateTest.java @@ -714,6 +714,7 @@ private void testBlockAtomicSetToNullOrEmpty(boolean empty) throws Exception { "/response/docs/[0]/cat_ss/[1]==\"ccc\""); } + @SuppressWarnings({"unchecked"}) private static void assertDocContainsSubset(SolrInputDocument subsetDoc, SolrInputDocument fullDoc) { for(SolrInputField field: subsetDoc) { String fieldName = field.getName(); diff --git a/solr/core/src/test/org/apache/solr/update/processor/RoutedAliasUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/RoutedAliasUpdateProcessorTest.java index 69eeadb4cc4a..906e393c66cd 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/RoutedAliasUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/RoutedAliasUpdateProcessorTest.java @@ -326,6 +326,7 @@ void addDocsAndCommit(boolean aliasOnly, SolrInputDocument... 
solrInputDocuments void assertUpdateResponse(UpdateResponse rsp) { // use of TolerantUpdateProcessor can cause non-thrown "errors" that we need to check for + @SuppressWarnings({"rawtypes"}) List errors = (List) rsp.getResponseHeader().get("errors"); assertTrue("Expected no errors: " + errors,errors == null || errors.isEmpty()); } diff --git a/solr/core/src/test/org/apache/solr/update/processor/SignatureUpdateProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/SignatureUpdateProcessorFactoryTest.java index 06af04699b18..99951ccfe0f5 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/SignatureUpdateProcessorFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/SignatureUpdateProcessorFactoryTest.java @@ -260,6 +260,7 @@ public void testFailNonIndexedSigWithOverwriteDupes() throws Exception { } @Test + @SuppressWarnings({"rawtypes"}) public void testNonStringFieldsValues() throws Exception { this.chain = "dedupe-allfields"; diff --git a/solr/core/src/test/org/apache/solr/update/processor/SkipExistingDocumentsProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/SkipExistingDocumentsProcessorFactoryTest.java index 63069df21396..a7c2477b6004 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/SkipExistingDocumentsProcessorFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/SkipExistingDocumentsProcessorFactoryTest.java @@ -42,6 +42,7 @@ public class SkipExistingDocumentsProcessorFactoryTest { private BytesRef docId = new BytesRef(); + @SuppressWarnings({"rawtypes"}) private SolrQueryRequest defaultRequest = new LocalSolrQueryRequest(null, new NamedList()); @BeforeClass diff --git a/solr/core/src/test/org/apache/solr/update/processor/TestNamedUpdateProcessors.java b/solr/core/src/test/org/apache/solr/update/processor/TestNamedUpdateProcessors.java index 530ea3ceab4d..45bb41c07f54 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/TestNamedUpdateProcessors.java +++ b/solr/core/src/test/org/apache/solr/update/processor/TestNamedUpdateProcessors.java @@ -133,7 +133,8 @@ public static ByteBuffer getFileContent(String f) throws IOException { return jar; } - public static ByteBuffer persistZip(String loc, Class... classes) throws IOException { + public static ByteBuffer persistZip(String loc, + @SuppressWarnings({"rawtypes"})Class... classes) throws IOException { ByteBuffer jar = generateZip(classes); try (FileOutputStream fos = new FileOutputStream(loc)) { fos.write(jar.array(), 0, jar.limit()); @@ -143,11 +144,11 @@ public static ByteBuffer persistZip(String loc, Class... classes) throws IOExcep } - public static ByteBuffer generateZip(Class... classes) throws IOException { + public static ByteBuffer generateZip(@SuppressWarnings({"rawtypes"})Class... 
classes) throws IOException { SimplePostTool.BAOS bos = new SimplePostTool.BAOS(); try (ZipOutputStream zipOut = new ZipOutputStream(bos)) { zipOut.setLevel(ZipOutputStream.DEFLATED); - for (Class c : classes) { + for (@SuppressWarnings({"rawtypes"})Class c : classes) { String path = c.getName().replace('.', '/').concat(".class"); ZipEntry entry = new ZipEntry(path); ByteBuffer b = SimplePostTool.inputStreamToByteArray(c.getClassLoader().getResourceAsStream(path)); diff --git a/solr/core/src/test/org/apache/solr/update/processor/TimeRoutedAliasUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/TimeRoutedAliasUpdateProcessorTest.java index 835aced82d2f..cba15f887652 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/TimeRoutedAliasUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/TimeRoutedAliasUpdateProcessorTest.java @@ -89,8 +89,10 @@ public void doBefore() throws Exception { configureCluster(4).configure(); solrClient = getCloudSolrClient(cluster); //log this to help debug potential causes of problems - log.info("SolrClient: {}", solrClient); - log.info("ClusterStateProvider {}",solrClient.getClusterStateProvider()); + if (log.isInfoEnabled()) { + log.info("SolrClient: {}", solrClient); + log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); // logOk + } } @After @@ -916,6 +918,7 @@ private List checkCollectionCountIs(int num) { // here we do things not to be emulated elsewhere to create a legacy condition and ensure that we can // work with both old and new formats. + @SuppressWarnings({"unchecked", "rawtypes"}) private void manuallyConstructLegacyTRA() throws Exception { // first create a "modern" alias String configName = getSaferTestName(); @@ -973,7 +976,6 @@ private void manuallyConstructLegacyTRA() throws Exception { if (data == null || data.length == 0) { aliasMap = Collections.emptyMap(); } else { - //noinspection unchecked aliasMap = (Map) Utils.fromJSON(data); } assertNotEquals(0, aliasMap.size()); diff --git a/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java index 07b3a8855f12..f51b94a9e980 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java @@ -343,7 +343,7 @@ public String update(String chain, String xml) { try { return connection.request(handler, params, xml); } catch (SolrException e) { - throw (SolrException)e; + throw e; } catch (Exception e) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e); } diff --git a/solr/core/src/test/org/apache/solr/update/processor/TrackingUpdateProcessorFactory.java b/solr/core/src/test/org/apache/solr/update/processor/TrackingUpdateProcessorFactory.java index 06a72ea19348..00996522f86f 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/TrackingUpdateProcessorFactory.java +++ b/solr/core/src/test/org/apache/solr/update/processor/TrackingUpdateProcessorFactory.java @@ -87,7 +87,7 @@ public static List stopRecording(String group) { } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { if (args != null && args.indexOf("group",0) >= 0) { group = (String) args.get("group"); log.debug("Init URP, group '{}'", group); diff --git 
a/solr/core/src/test/org/apache/solr/update/processor/UpdateRequestProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/UpdateRequestProcessorFactoryTest.java index 66d612fa697c..cbd69203b30c 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/UpdateRequestProcessorFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/UpdateRequestProcessorFactoryTest.java @@ -162,7 +162,7 @@ public void testUpdateDistribChainSkipping() throws Exception { // for these 3 (distrib) chains, the last proc should always be RunUpdateProcessor assertTrue(name + " (distrib) last processor isn't a RunUpdateProcessor: " + procs.toString(), - procs.get(procs.size()-1) instanceof RunUpdateProcessor ); + procs.get(procs.size()-1) instanceof RunUpdateProcessorFactory.RunUpdateProcessor ); // either 1 proc was droped in distrib mode, or 1 for the "implicit" chain diff --git a/solr/core/src/test/org/apache/solr/util/OrderedExecutorTest.java b/solr/core/src/test/org/apache/solr/util/OrderedExecutorTest.java index 825bf1b5f551..f62301cdd2d2 100644 --- a/solr/core/src/test/org/apache/solr/util/OrderedExecutorTest.java +++ b/solr/core/src/test/org/apache/solr/util/OrderedExecutorTest.java @@ -127,19 +127,21 @@ public void testRunInParallel() { barrier.await(120, TimeUnit.SECONDS); postBarrierLatch.countDown(); } catch (TimeoutException t) { - log.error("Timeout in worker#" + lockId + "awaiting barrier", t); + log.error("Timeout in worker# {} awaiting barrier", lockId, t); } catch (BrokenBarrierException b) { - log.error("Broken Barrier in worker#" + lockId, b); + log.error("Broken Barrier in worker#{}", lockId, b); } catch (InterruptedException e) { - log.error("Interrupt in worker#" + lockId + "awaiting barrier", e); + log.error("Interrupt in worker#{} awaiting barrier", lockId, e); Thread.currentThread().interrupt(); } }); }); } - log.info("main thread: about to wait on pre-barrier latch, barrier={}, post-barrier latch={}", - barrier.getNumberWaiting(), postBarrierLatch.getCount()); + if (log.isInfoEnabled()) { + log.info("main thread: about to wait on pre-barrier latch, barrier={}, post-barrier latch={}", + barrier.getNumberWaiting(), postBarrierLatch.getCount()); + } try { // this latch should have fully counted down by now @@ -151,9 +153,11 @@ public void testRunInParallel() { Thread.currentThread().interrupt(); fail("interupt while trying to await the preBarrierLatch"); } - - log.info("main thread: pre-barrier latch done, barrier={}, post-barrier latch={}", - barrier.getNumberWaiting(), postBarrierLatch.getCount()); + + if (log.isInfoEnabled()) { + log.info("main thread: pre-barrier latch done, barrier={}, post-barrier latch={}", + barrier.getNumberWaiting(), postBarrierLatch.getCount()); + } // nothing should have counted down yet on the postBarrierLatch assertEquals(parallelism, postBarrierLatch.getCount()); @@ -162,9 +166,11 @@ public void testRunInParallel() { // if we now await on the the barrier, it should release // (once all other threads get to the barrier as well, but no external action needed) barrier.await(120, TimeUnit.SECONDS); - - log.info("main thread: barrier has released, post-barrier latch={}", - postBarrierLatch.getCount()); + + if (log.isInfoEnabled()) { + log.info("main thread: barrier has released, post-barrier latch={}", + postBarrierLatch.getCount()); + } // and now the post-barrier latch should release immediately // (or with a small await for thread scheduling but no other external action) diff --git 
a/solr/core/src/test/org/apache/solr/util/SolrLogPostToolTest.java b/solr/core/src/test/org/apache/solr/util/SolrLogPostToolTest.java index 9f9322a974d5..aa39e4dd890a 100644 --- a/solr/core/src/test/org/apache/solr/util/SolrLogPostToolTest.java +++ b/solr/core/src/test/org/apache/solr/util/SolrLogPostToolTest.java @@ -18,6 +18,7 @@ import java.io.BufferedReader; import java.io.StringReader; +import java.util.Collection; import java.util.List; import java.util.ArrayList; @@ -33,7 +34,7 @@ public class SolrLogPostToolTest extends SolrTestCaseJ4 { @Test public void testQueryRecord() throws Exception{ - String record = "2019-12-09 15:05:01.931 INFO (qtp2103763750-21) [c:logs4 s:shard1 r:core_node2 x:logs4_shard1_replica_n1] o.a.s.c.S.Request [logs4_shard1_replica_n1] webapp=/solr path=/select params={q=*:*&_=1575835181759&isShard=true&wt=javabin&distrib=false} hits=234868 status=0 QTime=8\n"; + String record = "2019-12-09 15:05:01.931 INFO (qtp2103763750-21) [c:logs4 s:shard1 r:core_node2 x:logs4_shard1_replica_n1] o.a.s.c.S.Request [logs4_shard1_replica_n1] webapp=/solr path=/select params={q=*:*&_=1575835181759&shards.purpose=36&isShard=true&wt=javabin&distrib=false} hits=234868 status=0 QTime=8\n"; List docs = readDocs(record); assertEquals(docs.size(), 1); SolrInputDocument doc = docs.get(0); @@ -53,6 +54,8 @@ public void testQueryRecord() throws Exception{ SolrInputField isShard = doc.getField("isShard_s"); SolrInputField ids = doc.getField("ids_s"); SolrInputField shards = doc.getField("shards_s"); + SolrInputField purpose = doc.getField("purpose_ss"); + Object[] purposes = purpose.getValues().toArray(); assertEquals(query.getValue(), "*:*"); assertEquals(date.getValue(), "2019-12-09T15:05:01.931"); @@ -69,6 +72,26 @@ public void testQueryRecord() throws Exception{ assertEquals(isShard.getValue(), "true"); assertEquals(ids.getValue(), "false"); assertEquals(shards.getValue(), "false"); + assertEquals("GET_TOP_IDS", purposes[0].toString()); + assertEquals("REFINE_FACETS", purposes[1].toString()); + } + + // Requests which have multiple copies of the same param should be parsed so that the first param value only is + // indexed, since the log schema expects many of these to be single-valued fields and will throw errors if multiple + // values are received. + @Test + public void testRecordsFirstInstanceOfSingleValuedParams() throws Exception { + final String record = "2019-12-09 15:05:01.931 INFO (qtp2103763750-21) [c:logs4 s:shard1 r:core_node2 x:logs4_shard1_replica_n1] o.a.s.c.S.Request [logs4_shard1_replica_n1] webapp=/solr path=/select params={q=*:*&q=inStock:true&_=1575835181759&shards.purpose=36&isShard=true&wt=javabin&wt=xml&distrib=false} hits=234868 status=0 QTime=8\n"; + + List docs = readDocs(record); + assertEquals(docs.size(), 1); + SolrInputDocument doc = docs.get(0); + + assertEquals(doc.getFieldValues("q_s").size(), 1); + assertEquals(doc.getFieldValue("q_s"), "*:*"); + + assertEquals(doc.getFieldValues("wt_s").size(), 1); + assertEquals(doc.getFieldValue("wt_s"), "javabin"); } @Test @@ -264,9 +287,24 @@ public void testNewSearcher() throws Exception{ assertEquals(core.getValue(), "production_cv_month_201912_shard35_replica_n1"); } + // Ensure SolrLogPostTool parses _all_ log lines into searchable records + @Test + public void testOtherRecord() throws Exception { + final String record = "2020-06-11 11:59:08.386 INFO (main) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... 
(0) -> (2)"; + final List docs = readDocs(record); + assertEquals(docs.size(), 1); + + SolrInputDocument doc = docs.get(0); + final Collection fields = doc.getFieldNames(); + assertEquals(3, fields.size()); + assertEquals("2020-06-11T11:59:08.386", doc.getField("date_dt").getValue()); + assertEquals("other", doc.getField("type_s").getValue()); + assertEquals(record, doc.getField("line_t").getValue()); + } + private List readDocs(String records) throws Exception { BufferedReader bufferedReader = new BufferedReader(new StringReader(records)); - ArrayList list = new ArrayList(); + ArrayList list = new ArrayList<>(); try { LogRecordReader logRecordReader = new SolrLogPostTool.LogRecordReader(bufferedReader); diff --git a/solr/core/src/test/org/apache/solr/util/SolrPluginUtilsTest.java b/solr/core/src/test/org/apache/solr/util/SolrPluginUtilsTest.java index f5034c4e6b60..d18b575e28e2 100644 --- a/solr/core/src/test/org/apache/solr/util/SolrPluginUtilsTest.java +++ b/solr/core/src/test/org/apache/solr/util/SolrPluginUtilsTest.java @@ -248,7 +248,7 @@ public void testDisjunctionMaxQueryParser() throws Exception { } - private static int countItems(Iterator i) { + private static int countItems(@SuppressWarnings({"rawtypes"})Iterator i) { int count = 0; while (i.hasNext()) { count++; diff --git a/solr/core/src/test/org/apache/solr/util/TestRTimerTree.java b/solr/core/src/test/org/apache/solr/util/TestRTimerTree.java index f668aa14a73b..443e55d76817 100644 --- a/solr/core/src/test/org/apache/solr/util/TestRTimerTree.java +++ b/solr/core/src/test/org/apache/solr/util/TestRTimerTree.java @@ -75,11 +75,14 @@ public void test() { assertEquals(120, (int) subt.getTime()); assertEquals(220, (int) rt.getTime()); + @SuppressWarnings({"rawtypes"}) NamedList nl = rt.asNamedList(); assertEquals(220, ((Double) nl.get("time")).intValue()); + @SuppressWarnings({"rawtypes"}) NamedList sub1nl = (NamedList) nl.get("sub1"); assertNotNull(sub1nl); assertEquals(120, ((Double) sub1nl.get("time")).intValue()); + @SuppressWarnings({"rawtypes"}) NamedList sub11nl = (NamedList) sub1nl.get("sub1.1"); assertNotNull(sub11nl); assertEquals(20, ((Double) sub11nl.get("time")).intValue()); diff --git a/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java b/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java index 49b5dd32bb70..0ef58512382c 100644 --- a/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java +++ b/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java @@ -152,7 +152,7 @@ public int execute(org.apache.commons.exec.CommandLine cmd) throws IOException { if (port == localPort) { try { standaloneSolr.stop(); - log.info("Stopped standalone Solr instance running on port "+port); + log.info("Stopped standalone Solr instance running on port {}", port); } catch (Exception e) { if (e instanceof RuntimeException) { throw (RuntimeException)e; @@ -182,11 +182,11 @@ public int execute(org.apache.commons.exec.CommandLine cmd) throws IOException { } } else { String cmdLine = joinArgs(cmd.getArguments()); - log.info("Executing command: "+cmdLine); + log.info("Executing command: {}", cmdLine); try { return super.execute(cmd); } catch (Exception exc) { - log.error("Execute command ["+cmdLine+"] failed due to: "+exc, exc); + log.error("Execute command [{}] failed due to: {}", cmdLine, exc, exc); throw exc; } } @@ -272,7 +272,7 @@ public void close() throws IOException { try { solrCloudCluster.shutdown(); } catch (Exception e) { - log.warn("Failed to shutdown MiniSolrCloudCluster due to: 
" + e); + log.warn("Failed to shutdown MiniSolrCloudCluster due to: ", e); } } @@ -280,7 +280,7 @@ public void close() throws IOException { try { standaloneSolr.stop(); } catch (Exception exc) { - log.warn("Failed to shutdown standalone Solr due to: " + exc); + log.warn("Failed to shutdown standalone Solr due to: ", exc); } standaloneSolr = null; } @@ -330,7 +330,7 @@ protected void testExample(String exampleName) throws Exception { bindPort = socket.getLocalPort(); } - log.info("Selected port "+bindPort+" to start "+exampleName+" example Solr instance on ..."); + log.info("Selected port {} to start {} example Solr instance on ...", bindPort, exampleName); String[] toolArgs = new String[] { "-e", exampleName, @@ -361,8 +361,8 @@ protected void testExample(String exampleName) throws Exception { assertEquals("it should be ok "+tool+" "+Arrays.toString(toolArgs),0, status); } catch (Exception e) { - log.error("RunExampleTool failed due to: " + e + - "; stdout from tool prior to failure: " + baos.toString(StandardCharsets.UTF_8.name())); + log.error("RunExampleTool failed due to: {}; stdout from tool prior to failure: {}" + , e , baos.toString(StandardCharsets.UTF_8.name())); // logOk throw e; } @@ -583,6 +583,7 @@ public void testInteractiveSolrCloudExampleWithAutoScalingPolicy() throws Except " {'nodeRole':'overseer', 'replica':0}" + " ]" + "}"; + @SuppressWarnings({"rawtypes"}) SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand); NamedList response = cloudClient.request(req); assertEquals(response.get("result").toString(), "success"); diff --git a/solr/core/src/test/org/apache/solr/util/TestSolrJacksonAnnotation.java b/solr/core/src/test/org/apache/solr/util/TestSolrJacksonAnnotation.java index f0b9036576b8..f8a7aa413731 100644 --- a/solr/core/src/test/org/apache/solr/util/TestSolrJacksonAnnotation.java +++ b/solr/core/src/test/org/apache/solr/util/TestSolrJacksonAnnotation.java @@ -29,6 +29,7 @@ public class TestSolrJacksonAnnotation extends SolrTestCase { + @SuppressWarnings({"unchecked"}) public void testSerDe() throws Exception { ObjectMapper mapper = new ObjectMapper(); mapper.setAnnotationIntrospector(new SolrJacksonAnnotationInspector()); @@ -39,6 +40,7 @@ public void testSerDe() throws Exception { o.ifld = 1234; String json = mapper.writeValueAsString(o); + @SuppressWarnings({"rawtypes"}) Map m = (Map) Utils.fromJSONString(json); assertEquals("v1", m.get("field")); assertEquals("v2", m.get("friendlyName")); diff --git a/solr/core/src/test/org/apache/solr/util/TestUtils.java b/solr/core/src/test/org/apache/solr/util/TestUtils.java index beb0bf6b01e8..65a053641c69 100644 --- a/solr/core/src/test/org/apache/solr/util/TestUtils.java +++ b/solr/core/src/test/org/apache/solr/util/TestUtils.java @@ -197,12 +197,13 @@ public void testBinaryCommands() throws IOException { } ContentStream stream = new ContentStreamBase.ByteArrayStream(baos.toByteArray(),null, "application/javabin"); + @SuppressWarnings({"rawtypes"}) List commands = CommandOperation.readCommands(Collections.singletonList(stream), new NamedList(), Collections.singleton("single")); assertEquals(5, commands.size()); } - private void assertNoggitJsonValues(Map m) { + private void assertNoggitJsonValues(@SuppressWarnings({"rawtypes"})Map m) { assertEquals( "c" ,Utils.getObjectByPath(m, true, "/a/b")); assertEquals( "v1" ,Utils.getObjectByPath(m, true, "/a/d[0]/k1")); assertEquals( "v2" ,Utils.getObjectByPath(m, true, "/a/d[1]/k2")); @@ -223,6 +224,7 @@ public void testSetObjectByPath(){ " 
'path':'/update/*',\n" + " 'role':'dev'}],\n" + " '':{'v':4}}}"; + @SuppressWarnings({"rawtypes"}) Map m = (Map) fromJSONString(json); Utils.setObjectByPath(m,"authorization/permissions[1]/role","guest"); Utils.setObjectByPath(m,"authorization/permissions[0]/role[-1]","dev"); @@ -248,11 +250,13 @@ public void testUtilsJSPath(){ " 'path':'/update/*',\n" + " 'role':'dev'}],\n" + " '':{'v':4}}}"; + @SuppressWarnings({"rawtypes"}) Map m = (Map) fromJSONString(json); assertEquals("x-update", Utils.getObjectByPath(m,false, "authorization/permissions[1]/name")); } + @SuppressWarnings({"unchecked"}) public void testMapWriterIdx(){ String json = "{" + " 'responseHeader':{" + @@ -279,6 +283,7 @@ public void testMapWriterIdx(){ " 'status':0," + " 'QTime':5033}," + " 'core':'corestatus_test_shard2_replica_n3'}}}"; + @SuppressWarnings({"rawtypes"}) Map m = (Map) fromJSONString(json); assertEquals("127.0.0.1:56443_solr", Utils.getObjectByPath(m,false, "success[0]/key")); @@ -289,6 +294,7 @@ public void testMapWriterIdx(){ assertEquals("corestatus_test_shard2_replica_n3", Utils.getObjectByPath(m, false,asList("success[3]", "value", "core") )); assertEquals(5033L, Utils.getObjectByPath(m, false,asList("success[3]", "value", "responseHeader", "QTime") )); + @SuppressWarnings({"rawtypes"}) Map nodes = (Map) m.get("success"); m.put("success", (MapWriter) ew -> nodes.forEach((o, o2) -> ew.putNoEx((String) o,o2))); assertEquals("127.0.0.1:56443_solr", Utils.getObjectByPath(m,false, "success[0]/key")); @@ -299,11 +305,13 @@ public void testMapWriterIdx(){ assertEquals("corestatus_test_shard2_replica_n3", Utils.getObjectByPath(m, false,asList("success[3]", "value", "core") )); assertEquals(5033L, Utils.getObjectByPath(m, false,asList("success[3]", "value", "responseHeader", "QTime") )); final int[] count = {0}; + @SuppressWarnings({"unchecked", "rawtypes"}) NamedList nl = new NamedList(m); nl._forEachEntry("success", (o, o2) -> count[0]++); assertEquals(count[0], 4); } + @SuppressWarnings({"unchecked"}) public void testMergeJson() { Map sink = (Map) Utils.fromJSONString("{k2:v2, k1: {a:b, p:r, k21:{xx:yy}}}"); assertTrue(Utils.mergeJson(sink, (Map) Utils.fromJSONString("k1:{a:c, e:f, p :null, k11:{a1:b1}, k21:{pp : qq}}"))); @@ -316,7 +324,7 @@ public void testMergeJson() { assertEquals("b1", Utils.getObjectByPath(sink, true, "k1/k11/a1")); sink = new HashMap<>(); - sink.put("legacyCloud", "false"); + sink.put("autoAddReplicas", "false"); assertTrue(Utils.mergeJson(sink, (Map) Utils.fromJSONString("collectionDefaults:{numShards:3 , nrtReplicas:2}"))); assertEquals(3L, Utils.getObjectByPath(sink, true, ImmutableList.of(COLLECTION_DEF, NUM_SHARDS_PROP))); assertEquals(2L, Utils.getObjectByPath(sink, true, ImmutableList.of(COLLECTION_DEF, NRT_REPLICAS))); diff --git a/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java b/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java index 4c2f9663e007..f9595e1e576e 100644 --- a/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java +++ b/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java @@ -38,6 +38,7 @@ public class MetricUtilsTest extends SolrTestCaseJ4 { @Test + @SuppressWarnings({"unchecked"}) public void testSolrTimerGetSnapshot() { // create a timer with up to 100 data points final Timer timer = new Timer(); @@ -50,6 +51,7 @@ public void testSolrTimerGetSnapshot() { MetricUtils.convertTimer("", timer, MetricUtils.PropertyFilter.ALL, false, false, ".", (k, v) -> { map.putAll((Map)v); }); + 
@SuppressWarnings({"rawtypes"}) NamedList lst = new NamedList(map); // check that expected metrics were obtained assertEquals(14, lst.size()); @@ -67,6 +69,7 @@ public void testSolrTimerGetSnapshot() { } @Test + @SuppressWarnings({"unchecked"}) public void testMetrics() throws Exception { MetricRegistry registry = new MetricRegistry(); Counter counter = registry.counter("counter"); @@ -102,6 +105,7 @@ public void testMetrics() throws Exception { registry.register("memory.expected.error", error); MetricUtils.toMaps(registry, Collections.singletonList(MetricFilter.ALL), MetricFilter.ALL, MetricUtils.PropertyFilter.ALL, false, false, false, false, (k, o) -> { + @SuppressWarnings({"rawtypes"}) Map v = (Map)o; if (k.startsWith("counter")) { assertEquals(1L, v.get("count")); @@ -149,19 +153,23 @@ public void testMetrics() throws Exception { assertEquals("foobar", o); } else if (k.startsWith("timer")) { assertTrue(o instanceof Map); + @SuppressWarnings({"rawtypes"}) Map v = (Map)o; assertEquals(1L, v.get("count")); assertTrue(((Number)v.get("min_ms")).intValue() > 100); } else if (k.startsWith("meter")) { assertTrue(o instanceof Map); + @SuppressWarnings({"rawtypes"}) Map v = (Map)o; assertEquals(1L, v.get("count")); } else if (k.startsWith("histogram")) { assertTrue(o instanceof Map); + @SuppressWarnings({"rawtypes"}) Map v = (Map)o; assertEquals(1L, v.get("count")); } else if (k.startsWith("aggregate1")) { assertTrue(o instanceof Map); + @SuppressWarnings({"rawtypes"}) Map v = (Map)o; assertEquals(4, v.get("count")); Map values = (Map)v.get("values"); @@ -175,6 +183,7 @@ public void testMetrics() throws Exception { assertEquals(2, update.get("updateCount")); } else if (k.startsWith("aggregate2")) { assertTrue(o instanceof Map); + @SuppressWarnings({"rawtypes"}) Map v = (Map)o; assertEquals(2, v.get("count")); Map values = (Map)v.get("values"); @@ -189,6 +198,7 @@ public void testMetrics() throws Exception { } else if (k.startsWith("memory.expected.error")) { assertNull(o); } else { + @SuppressWarnings({"rawtypes"}) Map v = (Map)o; assertEquals(1L, v.get("count")); } diff --git a/solr/core/src/test/org/apache/solr/util/tracing/TestHttpServletCarrier.java b/solr/core/src/test/org/apache/solr/util/tracing/TestHttpServletCarrier.java index 790c5bd5da43..a3cb02511a8e 100644 --- a/solr/core/src/test/org/apache/solr/util/tracing/TestHttpServletCarrier.java +++ b/solr/core/src/test/org/apache/solr/util/tracing/TestHttpServletCarrier.java @@ -37,6 +37,7 @@ public class TestHttpServletCarrier extends SolrTestCaseJ4 { @Test + @SuppressWarnings({"unchecked"}) public void test() { SolrTestCaseJ4.assumeWorkingMockito(); HttpServletRequest req = mock(HttpServletRequest.class); diff --git a/solr/example/build.gradle b/solr/example/build.gradle index c948ee359479..3b6b3d16eedc 100644 --- a/solr/example/build.gradle +++ b/solr/example/build.gradle @@ -19,6 +19,8 @@ // I am not convinced packaging of examples should be a separate project... Seems more logical to // move it to just the packaging project (?). Let's leave it for now though. 
+description = 'Solr examples' + configurations { packaging postJar diff --git a/solr/licenses/caffeine-2.8.0.jar.sha1 b/solr/licenses/caffeine-2.8.0.jar.sha1 deleted file mode 100644 index ce291c474255..000000000000 --- a/solr/licenses/caffeine-2.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6000774d7f8412ced005a704188ced78beeed2bb diff --git a/solr/licenses/caffeine-2.8.4.jar.sha1 b/solr/licenses/caffeine-2.8.4.jar.sha1 new file mode 100644 index 000000000000..813e00d57d03 --- /dev/null +++ b/solr/licenses/caffeine-2.8.4.jar.sha1 @@ -0,0 +1 @@ +e5730b11981406faa28e0912405a0ce7c2d0f377 diff --git a/solr/licenses/commons-cli-1.2.jar.sha1 b/solr/licenses/commons-cli-1.2.jar.sha1 deleted file mode 100644 index 6dacb321cdf7..000000000000 --- a/solr/licenses/commons-cli-1.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2bf96b7aa8b611c177d329452af1dc933e14501c diff --git a/solr/licenses/commons-cli-1.4.jar.sha1 b/solr/licenses/commons-cli-1.4.jar.sha1 new file mode 100644 index 000000000000..536f272467a1 --- /dev/null +++ b/solr/licenses/commons-cli-1.4.jar.sha1 @@ -0,0 +1 @@ +c51c00206bb913cd8612b24abd9fa98ae89719b1 diff --git a/solr/licenses/commons-fileupload-1.3.3.jar.sha1 b/solr/licenses/commons-fileupload-1.3.3.jar.sha1 deleted file mode 100644 index d27deb410a10..000000000000 --- a/solr/licenses/commons-fileupload-1.3.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -04ff14d809195b711fd6bcc87e6777f886730ca1 diff --git a/solr/licenses/commons-fileupload-LICENSE-ASL.txt b/solr/licenses/commons-fileupload-LICENSE-ASL.txt deleted file mode 100644 index d64569567334..000000000000 --- a/solr/licenses/commons-fileupload-LICENSE-ASL.txt +++ /dev/null @@ -1,202 +0,0 @@ [202 deleted lines: the verbatim Apache License, Version 2.0 text, removed along with the file] diff --git a/solr/licenses/commons-fileupload-NOTICE.txt b/solr/licenses/commons-fileupload-NOTICE.txt deleted file mode 100644 index bec42c04a57d..000000000000 --- a/solr/licenses/commons-fileupload-NOTICE.txt +++ /dev/null @@ -1,5 +0,0 @@ -Apache Commons FileUpload -Copyright 2002-2008 The Apache Software Foundation - -This product includes software developed by -The Apache Software Foundation (http://www.apache.org/).
diff --git a/solr/licenses/hppc-0.8.1.jar.sha1 b/solr/licenses/hppc-0.8.1.jar.sha1 deleted file mode 100644 index 7006e68f4482..000000000000 --- a/solr/licenses/hppc-0.8.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ffc7ba8f289428b9508ab484b8001dea944ae603 diff --git a/solr/licenses/hppc-0.8.2.jar.sha1 b/solr/licenses/hppc-0.8.2.jar.sha1 new file mode 100644 index 000000000000..a73358b9c66f --- /dev/null +++ b/solr/licenses/hppc-0.8.2.jar.sha1 @@ -0,0 +1 @@ +ccb3ef933ead6b5d766fa571582ddb9b447e48c4 diff --git a/solr/licenses/log4j-1.2-api-2.11.2.jar.sha1 b/solr/licenses/log4j-1.2-api-2.11.2.jar.sha1 deleted file mode 100644 index 4ad6f9676644..000000000000 --- a/solr/licenses/log4j-1.2-api-2.11.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -afb9ef0baba766725c3733e6a2626877dba72715 diff --git a/solr/licenses/log4j-1.2-api-2.13.2.jar.sha1 b/solr/licenses/log4j-1.2-api-2.13.2.jar.sha1 new file mode 100644 index 000000000000..78679ce88d67 --- /dev/null +++ b/solr/licenses/log4j-1.2-api-2.13.2.jar.sha1 @@ -0,0 +1 @@ +d05a7928403d9a8b37d66dc85319c33e5dba17dd diff --git a/solr/licenses/log4j-api-2.11.2.jar.sha1 b/solr/licenses/log4j-api-2.11.2.jar.sha1 deleted file mode 100644 index 0cdea100b72b..000000000000 --- a/solr/licenses/log4j-api-2.11.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f5e9a2ffca496057d6891a3de65128efc636e26e diff --git a/solr/licenses/log4j-api-2.13.2.jar.sha1 b/solr/licenses/log4j-api-2.13.2.jar.sha1 new file mode 100644 index 000000000000..a98264912453 --- /dev/null +++ b/solr/licenses/log4j-api-2.13.2.jar.sha1 @@ -0,0 +1 @@ +567ea514dedd8679c429c5b5b39b0d67b6464c3c diff --git a/solr/licenses/log4j-core-2.11.2.jar.sha1 b/solr/licenses/log4j-core-2.11.2.jar.sha1 deleted file mode 100644 index ec2acae4df7f..000000000000 --- a/solr/licenses/log4j-core-2.11.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6c2fb3f5b7cd27504726aef1b674b542a0c9cf53 diff --git a/solr/licenses/log4j-core-2.13.2.jar.sha1 b/solr/licenses/log4j-core-2.13.2.jar.sha1 new file mode 100644 index 000000000000..ce26d9c45a85 --- /dev/null +++ b/solr/licenses/log4j-core-2.13.2.jar.sha1 @@ -0,0 +1 @@ +8eb1fc1914eb2550bf3ddea26917c9a7cbb00593 diff --git a/solr/licenses/log4j-slf4j-impl-2.11.2.jar.sha1 b/solr/licenses/log4j-slf4j-impl-2.11.2.jar.sha1 deleted file mode 100644 index 69bca4b80c07..000000000000 --- a/solr/licenses/log4j-slf4j-impl-2.11.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4d44e4edc4a7fb39f09b95b09f560a15976fa1ba diff --git a/solr/licenses/log4j-slf4j-impl-2.13.2.jar.sha1 b/solr/licenses/log4j-slf4j-impl-2.13.2.jar.sha1 new file mode 100644 index 000000000000..58fa437840ba --- /dev/null +++ b/solr/licenses/log4j-slf4j-impl-2.13.2.jar.sha1 @@ -0,0 +1 @@ +49df25f7a35dd7fbd8131fc5ab09665d18e3d4fe diff --git a/solr/licenses/log4j-web-2.11.2.jar.sha1 b/solr/licenses/log4j-web-2.11.2.jar.sha1 deleted file mode 100644 index cc4476efe49b..000000000000 --- a/solr/licenses/log4j-web-2.11.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d11ebc03fdf773d32143e0f7ea0fc131c21311e7 diff --git a/solr/licenses/log4j-web-2.13.2.jar.sha1 b/solr/licenses/log4j-web-2.13.2.jar.sha1 new file mode 100644 index 000000000000..5dad454a820a --- /dev/null +++ b/solr/licenses/log4j-web-2.13.2.jar.sha1 @@ -0,0 +1 @@ +fb0fd4f049e77ced8db9b7706d33681c73ecb7e4 diff --git a/solr/packaging/build.gradle b/solr/packaging/build.gradle index e45c3081fb04..55b78ca0182c 100644 --- a/solr/packaging/build.gradle +++ b/solr/packaging/build.gradle @@ -15,7 +15,6 @@ * limitations under the License. 
*/ - // This project puts together a "distribution", assembling dependencies from // various other projects. @@ -23,6 +22,8 @@ plugins { id 'base' } +description = 'Solr packaging' + ext { distDir = file("$buildDir/solr-${version}") devDir = file("$buildDir/dev") diff --git a/solr/server/build.gradle b/solr/server/build.gradle index 5a74f3d10efd..371c5ce456f3 100644 --- a/solr/server/build.gradle +++ b/solr/server/build.gradle @@ -17,6 +17,8 @@ apply plugin: 'java-library' +description = 'Solr Server' + configurations { api { exclude group: "org.slf4j" diff --git a/solr/server/solr/solr.xml b/solr/server/solr/solr.xml index 7fce0e8b12a2..1bb9b28eed1b 100644 --- a/solr/server/solr/solr.xml +++ b/solr/server/solr/solr.xml @@ -30,6 +30,7 @@ ${solr.max.booleanClauses:1024} ${solr.sharedLib:} + ${solr.allowPaths:} diff --git a/solr/site/index.template.md b/solr/site/index.template.md new file mode 100644 index 000000000000..d961c23f200c --- /dev/null +++ b/solr/site/index.template.md @@ -0,0 +1,37 @@ +
+<!-- Solr logo banner: "Solr" logo image with TM mark -->
    + +# Apache Solr™ ${project.version} Documentation + +Solr is the popular, blazing fast, open source NoSQL search platform from the Apache Lucene project. Its major +features include powerful full-text search, hit highlighting, faceted search and analytics, rich document +parsing, geospatial search, extensive REST APIs as well as parallel SQL. Solr is enterprise grade, secure and +highly scalable, providing fault tolerant distributed search and indexing, and powers the search and navigation +features of many of the world's largest internet sites. + +Solr is written in Java and runs as a standalone full-text search server. Solr uses the Lucene Java search +library at its core for full-text indexing and search, and has REST-like JSON APIs that make it easy to use +from virtually any programming language. Solr's powerful configuration APIs and files allows it to be tailored +to almost any type of application without Java coding, and it has an extensive plugin architecture when more +advanced customization is required. + +This is the official documentation for **Apache Solr ${project.version}**. + +## Reference Documents + +* [Reference Guide](${project.solrRefguideUrl}/): The main documentation for Solr +* [Changes](changes/Changes.html): List of changes in this release +* [System Requirements](SYSTEM_REQUIREMENTS.html): Minimum and supported Java versions +* [Solr Tutorial](${project.solrRefguideUrl}/solr-tutorial.html): + This document covers the basics of running Solr using an example schema, and some sample data +* [Lucene Documentation](${project.luceneDocUrl}/index.html) + +## API Javadocs + +${projectList} diff --git a/solr/site/online-link.template.md b/solr/site/online-link.template.md new file mode 100644 index 000000000000..1ea7142d0524 --- /dev/null +++ b/solr/site/online-link.template.md @@ -0,0 +1,19 @@ +
+<!-- Solr logo banner: "Solr" logo image with TM mark -->
    + +# Apache Solr™ ${project.version} Documentation + +<% +if ("${project.version}" == "${project.baseVersion}") { // compare as strings because of lazy evaluation + println "Follow [this link to view online documentation](${project.solrDocUrl}) for Apache Solr ${project.version}." +} else { + println "No online documentation available for custom builds or `SNAPSHOT` versions." + println "Run `gradlew documentation` from `src.tgz` package to build docs locally." +} +%> diff --git a/solr/solr-ref-guide/build.gradle b/solr/solr-ref-guide/build.gradle index 50f056ad2d69..f61a9e0d1348 100644 --- a/solr/solr-ref-guide/build.gradle +++ b/solr/solr-ref-guide/build.gradle @@ -52,6 +52,8 @@ plugins { id 'com.github.jruby-gradle.base' version '2.0.0-alpha.7' } +description = 'Solr reference guide' + // Use an internal proxy to ruby gems. repositories { ruby.gems() diff --git a/solr/solr-ref-guide/src/authentication-and-authorization-plugins.adoc b/solr/solr-ref-guide/src/authentication-and-authorization-plugins.adoc index b8f9a3d6196a..4e4a8c3ce7dd 100644 --- a/solr/solr-ref-guide/src/authentication-and-authorization-plugins.adoc +++ b/solr/solr-ref-guide/src/authentication-and-authorization-plugins.adoc @@ -1,5 +1,5 @@ = Configuring Authentication, Authorization and Audit Logging -:page-children: basic-authentication-plugin, hadoop-authentication-plugin, kerberos-authentication-plugin, jwt-authentication-plugin, rule-based-authorization-plugin, audit-logging +:page-children: basic-authentication-plugin, hadoop-authentication-plugin, kerberos-authentication-plugin, jwt-authentication-plugin, cert-authentication-plugin, rule-based-authorization-plugin, audit-logging // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information diff --git a/solr/solr-ref-guide/src/cert-authentication-plugin.adoc b/solr/solr-ref-guide/src/cert-authentication-plugin.adoc new file mode 100644 index 000000000000..4b23cc986046 --- /dev/null +++ b/solr/solr-ref-guide/src/cert-authentication-plugin.adoc @@ -0,0 +1,61 @@ += Certificate Authentication Plugin +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +Solr can support extracting the user principal out of the client's certificate with the use of the CertAuthPlugin. + +== Enable Certificate Authentication + +For Certificate authentication, the `security.json` file must have an `authentication` part which defines the class being used for authentication. 
+
+An example `security.json` is shown below:
+
+[source,json]
+----
+{
+ "authentication": {
+  "class":"solr.CertAuthPlugin"
+ }
+}
+----
+
+=== Certificate Validation
+
+Parts of certificate validation, including verifying the trust chain and the peer hostname/IP address, will be done by the web servlet container before the request ever reaches the authentication plugin.
+These checks are described in the <> section.
+
+This plugin provides no additional checking beyond what has been configured via SSL properties.
+
+=== User Principal Extraction
+
+This plugin will configure the user principal for the request based on the X500 subject present in the client certificate.
+Authorization plugins will need to accept and handle the full subject name, for example:
+
+[source]
+----
+CN=Solr User,OU=Engineering,O=Example Inc.,C=US
+----
+
+A list of possible tags that can be present in the subject name is available in https://tools.ietf.org/html/rfc5280#section-4.1.2.4[RFC-5280, Section 4.1.2.4]. Values may contain spaces, punctuation, and other characters.
+
+It is best practice to verify the actual contents of certificates issued by your trusted certificate authority before configuring authorization based on their contents.
+
+== Using Certificate Auth with Clients (including SolrJ)
+
+With certificate authentication enabled, all client requests must include a valid certificate.
+This is identical to the <> when using SSL.
+
diff --git a/solr/solr-ref-guide/src/cluster-node-management.adoc b/solr/solr-ref-guide/src/cluster-node-management.adoc
index 571666fb9592..4269ed0107fc 100644
--- a/solr/solr-ref-guide/src/cluster-node-management.adoc
+++ b/solr/solr-ref-guide/src/cluster-node-management.adoc
@@ -131,7 +131,7 @@ Add, edit or delete a cluster-wide property.
=== CLUSTERPROP Parameters
`name`::
-The name of the property. Supported properties names are `autoAddReplicas`, `legacyCloud`, `location`, `maxCoresPerNode` and `urlScheme`. Other properties can be set
+The name of the property. Supported property names are `autoAddReplicas`, `location`, `maxCoresPerNode`, `urlScheme` and `defaultShardPreferences`. Other properties can be set
(for example, if you need them for custom plugins) but they must begin with the prefix `ext.`. Unknown properties that don't begin with `ext.` will be rejected.
`val`::
@@ -213,6 +213,23 @@ replaced with `defaults`. Using the `collectionDefaults` parameter in Solr 7.4 o
but the format of the properties will automatically be converted to the new nested structure.
Support for the "collectionDefaults" key will be removed in Solr 9.
+
+=== Default Shard Preferences
+
+Using the `defaultShardPreferences` parameter, you can implement rack or availability zone awareness. First, make sure to "label" your nodes using a <> (e.g., `-Drack=rack1`). Then, set the value of `defaultShardPreferences` to `node.sysprop:sysprop.YOUR_PROPERTY_NAME` like this:
+
+[source,bash]
+----
+curl -X POST -H 'Content-type:application/json' --data-binary '
+{
+  "set-property" : {
+    "name" : "defaultShardPreferences",
+    "val" : "node.sysprop:sysprop.rack"
+  }
+}' http://localhost:8983/api/cluster
+----
+
+At this point, if you run a query on a node having, e.g., `rack=rack1`, Solr will try to hit only replicas from `rack1`.
+
[[balanceshardunique]]
== BALANCESHARDUNIQUE: Balance a Property Across Nodes
@@ -481,21 +498,4 @@ http://localhost:8983/solr/admin/collections?action=OVERSEERSTATUS
  ],
  "..."
} ----- - -[[migratestateformat]] -== MIGRATESTATEFORMAT: Migrate Cluster State - -A expert level utility API to move a collection from shared `clusterstate.json` ZooKeeper node (created with `stateFormat=1`, the default in all Solr releases prior to 5.0) to the per-collection `state.json` stored in ZooKeeper (created with `stateFormat=2`, the current default) seamlessly without any application down-time. - -`/admin/collections?action=MIGRATESTATEFORMAT&collection=` - -=== MIGRATESTATEFORMAT Parameters - -`collection`:: -The name of the collection to be migrated from `clusterstate.json` to its own `state.json` ZooKeeper node. This parameter is required. - -`async`:: -Request ID to track this action which will be <>. - -This API is useful in migrating any collections created prior to Solr 5.0 to the more scalable cluster state format now used by default. If a collection was created in any Solr 5.x version or higher, then executing this command is not necessary. +---- \ No newline at end of file diff --git a/solr/solr-ref-guide/src/collection-management.adoc b/solr/solr-ref-guide/src/collection-management.adoc index 981ad5fcb06d..6d2b2a9c62c6 100644 --- a/solr/solr-ref-guide/src/collection-management.adoc +++ b/solr/solr-ref-guide/src/collection-management.adoc @@ -903,7 +903,6 @@ http://localhost:8983/solr/admin/collections?action=COLSTATUS&collection=getting "QTime": 50 }, "gettingstarted": { - "stateFormat": 2, "znodeVersion": 16, "properties": { "autoAddReplicas": "false", @@ -1049,7 +1048,6 @@ http://localhost:8983/solr/admin/collections?action=COLSTATUS&collection=getting "QTime": 26812 }, "gettingstarted": { - "stateFormat": 2, "znodeVersion": 33, "properties": { "autoAddReplicas": "false", diff --git a/solr/solr-ref-guide/src/common-query-parameters.adoc b/solr/solr-ref-guide/src/common-query-parameters.adoc index 1cfd39168535..c80178af99f9 100644 --- a/solr/solr-ref-guide/src/common-query-parameters.adoc +++ b/solr/solr-ref-guide/src/common-query-parameters.adoc @@ -193,7 +193,7 @@ The default behavior is not to include debugging information. == explainOther Parameter -The `explainOther` parameter specifies a Lucene query in order to identify a set of documents. If this parameter is included and is set to a non-blank value, the query will return debugging information, along with the "explain info" of each document that matches the Lucene query, relative to the main query (which is specified by the q parameter). For example: +The `explainOther` parameter specifies a Lucene query in order to identify a set of documents. If this parameter is included and is set to a non-blank value, the query will return debugging information, along with the "explain info" of each document that matches the Lucene query, relative to the main query (which is specified by the `q` parameter). For example: [source,text] ---- @@ -361,3 +361,43 @@ This is what happens if a similar request is sent that adds `echoParams=all` to } } ---- + +== minExactCount Parameter +When this parameter is used, Solr will count the number of hits accurately at least until this value. After that, Solr can skip over documents that don't have a score high enough to enter in the top N. This can greatly improve performance of search queries. On the other hand, when this parameter is used, the `numFound` may not be exact, and may instead be an approximation. +The `numFoundExact` boolean attribute is included in all responses, indicating if the `numFound` value is exact or an approximation. 
If it's an approximation, the real number of hits for the query is guaranteed to be greater than or equal to `numFound`.
+
+More about approximate document counting and `minExactCount`:
+
+* The documents returned in the response are guaranteed to be the docs with the top scores. This parameter will not make Solr skip documents that are to be returned in the response; it only allows Solr to skip counting docs that match the query but whose scores are too low to make the top N.
+* Providing `minExactCount` doesn't guarantee that Solr will use approximate hit counting (and thus provide the speedup). Some types of queries, or other parameters (for example, if facets are requested), will require accurate counting.
+* Approximate counting can only be used when sorting by `score desc` first (which is the default sort in Solr). Other fields can be used after `score desc`, but if any other type of sorting is used before score, then the approximation won't be applied.
+* When doing distributed queries across multiple shards, each shard will accurately count hits up to `minExactCount` (which means the query could be hitting `numShards * minExactCount` docs and `numFound` in the response would still be accurate).
+
+For example:
+
+[source,text]
+q=quick brown fox&minExactCount=100&rows=10
+
+[source,json]
+----
+"response": {
+    "numFound": 153,
+    "start": 0,
+    "numFoundExact": false,
+    "docs": Array[10]
+...
+----
+Since `numFoundExact=false`, we know the number of documents matching the query is greater than or equal to 153. If we specify a higher value for `minExactCount`:
+
+[source,text]
+q=quick brown fox&minExactCount=200&rows=10
+
+[source,json]
+----
+"response": {
+    "numFound": 163,
+    "start": 0,
+    "numFoundExact": true,
+    "docs": Array[10]
+...
+----
+In this case we know that `163` is the exact number of hits for the query. Both queries must have returned the same number of documents in the top 10.
diff --git a/solr/solr-ref-guide/src/coreadmin-api.adoc b/solr/solr-ref-guide/src/coreadmin-api.adoc
index 22b7d1ceaf57..90f0ab86aff4 100644
--- a/solr/solr-ref-guide/src/coreadmin-api.adoc
+++ b/solr/solr-ref-guide/src/coreadmin-api.adoc
@@ -77,7 +77,7 @@ The `core.properties` file is built as part of the CREATE command. If you create
The name of the new core. Same as `name` on the `` element. This parameter is required.
`instanceDir`::
-The directory where files for this core should be stored. Same as `instanceDir` on the `` element. The default is the value specified for the `name` parameter if not supplied.
+The directory where files for this core should be stored. Same as `instanceDir` on the `` element. The default is the value specified for the `name` parameter if not supplied. This directory must be inside `SOLR_HOME`, `SOLR_DATA_HOME` or one of the paths specified by the system property `solr.allowPaths`.
`config`::
Name of the config file (i.e., `solrconfig.xml`) relative to `instanceDir`.
@@ -86,7 +86,7 @@ Name of the config file (i.e., `solrconfig.xml`) relative to `instanceDir`.
Name of the schema file to use for the core. Please note that if you are using a "managed schema" (the default behavior) then any value for this property which does not match the effective `managedSchemaResourceName` will be read once, backed up, and converted for managed schema use. See <> for details.
`dataDir`::
-Name of the data directory relative to `instanceDir`.
+Name of the data directory relative to `instanceDir`. If an absolute path is used, it must be inside `SOLR_HOME`, `SOLR_DATA_HOME` or one of the paths specified by the system property `solr.allowPaths`.
`configSet`::
Name of the configset to use for this core. For more information, see the section <>.
diff --git a/solr/solr-ref-guide/src/css/decoration.css b/solr/solr-ref-guide/src/css/decoration.css
index 1c8d3354fff0..ff3a3e2f4cce 100644
--- a/solr/solr-ref-guide/src/css/decoration.css
+++ b/solr/solr-ref-guide/src/css/decoration.css
@@ -113,7 +113,7 @@ b.button:after
.nav-pills .nav-link.active:hover,
.nav-pills .nav-link.active:focus
{
-    background-color: #FF833D;
+    background-color: #D9411E;
    color: white;
}
diff --git a/solr/solr-ref-guide/src/css/ref-guide.css b/solr/solr-ref-guide/src/css/ref-guide.css
index 19edf0d20d58..efac8a05533d 100644
--- a/solr/solr-ref-guide/src/css/ref-guide.css
+++ b/solr/solr-ref-guide/src/css/ref-guide.css
@@ -267,11 +267,7 @@ pre > code
    font-weight: 400;
    font-family: 'Inconsolata', monospace;
    line-height: 1.45;
-    white-space: pre-wrap;
-}
-
-table code {
-    background-color: transparent;
+    white-space: pre-wrap;
}
a code {
diff --git a/solr/solr-ref-guide/src/distributed-requests.adoc b/solr/solr-ref-guide/src/distributed-requests.adoc
index 7b9a109b026d..191bb8506314 100644
--- a/solr/solr-ref-guide/src/distributed-requests.adoc
+++ b/solr/solr-ref-guide/src/distributed-requests.adoc
@@ -189,6 +189,10 @@ Applied after sorting by inherent replica attributes, this property defines a fa
+
`stable[:hash[:_paramName_]]` the string value associated with the given parameter name is hashed to a dividend that is used to determine replica preference order (analogous to the explicit `dividend` property above); `_paramName_` defaults to `q` if not specified, providing stable routing keyed to the string value of the "main query". Note that this may be inappropriate for some use cases (e.g., static main queries that leverage parameter substitution)
+`node.sysprop`::
+Queries will be routed to nodes that define the same system properties as the current node. For example, if you start Solr nodes on different racks, you'll want to identify those nodes by a <> (e.g., `-Drack=rack1`). Then, queries can contain `shards.preference=node.sysprop:sysprop.rack`, to make sure you always hit shards with the same value of `rack`.
+
+
Examples:
* Prefer stable routing (keyed to client "sessionId" param) among otherwise equivalent replicas:
diff --git a/solr/solr-ref-guide/src/enabling-ssl.adoc b/solr/solr-ref-guide/src/enabling-ssl.adoc
index 5840e3f35e06..6d1be3ba5b32 100644
--- a/solr/solr-ref-guide/src/enabling-ssl.adoc
+++ b/solr/solr-ref-guide/src/enabling-ssl.adoc
@@ -16,27 +16,30 @@
// specific language governing permissions and limitations
// under the License.
-Solr can encrypt communications to and from clients, and between nodes in SolrCloud mode, with SSL.
+Solr can encrypt communications to and from clients and between nodes in SolrCloud mode with Secure Sockets Layer encryption (SSL).
This section describes enabling SSL using a self-signed certificate.
For background on SSL certificates and keys, see http://www.tldp.org/HOWTO/SSL-Certificates-HOWTO/.
-== Basic SSL Setup
+== Configuring Solr for SSL
=== Generate a Self-Signed Certificate and a Key
-To generate a self-signed certificate and a single key that will be used to authenticate both the server and the client, we'll use the JDK https://docs.oracle.com/javase/8/docs/technotes/tools/unix/keytool.html[`keytool`] command and create a separate keystore.
This keystore will also be used as a truststore below. It's possible to use the keystore that comes with the JDK for these purposes, and to use a separate truststore, but those options aren't covered here. +To generate a self-signed certificate and a single key that will be used to authenticate both the server and the client, we'll use the JDK https://docs.oracle.com/javase/8/docs/technotes/tools/unix/keytool.html[`keytool`] command and create a separate keystore. +This keystore will also be used as a truststore below. +It's possible to use the keystore that comes with the JDK for these purposes, and to use a separate truststore, but those options aren't covered here. -Run the commands below in the `server/etc/` directory in the binary Solr distribution. It's assumed that you have the JDK `keytool` utility on your `PATH`, and that `openssl` is also on your `PATH`. See https://www.openssl.org/related/binaries.html for OpenSSL binaries for Windows and Solaris. +Run the commands below in the `server/etc/` directory in the binary Solr distribution. +It's assumed that you have the JDK `keytool` utility on your `PATH`, and that `openssl` is also on your `PATH`. See https://www.openssl.org/related/binaries.html for OpenSSL binaries for Windows and Solaris. -The `-ext SAN=...` `keytool` option allows you to specify all the DNS names and/or IP addresses that will be allowed during hostname verification (but see below for how to skip hostname verification between Solr nodes so that you don't have to specify all hosts here). +The `-ext SAN=...` `keytool` option allows you to specify all the DNS names and/or IP addresses that will be allowed during hostname verification if you choose to require it. In addition to `localhost` and `127.0.0.1`, this example includes a LAN IP address `192.168.1.3` for the machine the Solr nodes will be running on: -[source,bash] +[source,terminal] ---- -keytool -genkeypair -alias solr-ssl -keyalg RSA -keysize 2048 -keypass secret -storepass secret -validity 9999 -keystore solr-ssl.keystore.p12 -storetype PKCS12 -ext SAN=DNS:localhost,IP:192.168.1.3,IP:127.0.0.1 -dname "CN=localhost, OU=Organizational Unit, O=Organization, L=Location, ST=State, C=Country" +$ keytool -genkeypair -alias solr-ssl -keyalg RSA -keysize 2048 -keypass secret -storepass secret -validity 9999 -keystore solr-ssl.keystore.p12 -storetype PKCS12 -ext SAN=DNS:localhost,IP:192.168.1.3,IP:127.0.0.1 -dname "CN=localhost, OU=Organizational Unit, O=Organization, L=Location, ST=State, C=Country" ---- The above command will create a keystore file named `solr-ssl.keystore.p12` in the current directory. @@ -45,25 +48,31 @@ The above command will create a keystore file named `solr-ssl.keystore.p12` in t Convert the PKCS12 format keystore, including both the certificate and the key, into PEM format using the http://www.openssl.org[`openssl`] command: -[source,bash] +[source,terminal] ---- -openssl pkcs12 -in solr-ssl.keystore.p12 -out solr-ssl.pem +$ openssl pkcs12 -in solr-ssl.keystore.p12 -out solr-ssl.pem ---- If you want to use curl on OS X Yosemite (10.10), you'll need to create a certificate-only version of the PEM format, as follows: -[source,bash] +[source,terminal] ---- -openssl pkcs12 -nokeys -in solr-ssl.keystore.p12 -out solr-ssl.cacert.pem +$ openssl pkcs12 -nokeys -in solr-ssl.keystore.p12 -out solr-ssl.cacert.pem ---- === Set Common SSL-Related System Properties -The Solr Control Script is already setup to pass SSL-related Java system properties to the JVM. 
To activate the SSL settings, uncomment and update the set of properties beginning with SOLR_SSL_* in `bin/solr.in.sh`. (or `bin\solr.in.cmd` on Windows). +The Solr Control Script is already setup to pass SSL-related Java system properties to the JVM. +To activate the SSL settings, uncomment and update the set of properties beginning with `SOLR_SSL_*` in `bin/solr.in.sh` on *nix systems or `bin\solr.in.cmd` on Windows. + +[.dynamic-tabs] +-- +[example.tab-pane#solr-in-sh] +==== +[.tab-label]**nix (solr.in.sh)* -NOTE: If you setup Solr as a service on Linux using the steps outlined in <>, then make these changes in `/var/solr/solr.in.sh` instead. +NOTE: If you setup Solr as a service on Linux using the steps outlined in <>, then make these changes in `/var/solr/solr.in.sh`. -.bin/solr.in.sh example SOLR_SSL_* configuration [source,bash] ---- # Enables HTTPS. It is implicitly true if you set SOLR_SSL_KEY_STORE. Use this config @@ -83,16 +92,12 @@ SOLR_SSL_WANT_CLIENT_AUTH=false # this to false can be useful to disable these checks when re-using a certificate on many hosts SOLR_SSL_CHECK_PEER_NAME=true ---- +==== -When you start Solr, the `bin/solr` script includes the settings in `bin/solr.in.sh` and will pass these SSL-related system properties to the JVM. - -.Client Authentication Settings -WARNING: Enable either SOLR_SSL_NEED_CLIENT_AUTH or SOLR_SSL_WANT_CLIENT_AUTH but not both at the same time. They are mutually exclusive and Jetty will select one of them which may not be what you expect. SOLR_SSL_CLIENT_HOSTNAME_VERIFICATION should be set to false if you want to disable hostname verification. - -Similarly, when you start Solr on Windows, the `bin\solr.cmd` script includes the settings in `bin\solr.in.cmd` - uncomment and update the set of properties beginning with `SOLR_SSL_*` to pass these SSL-related system properties to the JVM: - -.bin\solr.in.cmd example SOLR_SSL_* configuration -[source,text] +[example.tab-pane#solr-in-cmd] +==== +[.tab-label]*Windows (solr.in.cmd)* +[source,powershell] ---- REM Enables HTTPS. It is implicitly true if you set SOLR_SSL_KEY_STORE. Use this config REM to enable https module with custom jetty configuration. @@ -111,40 +116,28 @@ REM SSL Certificates contain host/ip "peer name" information that is validated b REM this to false can be useful to disable these checks when re-using a certificate on many hosts set SOLR_SSL_CHECK_PEER_NAME=true ---- +==== +-- -=== Run Single Node Solr using SSL +.Client Authentication Settings +WARNING: Enable either `SOLR_SSL_NEED_CLIENT_AUTH` or `SOLR_SSL_WANT_CLIENT_AUTH` but not both at the same time. They are mutually exclusive and Jetty will select one of them which may not be what you expect. `SOLR_SSL_CLIENT_HOSTNAME_VERIFICATION` should be set to false if you want to disable hostname verification. -Start Solr using the command shown below; by default clients will not be required to authenticate: +When you start Solr, the `bin/solr` script includes these settings and will pass them as system properties to the JVM. -[.dynamic-tabs] --- -[example.tab-pane#single-unix] -==== -[.tab-label]**nix Command* -[source,bash] ----- -bin/solr -p 8984 ----- -==== +If you are running Solr in standalone mode, you can skip to <>. -[example.tab-pane#single-windows] -==== -[.tab-label]*Windows Command* -[source,text] ----- -bin\solr.cmd -p 8984 ----- -==== --- +If you are using SolrCloud, however, you need to <> before starting Solr. 
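+
+For quick reference, here is a minimal sketch of the entries involved, assuming the `solr-ssl.keystore.p12` keystore with password `secret` generated above, referenced relative to the `server` directory (adjust the path and password for your setup):
+
+[source,bash]
+----
+# Enable HTTPS and reuse the self-signed keystore as the truststore.
+SOLR_SSL_ENABLED=true
+SOLR_SSL_KEY_STORE=etc/solr-ssl.keystore.p12
+SOLR_SSL_KEY_STORE_PASSWORD=secret
+SOLR_SSL_TRUST_STORE=etc/solr-ssl.keystore.p12
+SOLR_SSL_TRUST_STORE_PASSWORD=secret
+----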
+ +=== Password Distribution via Hadoop Credential Store -== Password Distribution via Hadoop Credential Store +Solr supports reading keystore and truststore passwords from Hadoop credential store. +This approach can be beneficial if password rotation and distribution is already handled by credential stores. -Solr supports reading keystore and truststore passwords from Hadoop credential store. This approach can be beneficial -if password rotation and distribution is already handled by credential stores. +If you are not using a Hadoop credential store, you can skip this step. Hadoop credential store can be used with Solr using the following two steps. -=== Provide a Hadoop Credential Store +==== Provide a Hadoop Credential Store Create a Hadoop credstore file and define the entries below with the actual keystore passwords. [source,text] @@ -157,7 +150,7 @@ javax.net.ssl.trustStorePassword Note that if the `javax.net.ssl.\*` configurations are not set, they will fallback to the corresponding `solr.jetty.*` configurations. -=== Configure Solr to use Hadoop Credential Store +==== Configure Solr to use Hadoop Credential Store Solr requires three parameters to be configured in order to use the credential store file for keystore passwords. @@ -174,8 +167,8 @@ The password to the credential store. -- [example.tab-pane#credstore-unix] ==== -[.tab-label]**nix Example* -[source,text] +[.tab-label]**nix (solr.in.sh)* +[source,bash] ---- SOLR_OPTS=" -Dsolr.ssl.credential.provider.chain=hadoop" SOLR_HADOOP_CREDENTIAL_PROVIDER_PATH=localjceks://file/home/solr/hadoop-credential-provider.jceks @@ -185,8 +178,8 @@ HADOOP_CREDSTORE_PASSWORD="credStorePass123" [example.tab-pane#credstore-windows] ===== -[.tab-label]*Windows Example* -[source,text] +[.tab-label]*Windows (solr.in.cmd)* +[source,powershell] ---- set SOLR_OPTS=" -Dsolr.ssl.credential.provider.chain=hadoop" set SOLR_HADOOP_CREDENTIAL_PROVIDER_PATH=localjceks://file/home/solr/hadoop-credential-provider.jceks @@ -195,106 +188,113 @@ set HADOOP_CREDSTORE_PASSWORD="credStorePass123" ===== -- - -== SSL with SolrCloud - -This section describes how to run a two-node SolrCloud cluster with no initial collections and a single-node external ZooKeeper. The commands below assume you have already created the keystore described above. - === Configure ZooKeeper NOTE: ZooKeeper does not support encrypted communication with clients like Solr. There are several related JIRA tickets where SSL support is being planned/worked on: https://issues.apache.org/jira/browse/ZOOKEEPER-235[ZOOKEEPER-235]; https://issues.apache.org/jira/browse/ZOOKEEPER-236[ZOOKEEPER-236]; https://issues.apache.org/jira/browse/ZOOKEEPER-1000[ZOOKEEPER-1000]; and https://issues.apache.org/jira/browse/ZOOKEEPER-2120[ZOOKEEPER-2120]. -Before you start any SolrCloud nodes, you must configure your Solr cluster properties in ZooKeeper, so that Solr nodes know to communicate via SSL. +After creating the keystore described above and before you start any SolrCloud nodes, you must configure your Solr cluster properties in ZooKeeper so that Solr nodes know to communicate via SSL. -This section assumes you have created and started a single-node external ZooKeeper on port 2181 on localhost - see <>. +This section assumes you have created and started an external ZooKeeper. +See <> for more information. -The `urlScheme` cluster-wide property needs to be set to `https` before any Solr node starts up. 
The example below uses the `zkcli` tool that comes with the binary Solr distribution to do this: +The `urlScheme` cluster-wide property needs to be set to `https` before any Solr node starts up. +The examples below use the `zkcli` tool that comes with Solr to do this. -.*nix command -[source,bash] +[.dynamic-tabs] +-- +[example.tab-pane#zkclusterprops-unix] +==== +[.tab-label]**nix Command* +[source,terminal] ---- -server/scripts/cloud-scripts/zkcli.sh -zkhost localhost:2181 -cmd clusterprop -name urlScheme -val https +$ server/scripts/cloud-scripts/zkcli.sh -zkhost server1:2181,server2:2181,server3:2181 -cmd clusterprop -name urlScheme -val https ---- +==== -.Windows command -[source,text] +[example.tab-pane#zkclusterprops-windows] +===== +[.tab-label]*Windows Command* +[source,powershell] ---- -server\scripts\cloud-scripts\zkcli.bat -zkhost localhost:2181 -cmd clusterprop -name urlScheme -val https +C:\> server\scripts\cloud-scripts\zkcli.bat -zkhost server1:2181,server2:2181,server3:2181 -cmd clusterprop -name urlScheme -val https ---- +===== +-- -If you have set up your ZooKeeper cluster to use a <>, make sure you use the correct `zkhost` string with `zkcli`, e.g., `-zkhost localhost:2181/solr`. +Be sure to use the correct `zkhost` value for your system. If you have set up your ZooKeeper ensemble to use a <>, make sure to include it in the `zkhost` string, e.g., `-zkhost server1:2181,server2:2181,server3:2181/solr`. -=== Run SolrCloud with SSL +=== Update Cluster Properties for Existing Collections -NOTE: If you have defined `ZK_HOST` in `solr.in.sh`/`solr.in.cmd` (see <>) you can omit `-z ` from all of the `bin/solr`/`bin\solr.cmd` commands below. +If you are using SolrCloud and have collections created before enabling SSL, you will need to update the cluster properties to use HTTPS. -==== Create Solr Home Directories for Two Nodes +If you do not have existing collections or are not using SolrCloud, you can skip ahead and start Solr. -Create two copies of the `server/solr/` directory which will serve as the Solr home directories for each of your two SolrCloud nodes: +Updating cluster properties can be done with the Collections API <> command, as in this example (update the hostname and port as appropriate for your system): -.*nix commands -[source,bash] ----- -mkdir cloud -cp -r server/solr cloud/node1 -cp -r server/solr cloud/node2 ----- +[source,terminal] +$ http://localhost:8983/solr/admin/collections?action=CLUSTERPROP&name=urlScheme&val=https -.Windows commands -[source,text] ----- -mkdir cloud -xcopy /E server\solr cloud\node1\ -xcopy /E server\solr cloud\node2\ ----- +This command only needs to be run on one node of the cluster, the change will apply to all nodes. -==== Start the First Solr Node +Once this and all other steps are complete, you can go ahead and start Solr. -Next, start the first Solr node on port 8984. Be sure to stop the standalone server first if you started it when working through the previous section on this page. +== Starting Solr After Enabling SSL -.*nix command -[source,bash] ----- -bin/solr -cloud -s cloud/node1 -z localhost:2181 -p 8984 ----- - -.Windows command -[source,text] ----- -bin\solr.cmd -cloud -s cloud\node1 -z localhost:2181 -p 8984 ----- - -Notice the use of the `-s` option to set the location of the Solr home directory for node1. 
+=== Run Single Node Solr using SSL -If you created your SSL key without all DNS names/IP addresses on which Solr nodes will run, you can tell Solr to skip hostname verification for inter-Solr-node communications by setting the `solr.ssl.checkPeerName` system property to `false`: +Start Solr using the Solr control script as shown in the examples below. +Customize the values for the parameters shown as needed and add any used in your system. -.*nix command -[source,bash] +[.dynamic-tabs] +-- +[example.tab-pane#single-unix] +==== +[.tab-label]**nix Command* +[source,terminal] ---- -bin/solr -cloud -s cloud/node1 -z localhost:2181 -p 8984 -Dsolr.ssl.checkPeerName=false +$ bin/solr -p 8984 ---- +==== -.Windows command -[source,text] +[example.tab-pane#single-windows] +==== +[.tab-label]*Windows Command* +[source,powershell] ---- -bin\solr.cmd -cloud -s cloud\node1 -z localhost:2181 -p 8984 -Dsolr.ssl.checkPeerName=false +C:\> bin\solr.cmd -p 8984 ---- +==== +-- -==== Start the Second Solr Node +=== Run SolrCloud with SSL -Finally, start the second Solr node on port 7574 - again, to skip hostname verification, add `-Dsolr.ssl.checkPeerName=false`; +NOTE: If you have defined `ZK_HOST` in `solr.in.sh`/`solr.in.cmd` (see <>) you can omit `-z ` from all of the `bin/solr`/`bin\solr.cmd` commands below. -.*nix command -[source,text] +Start each Solr node with the Solr control script as shown in the examples below. Customize the values for the parameters shown as necessary and add any used in your system. + +If you created the SSL key without all DNS names or IP addresses on which Solr nodes run, you can tell Solr to skip hostname verification for inter-node communications by setting the `-Dsolr.ssl.checkPeerName=false` system property. + +[.dynamic-tabs] +-- +[example.tab-pane#cloud-unix] +==== +[.tab-label]*\*nix* +[source,terminal] ---- -bin/solr -cloud -s cloud/node2 -z localhost:2181 -p 7574 +$ bin/solr -cloud -s cloud/node1 -z server1:2181,server2:2181,server3:2181 -p 8984 ---- +==== -.Windows command -[source,text] +[example.tab-pane#cloud-windows] +==== +[.tab-label]*Windows* +[source,powershell] ---- -bin\solr.cmd -cloud -s cloud\node2 -z localhost:2181 -p 7574 +C:\> bin\solr.cmd -cloud -s cloud\node1 -z server1:2181,server2:2181,server3:2181 + ---- +==== +-- == Example Client Actions @@ -305,7 +305,7 @@ curl on OS X Mavericks (10.9) has degraded SSL support. For more information and The curl commands in the following sections will not work with the system `curl` on OS X Yosemite (10.10). Instead, the certificate supplied with the `-E` parameter must be in PKCS12 format, and the file supplied with the `--cacert` parameter must contain only the CA certificate, and no key (see <> for instructions on creating this file): [source,bash] -curl -E solr-ssl.keystore.p12:secret --cacert solr-ssl.cacert.pem ... +$ curl -E solr-ssl.keystore.p12:secret --cacert solr-ssl.cacert.pem ... 
==== @@ -333,9 +333,9 @@ The `create` action will pass the `SOLR_SSL_*` properties set in your include fi To get the resulting cluster status (again, if you have not enabled client authentication, remove the `-E solr-ssl.pem:secret` option): -[source,bash] +[source,terminal] ---- -curl -E solr-ssl.pem:secret --cacert solr-ssl.pem "https://localhost:8984/solr/admin/collections?action=CLUSTERSTATUS&indent=on" +$ curl -E solr-ssl.pem:secret --cacert solr-ssl.pem "https://localhost:8984/solr/admin/collections?action=CLUSTERSTATUS&indent=on" ---- You should get a response that looks like this: diff --git a/solr/solr-ref-guide/src/exporting-result-sets.adoc b/solr/solr-ref-guide/src/exporting-result-sets.adoc index b0565d87931d..8a072f24faa8 100644 --- a/solr/solr-ref-guide/src/exporting-result-sets.adoc +++ b/solr/solr-ref-guide/src/exporting-result-sets.adoc @@ -59,6 +59,34 @@ It can get worse otherwise. The `fl` property defines the fields that will be exported with the result set. Any of the field types that can be sorted (i.e., int, long, float, double, string, date, boolean) can be used in the field list. The fields can be single or multi-valued. However, returning scores and wildcards are not supported at this time. +=== Specifying the Local Streaming Expression + +The optional `expr` property defines a <> that allows documents to be processed locally before they are exported in the result set. + +Expressions have to use a special `input()` stream that represents original results from the `/export` handler. Output from the stream expression then becomes the output from the `/export` handler. The `&streamLocalOnly=true` flag is always set for this streaming expression. + +Only stream <> and <> are supported in these expressions - using any of the <> expressions except for the pre-defined `input()` will result in an error. + +Using stream expressions with the `/export` handler may result in dramatic performance improvements due to the local in-memory reduction of the number of documents to be returned. + +Here's an example of using `top` decorator for returning only top N results: +[source,text] +---- +http://localhost:8983/solr/core_name/export?q=my-query&sort=timestamp+desc,&fl=timestamp,reporter,severity&expr=top(n=2,input(),sort="timestamp+desc") +---- + +(Note that the sort spec in the `top` decorator must match the sort spec in the +handler parameter). + +Here's an example of using `unique` decorator: + +[source,text] +---- +http://localhost:8983/solr/core_name/export?q=my-query&sort=reporter+desc,&fl=reporter&expr=unique(input(),over="reporter") +---- + +(Note that the `over` parameter must use one of the fields requested in the `fl` parameter). + == Distributed Support See the section <> for distributed support. diff --git a/solr/solr-ref-guide/src/format-of-solr-xml.adoc b/solr/solr-ref-guide/src/format-of-solr-xml.adoc index 7d47a720459b..4b47b7780413 100644 --- a/solr/solr-ref-guide/src/format-of-solr-xml.adoc +++ b/solr/solr-ref-guide/src/format-of-solr-xml.adoc @@ -30,6 +30,7 @@ You can find `solr.xml` in your `$SOLR_HOME` directory (usually `server/solr` or ${solr.max.booleanClauses:1024} ${solr.sharedLib:} + ${solr.allowPaths:} ${host:} @@ -91,6 +92,9 @@ Currently non-operational. `sharedLib`:: Specifies the path to a common library directory that will be shared across all cores. Any JAR files in this directory will be added to the search path for Solr plugins. If the specified path is not absolute, it will be relative to `$SOLR_HOME`. 
Custom handlers may be placed in this directory. Note that specifying `sharedLib` will not remove `$SOLR_HOME/lib` from Solr's class path. +`allowPaths`:: +Solr will normally only access folders relative to `$SOLR_HOME`, `$SOLR_DATA_HOME` or `coreRootDir`. If you need to e.g. create a core outside of these paths, you can explicitly allow the path with `allowPaths`. It is a comma separated string of file system paths to allow. The special value of `*` will allow any path on the system. + `shareSchema`:: This attribute, when set to `true`, ensures that the multiple cores pointing to the same Schema resource file will be referring to the same IndexSchema Object. Sharing the IndexSchema Object makes loading the core faster. If you use this feature, make sure that no core-specific property is used in your Schema file. diff --git a/solr/solr-ref-guide/src/json-facet-api.adoc b/solr/solr-ref-guide/src/json-facet-api.adoc index d06d9c2caf16..5f636adc9297 100644 --- a/solr/solr-ref-guide/src/json-facet-api.adoc +++ b/solr/solr-ref-guide/src/json-facet-api.adoc @@ -910,6 +910,8 @@ The `relatedness(...)` function is used to "score" these relationships, relative Unlike most aggregation functions, the `relatedness(...)` function is aware of whether and how it's used in <>. It evaluates the query defining the current bucket _independently_ from its parent/ancestor buckets, and intersects those documents with a "Foreground Set" defined by the foreground query _combined with the ancestor buckets_. The result is then compared to a similar intersection done against the "Background Set" (defined exclusively by background query) to see if there is a positive, or negative, correlation between the current bucket and the Foreground Set, relative to the Background Set. +NOTE: The semantics of `relatedness(...)` in an `allBuckets` context is currently undefined. Accordingly, although the `relatedness(...)` stat may be specified for a facet request that also specifies `allBuckets:true`, the `allBuckets` bucket itself will not include a relatedness calculation. + NOTE: While it's very common to define the Background Set as `\*:*`, or some other super-set of the Foreground Query, it is not strictly required. The `relatedness(...)` function can be used to compare the statistical relatedness of sets of documents to orthogonal foreground/background queries. [[relatedness-options]] diff --git a/solr/solr-ref-guide/src/json-query-dsl.adoc b/solr/solr-ref-guide/src/json-query-dsl.adoc index e38ea0832fe2..3c77a5d6993f 100644 --- a/solr/solr-ref-guide/src/json-query-dsl.adoc +++ b/solr/solr-ref-guide/src/json-query-dsl.adoc @@ -341,7 +341,7 @@ include::{example-source-dir}JsonRequestApiTest.java[tag=solrj-ipod-query-bool-f == Additional Queries -Multiple additional queries might be specified under `queries` key with all syntax alternatives described above. Every entry might have multiple values in array. Notice that old-style referencing `"{!v=$query_name}"` picks only the first element in array ignoring everything beyond, e.g., if one changes the reference below from `"{!v=$electronic}"` to `"{!v=$manufacturers}"` it's equivalent to querying for `manu:apple`, ignoring the later query. These queries don't impact query result until explicit referencing. +Multiple additional queries might be specified under `queries` key with all syntax alternatives described above. Every entry might have multiple values in array. 
To reference these queries, one can use `{"param":"query_name"}` as well as the old-style reference `"{!v=$query_name}"`. Beware of the arity of these references: depending on the context, a reference might be resolved to only the first element of the array, ignoring the later elements. For example, if one changes the reference below from `{"param":"electronic"}` to `{"param":"manufacturers"}`, it's equivalent to querying for `manu:apple`, ignoring the later query. These queries don't impact the query result until explicitly referenced.
[source,bash]
----
curl -X POST http://localhost:8983/solr/techproducts/query -d '
    {"field": {"f":"manu", "query":"belkin"}}
   ]
  },
-  "query":"{!v=$electronic}"
+  "query":{"param":"electronic"}
}'
----
diff --git a/solr/solr-ref-guide/src/jwt-authentication-plugin.adoc b/solr/solr-ref-guide/src/jwt-authentication-plugin.adoc
index dbe6147c2345..5423e7791ed2 100644
--- a/solr/solr-ref-guide/src/jwt-authentication-plugin.adoc
+++ b/solr/solr-ref-guide/src/jwt-authentication-plugin.adoc
@@ -50,6 +50,7 @@ requireExp ; Fails requests that lacks an `exp` (expiry time) claim
algWhitelist ; JSON array with algorithms to accept: `HS256`, `HS384`, `HS512`, `RS256`, `RS384`, `RS512`, `ES256`, `ES384`, `ES512`, `PS256`, `PS384`, `PS512`, `none ; Default is to allow all algorithms
jwkCacheDur ; Duration of JWK cache in seconds ; `3600` (1 hour)
principalClaim ; What claim id to pull principal from ; `sub`
+rolesClaim ; What claim id to pull user roles from. The claim must then contain either a space-separated list of roles or a JSON array. The roles can then be used to define fine-grained access in an Authorization plugin ; By default the scopes from the `scope` claim are passed on as user roles
claimsMatch ; JSON object of claims (key) that must match a regular expression (value). Example: `{ "foo" : "A|B" }` will require the `foo` claim to be either "A" or "B". ;
adminUiScope ; Define what scope is requested when logging in from Admin UI ; If not defined, the first scope from `scope` parameter is used
redirectUris ; Valid location(s) for redirect after external authentication. Takes a string or array of strings. Must be the base URL of Solr, e.g., https://solr1.example.com:8983/solr/ and must match the list of redirect URIs registered with the Identity Provider beforehand. ; Defaults to empty list, i.e., any node is assumed to be a valid redirect target.
@@ -101,7 +102,7 @@ The next example shows configuring using https://openid.net/specs/openid-connect
  "class": "solr.JWTAuthPlugin",
  "wellKnownUrl": "https://idp.example.com/.well-known/openid-configuration",
  "clientId": "xyz",
-  "redirectUri": "https://my.solr.server:8983/solr/"
+  "redirectUris": "https://my.solr.server:8983/solr/"
}
}
----
diff --git a/solr/solr-ref-guide/src/major-changes-in-solr-9.adoc b/solr/solr-ref-guide/src/major-changes-in-solr-9.adoc
index cdb0b867468c..1e7774a03aa2 100644
--- a/solr/solr-ref-guide/src/major-changes-in-solr-9.adoc
+++ b/solr/solr-ref-guide/src/major-changes-in-solr-9.adoc
@@ -105,8 +105,18 @@ _(raw; not yet edited)_
* SOLR-11775: Return long value for facet count in Json Facet module irrespective of number of shards (hossman, Munendra S N)
+
+* SOLR-12823: Remove /clusterstate.json support, i.e., support for collections created with stateFormat=1, as well as support
+  for the Collection API MIGRATESTATEFORMAT action. Also removes support for the cluster property `legacyCloud` (now treated as always false).
+ === Upgrade Prerequisites in Solr 9 +* Upgrade all collections in stateFormat=1 to stateFormat=2 *before* upgrading to Solr 9, as Solr 9 does not support the +older format and no longer supports migrating collections from the older format to the current format (previously known +as stateFormat=2). +The upgrade is done with the Collection API MIGRATESTATEFORMAT action on a previous version of Solr. +See for example https://lucene.apache.org/solr/guide/8_5/cluster-node-management.html#migratestateformat[Solr 8.5 Ref Guide]. +// Can't link directly to .adoc file, need to link to 8.something ref guide as MIGRATESTATEFORMAT no longer exists in 9.0. + === Rolling Upgrades with Solr 9 === Reindexing After Upgrades in Solr 9 diff --git a/solr/solr-ref-guide/src/metrics-reporting.adoc b/solr/solr-ref-guide/src/metrics-reporting.adoc index c66017c6e98c..b319bb23904a 100644 --- a/solr/solr-ref-guide/src/metrics-reporting.adoc +++ b/solr/solr-ref-guide/src/metrics-reporting.adoc @@ -670,4 +670,4 @@ http://localhost:8983/solr/admin/metrics?regex=.*\.requests&group=core Request only "user.name" property of "system.properties" metric from registry "solr.jvm": [source,text] -http://localhost:8983/solr/admin/metrics?wt=xml?key=solr.jvm:system.properties:user.name +http://localhost:8983/solr/admin/metrics?wt=xml&key=solr.jvm:system.properties:user.name diff --git a/solr/solr-ref-guide/src/replica-management.adoc b/solr/solr-ref-guide/src/replica-management.adoc index cf508c3d9dc1..6dd03cc2da5b 100644 --- a/solr/solr-ref-guide/src/replica-management.adoc +++ b/solr/solr-ref-guide/src/replica-management.adoc @@ -26,6 +26,8 @@ Add one or more replicas to a shard in a collection. The node name can be specif The API uses the Autoscaling framework to find nodes that can satisfy the disk requirements for the new replica(s) but only when an Autoscaling preferences or policy is configured. Refer to <> section for more details. +WARNING: If the destination node is specified, this command will ignore the `maxShardsPerNode` property. + `/admin/collections?action=ADDREPLICA&collection=_collection_&shard=_shard_&node=_nodeName_` === ADDREPLICA Parameters @@ -158,32 +160,100 @@ http://localhost:8983/solr/admin/collections?action=addreplica&collection=gettin [[movereplica]] == MOVEREPLICA: Move a Replica to a New Node -This command moves a replica from one node to a new node. In case of shared filesystems the `dataDir` will be reused. +This command moves a replica from one node to another node by executing ADDREPLICA on the destination and then DELETEREPLICA on the source. If this command is interrupted or times out before the ADDREPLICA operation produces a replica in an active state, the DELETEREPLICA will not occur. Timeouts do not cancel the ADDREPLICA, and will result in extra replicas. In the case of shared filesystems, the `dataDir` will be reused. -The API uses the Autoscaling framework to find nodes that can satisfy the disk requirements for the replica to be moved but only when an Autoscaling policy is configured. Refer to <> section for more details. +If this command is used on a collection where more than one replica from the same shard exists on the same node, and the `shard` and `sourceNode` parameters match more than one replica, the replica selected is not deterministic (currently it's random).
-`/admin/collections?action=MOVEREPLICA&collection=collection&shard=shard&replica=replica&sourceNode=nodeName&targetNode=nodeName` +WARNING: MOVEREPLICA does not check the `maxShardsPerNode` setting, and may produce a collection that is in violation of `maxShardsPerNode`. === MOVEREPLICA Parameters `collection`:: The name of the collection. This parameter is required. +`targetNode`:: +The name of the destination node. This parameter is required. + +`sourceNode`:: +The name of the node that contains the replica to move. This parameter is required unless `replica` is specified. If `replica` is specified this parameter is ignored. + `shard`:: -The name of the shard that the replica belongs to. This parameter is required. +The name of the shard for which a replica should be moved. This parameter is required unless `replica` is specified. If `replica` is specified, this parameter is ignored. `replica`:: -The name of the replica. This parameter is required. +The name of the replica to move. This parameter is required unless `shard` and `sourceNode` are specified; when present, it takes precedence over those two parameters (see the sketch below). -`sourceNode`:: -The name of the node that contains the replica. This parameter is required. +`timeout`:: +The number of seconds to wait for the replica to be live in the new location before deleting the replica in the old location. Defaults to 600 seconds. Deletion will not occur and creation will not be rolled back in the event of a timeout, potentially leaving an extra replica. Presently, this parameter is ignored if the replica is an HDFS replica. -`targetNode`:: -The name of the destination node. This parameter is required. +`inPlaceMove`:: +For replicas that use shared filesystems, allow an 'in-place' move that reuses shared data. Defaults to `true`, but is ignored if the replica does not have the property `shared_storage` with a value of `true`. `async`:: Request ID to track this action which will be <>.
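+
+To illustrate the precedence described above, a minimal sketch of the two ways to identify the replica in a V1 call (hypothetical node and replica names):
+
+[source,bash]
+----
+# Identify the replica to move directly; shard and sourceNode would be ignored if also given.
+curl 'http://localhost:8983/solr/admin/collections?action=MOVEREPLICA&collection=test&replica=core_node6&targetNode=localhost:8984_solr'
+
+# Or let Solr pick a replica of shard1 that currently lives on the source node.
+curl 'http://localhost:8983/solr/admin/collections?action=MOVEREPLICA&collection=test&shard=shard1&sourceNode=localhost:8982_solr&targetNode=localhost:8984_solr'
+----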
+=== Examples using MOVEREPLICA + +[.dynamic-tabs] +-- + +[example.tab-pane#v2moveReplica] +==== +[.tab-label]*V2 API* +*Input* + +`POST /api/c/test` + +[source,json] +---- +{ + "move-replica":{ + "replica":"core_node6", + "targetNode": "localhost:8983_solr" + } +} +---- +*Output* + +[source,json] +---- +{ + "responseHeader": { + "status": 0, + "QTime": 3668 + }, + "success": "MOVEREPLICA action completed successfully, moved replica=test_shard1_replica_n5 at node=localhost:8982_solr to replica=test_shard1_replica_n7 at node=localhost:8983_solr" +} +---- +==== + +[example.tab-pane#v1moveReplica] +==== +[.tab-label]*V1 API* + +*Input* + +[source,text] +---- +http://localhost:8983/solr/admin/collections?action=MOVEREPLICA&collection=test&targetNode=localhost:8983_solr&replica=core_node6 +---- + +*Output* + +[source,json] +---- +{ + "responseHeader": { + "status": 0, + "QTime": 3668 + }, + "success": "MOVEREPLICA action completed successfully, moved replica=test_shard1_replica_n5 at node=localhost:8982_solr to replica=test_shard1_replica_n7 at node=localhost:8983_solr" +} +---- +==== +-- + + [[deletereplica]] == DELETEREPLICA: Delete a Replica diff --git a/solr/solr-ref-guide/src/requesthandlers-and-searchcomponents-in-solrconfig.adoc b/solr/solr-ref-guide/src/requesthandlers-and-searchcomponents-in-solrconfig.adoc index fab960691679..52e2788a12d5 100644 --- a/solr/solr-ref-guide/src/requesthandlers-and-searchcomponents-in-solrconfig.adoc +++ b/solr/solr-ref-guide/src/requesthandlers-and-searchcomponents-in-solrconfig.adoc @@ -169,3 +169,12 @@ Many of the other useful components are described in sections of this Guide for * `TermVectorComponent`, described in the section <>. * `QueryElevationComponent`, described in the section <>. * `TermsComponent`, described in the section <>. +* `RealTimeGetComponent`, described in the section <>. +* `ClusteringComponent`, described in the section <>. +* `SuggestComponent`, described in the section <>. +* `AnalyticsComponent`, described in the section <>. + +Other components that ship with Solr include: + +* `ResponseLogComponent`, used to record which documents are returned to the user via the Solr log, described in the {solr-javadocs}solr-core/org/apache/solr/handler/component/ResponseLogComponent.html[ResponseLogComponent] javadocs. +* `PhrasesIdentificationComponent`, used to identify & score "phrases" found in the input string, based on shingles in indexed fields, described in the {solr-javadocs}solr-core/org/apache/solr/handler/component/PhrasesIdentificationComponent.html[PhrasesIdentificationComponent] javadocs. diff --git a/solr/solr-ref-guide/src/rule-based-authorization-plugin.adoc b/solr/solr-ref-guide/src/rule-based-authorization-plugin.adoc index 99d09874795a..6eb63f55ef2a 100644 --- a/solr/solr-ref-guide/src/rule-based-authorization-plugin.adoc +++ b/solr/solr-ref-guide/src/rule-based-authorization-plugin.adoc @@ -16,7 +16,7 @@ // specific language governing permissions and limitations // under the License. -Solr's authentication plugins control whether users can access Solr in a binary fashion. A user is either authenticated, or they aren't. For more fine-grained access control, Solr's Rule-Based Authorization Plugin (hereafter, "RBAP") can be used. +Solr's authentication plugins control whether users can access Solr in a binary fashion. A user is either authenticated, or they aren't. For more fine-grained access control, Solr's Rule-Based Authorization Plugins (hereafter, "RBAP") can be used.
[CAUTION] ==== @@ -35,7 +35,10 @@ The users that RBAP sees come from whatever authentication plugin has been confi === Roles -Roles help bridge the gap between users and permissions. Users are assigned one or more roles, and permissions are then given to each of these roles in `security.json` +Roles help bridge the gap between users and permissions. The roles can be used with any of the authentication plugins or with a custom authentication plugin if you have created one. You will only need to ensure that logged-in users are mapped to the roles defined by the plugin. There are two implementations of the plugin, which differ only in how the user's roles are obtained: + +* `RuleBasedAuthorizationPlugin`: The role-to-user mappings must be defined explicitly in `security.json` for every possible authenticated user. +* `ExternalRoleRuleBasedAuthorizationPlugin`: The role-to-user mappings are managed externally. This plugin expects the AuthenticationPlugin to provide a Principal that has the roles information as well, implementing the `VerifiedUserRoles` interface. === Permissions @@ -43,7 +46,7 @@ Permissions control which roles (and consequently, which users) have access to p Administrators can use permissions from a list of predefined options or define their own custom permissions, and are free to mix and match both. -== Configuring the Rule-Based Authorization Plugin +== Configuring the Rule-Based Authorization Plugins Like all of Solr's security plugins, configuration for RBAP lives in a file or ZooKeeper node with the name `security.json`. See <> for more information on how to setup `security.json` in your cluster. @@ -54,15 +57,6 @@ Solr offers an <> for making changes to RBAP configuration. RBAP configuration consists of a small number of required configuration properties. Each of these lives under the `authorization` top level property in `security.json` class:: The authorization plugin to use. For RBAP, this value will always be `solr.RuleBasedAuthorizationPlugin` -user-role:: A mapping of individual users to the roles they belong to. The value of this property is a JSON map, where each property name is a user, and each property value is either the name of a single role or a JSON array of multiple roles that the specified user belongs to. For example: -+ -[source,json] ----- -"user-role": { - "user1": "role1", - "user2": ["role1", "role2"] -} ----- permissions:: A JSON array of permission rules used to restrict access to sections of Solr's API. For example: + [source,json] @@ -75,9 +69,21 @@ permissions:: A JSON array of permission rules used to restrict access to sectio + The syntax for individual permissions is more involved and is treated in greater detail <>. -=== Complete Example +A user's roles may come from the request itself, in which case you will use the `ExternalRoleRuleBasedAuthorizationPlugin` variant of RBAP. If you need to hardcode user-role mappings, use the `RuleBasedAuthorizationPlugin` and define the user-role mappings in `security.json` like this: -The example below shows how the configuration properties above can be used to achieve a typical (if simple) RBAP use-case. +user-role:: A mapping of individual users to the roles they belong to. The value of this property is a JSON map, where each property name is a user, and each property value is either the name of a single role or a JSON array of multiple roles that the specified user belongs to.
For example: ++ +[source,json] +---- +"user-role": { + "user1": "role1", + "user2": ["role1", "role2"] +} +---- + +=== Example for RuleBasedAuthorizationPlugin and BasicAuth + +This example `security.json` shows how the <> can work with the `RuleBasedAuthorizationPlugin` plugin: [source,json] ---- @@ -112,6 +118,35 @@ The example below shows how the configuration properties above can be used to ac Altogether, this example carves out two restricted areas. Only `admin-user` can access Solr's Authentication and Authorization APIs, and only `dev-user` can access their `dev-private` collection. All other APIs are left open, and can be accessed by both users. +=== Example for ExternalRoleRuleBasedAuthorizationPlugin with JWT auth + +This example `security.json` shows how the <>, which pulls user and user roles from JWT claims, can work with the `ExternalRoleRuleBasedAuthorizationPlugin` plugin: + +[source,json] +---- +{ +"authentication":{ + "class": "solr.JWTAuthPlugin", <1> + "jwksUrl": "https://my.key.server/jwk.json", <2> + "rolesClaim": "roles" <3> +}, +"authorization":{ + "class":"solr.ExternalRoleRuleBasedAuthorizationPlugin", <4> + "permissions":[{"name":"security-edit", + "role":"admin"}] <5> +}} +---- + +Let's walk through this example: + +<1> JWT Authentication plugin is enabled +<2> Public keys are pulled over HTTPS +<3> We expect each JWT token to contain a "roles" claim, which will be passed on to Authorization +<4> External Role Rule-based authorization plugin is enabled. +<5> The 'admin' role has been defined, and it has permission to edit security settings. + +Only requests from users having a JWT token with role "admin" will be granted the `security-edit` permission. + == Permissions Solr's Rule-Based Authorization plugin supports a flexible and powerful permission syntax. RBAP supports two types of permissions, each with a slightly different syntax. diff --git a/solr/solr-ref-guide/src/rule-based-replica-placement.adoc b/solr/solr-ref-guide/src/rule-based-replica-placement.adoc index 34f990cb7335..b22383b2d9aa 100644 --- a/solr/solr-ref-guide/src/rule-based-replica-placement.adoc +++ b/solr/solr-ref-guide/src/rule-based-replica-placement.adoc @@ -174,4 +174,4 @@ Rules are specified per collection during collection creation as request paramet snitch=class:EC2Snitch&rule=shard:*,replica:1,dc:dc1&rule=shard:*,replica:<2,dc:dc3 ---- -These rules are persisted in `clusterstate.json` in ZooKeeper and are available throughout the lifetime of the collection. This enables the system to perform any future node allocation without direct user interaction. The rules added during collection creation can be modified later using the <> API. +These rules are persisted in the collection's `state.json` in ZooKeeper and are available throughout the lifetime of the collection. This enables the system to perform any future node allocation without direct user interaction. The rules added during collection creation can be modified later using the <> API. diff --git a/solr/solr-ref-guide/src/securing-solr.adoc b/solr/solr-ref-guide/src/securing-solr.adoc index c3e17e9d982a..837256e88f7a 100644 --- a/solr/solr-ref-guide/src/securing-solr.adoc +++ b/solr/solr-ref-guide/src/securing-solr.adoc @@ -44,6 +44,7 @@ Authentication makes sure you know the identity of your users.
The authenticatio* <> * <> * <> +* <> // end::list-of-authentication-plugins[] === Authorization Plugins Authorization makes sure that only users with the necessary roles/permissions ca // tag::list-of-authorization-plugins[] * <> +* <> // end::list-of-authorization-plugins[] === Audit Logging Plugins diff --git a/solr/solr-ref-guide/src/shard-management.adoc b/solr/solr-ref-guide/src/shard-management.adoc index fa0712dde411..228d5f94545a 100644 --- a/solr/solr-ref-guide/src/shard-management.adoc +++ b/solr/solr-ref-guide/src/shard-management.adoc @@ -272,7 +272,7 @@ http://localhost:8983/solr/admin/collections?action=CREATESHARD&collection=anImp [[deleteshard]] == DELETESHARD: Delete a Shard -Deleting a shard will unload all replicas of the shard, remove them from `clusterstate.json`, and (by default) delete the instanceDir and dataDir for each replica. It will only remove shards that are inactive, or which have no range given for custom sharding. +Deleting a shard will unload all replicas of the shard, remove them from the collection's `state.json`, and (by default) delete the instanceDir and dataDir for each replica. It will only remove shards that are inactive, or which have no range given for custom sharding. `/admin/collections?action=DELETESHARD&shard=_shardID_&collection=_name_` diff --git a/solr/solr-ref-guide/src/solrcloud-autoscaling-trigger-actions.adoc b/solr/solr-ref-guide/src/solrcloud-autoscaling-trigger-actions.adoc index 3ad37726b103..099f99239923 100644 --- a/solr/solr-ref-guide/src/solrcloud-autoscaling-trigger-actions.adoc +++ b/solr/solr-ref-guide/src/solrcloud-autoscaling-trigger-actions.adoc @@ -29,12 +29,19 @@ commands which can re-balance the cluster in response to trigger events. The following parameters are configurable: `collections`:: -A comma-separated list of collection names. If this list is not empty then -the computed operations will only calculate collection operations that affect -listed collections and ignore any other collection operations for collections -not listed here. Note that non-collection operations are not affected by this. +A comma-separated list of collection names, or a selector on collection properties that can be used to filter collections for which the plan is computed. -Example configuration: +If a non-empty list or selector is specified, then the computed operations will only include collection operations that affect +matched collections and ignore collection operations for collections +that do not match. This does not affect non-collection operations. + +A collection selector is of the form `collections: {key1: value1, key2: value2, ...}` where the key can be any collection property such as `name`, `policy`, `numShards`, etc. +The value must match exactly and all specified properties must match for a collection to match. + +A collection selector is useful in a cluster where collections are added and removed frequently, and where only collections that +use a specific autoscaling policy should be affected.
+ +Example configurations: [source,json] ---- @@ -48,11 +55,11 @@ Example configuration: { "name" : "compute_plan", "class" : "solr.ComputePlanAction", - "collections" : "test1,test2", + "collections" : "test1,test2" }, { "name" : "execute_plan", - "class" : "solr.ExecutePlanAction", + "class" : "solr.ExecutePlanAction" } ] } @@ -63,6 +70,56 @@ In this example only collections `test1` and `test2` will be potentially replicated / moved to an added node, other collections will be ignored even if they cause policy violations. +[source,json] +---- +{ + "set-trigger" : { + "name" : "node_added_trigger", + "event" : "nodeAdded", + "waitFor" : "1s", + "enabled" : true, + "actions" : [ + { + "name" : "compute_plan", + "class" : "solr.ComputePlanAction", + "collections" : {"policy": "my_policy"} + }, + { + "name" : "execute_plan", + "class" : "solr.ExecutePlanAction" + } + ] + } +} +---- + +In this example only collections which use `my_policy` as their autoscaling policy will be potentially replicated / moved to an added node; other collections will be ignored even if they cause policy violations. + +[source,json] +---- +{ + "set-trigger" : { + "name" : "node_added_trigger", + "event" : "nodeAdded", + "waitFor" : "1s", + "enabled" : true, + "actions" : [ + { + "name" : "compute_plan", + "class" : "solr.ComputePlanAction", + "collections" : {"policy": "my_policy", "numShards" : "4"} + }, + { + "name" : "execute_plan", + "class" : "solr.ExecutePlanAction" + } + ] + } +} +---- + +In this example only collections which use `my_policy` as their autoscaling policy and that have `numShards` equal to `4` will be potentially replicated / moved to an added node; other collections will be ignored even if they cause policy violations. + == Execute Plan Action The `ExecutePlanAction` executes the Collection API commands emitted by the `ComputePlanAction` against diff --git a/solr/solrj/build.gradle b/solr/solrj/build.gradle index eb85711e2f00..e8c8a072d053 100644 --- a/solr/solrj/build.gradle +++ b/solr/solrj/build.gradle @@ -18,6 +18,8 @@ apply plugin: 'java-library' +description = 'Solrj - Solr Java Client' + dependencies { api 'org.slf4j:slf4j-api' implementation 'org.slf4j:jcl-over-slf4j' diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java index 11081bfd4846..01f577564231 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java @@ -1273,7 +1273,7 @@ public SolrDocumentList getById(Collection ids, SolrParams params) throw * @throws IOException If there is a low-level I/O error. * @throws SolrServerException if there is an error on the server */ - public abstract NamedList request(final SolrRequest request, String collection) + public abstract NamedList request(@SuppressWarnings({"rawtypes"})final SolrRequest request, String collection) throws SolrServerException, IOException; /** @@ -1286,7 +1286,7 @@ public abstract NamedList request(final SolrRequest request, String coll * @throws IOException If there is a low-level I/O error.
* @throws SolrServerException if there is an error on the server */ - public final NamedList request(final SolrRequest request) throws SolrServerException, IOException { + public final NamedList request(@SuppressWarnings({"rawtypes"})final SolrRequest request) throws SolrServerException, IOException { return request(request, null); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrRequest.java index 2730ad6d4896..31dcda09217d 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrRequest.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrRequest.java @@ -74,6 +74,7 @@ public enum METHOD { /**If set to true, every request that implements {@link V2RequestSupport} will be converted * to a V2 API call */ + @SuppressWarnings({"rawtypes"}) public SolrRequest setUseV2(boolean flag){ this.usev2 = flag; return this; @@ -81,6 +82,7 @@ public SolrRequest setUseV2(boolean flag){ /**If set to true use javabin instead of json (default) */ + @SuppressWarnings({"rawtypes"}) public SolrRequest setUseBinaryV2(boolean flag){ this.useBinaryV2 = flag; return this; @@ -90,6 +92,7 @@ public SolrRequest setUseBinaryV2(boolean flag){ private String basePath; + @SuppressWarnings({"rawtypes"}) public SolrRequest setBasicAuthCredentials(String user, String password) { this.basicAuthUser = user; this.basicAuthPwd = password; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrResponse.java index c9f2cc5eb583..43955d83b2a9 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrResponse.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrResponse.java @@ -55,6 +55,7 @@ public void writeMap(EntryWriter ew) throws IOException { } public Exception getException() { + @SuppressWarnings({"rawtypes"}) NamedList exp = (NamedList) getResponse().get("exception"); if (exp == null) { return null; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/V2RequestSupport.java b/solr/solrj/src/java/org/apache/solr/client/solrj/V2RequestSupport.java index 3ff13a6d307c..6e75749948fa 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/V2RequestSupport.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/V2RequestSupport.java @@ -26,5 +26,6 @@ public interface V2RequestSupport { * return V1 request object * */ + @SuppressWarnings({"rawtypes"}) SolrRequest getV2Request(); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/beans/DocumentObjectBinder.java b/solr/solrj/src/java/org/apache/solr/client/solrj/beans/DocumentObjectBinder.java index 0e0661578fa4..9dc10d85a7c5 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/beans/DocumentObjectBinder.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/beans/DocumentObjectBinder.java @@ -37,6 +37,7 @@ */ public class DocumentObjectBinder { + @SuppressWarnings({"rawtypes"}) private final Map> infocache = new ConcurrentHashMap<>(); public DocumentObjectBinder() { @@ -83,6 +84,7 @@ public SolrInputDocument toSolrInputDocument(Object obj) { if (field.dynamicFieldNamePatternMatcher != null && field.get(obj) != null && field.isContainedInMap) { + @SuppressWarnings({"unchecked"}) Map mapValue = (Map) field.get(obj); for (Map.Entry e : mapValue.entrySet()) { @@ -103,6 +105,7 @@ private void addChild(Object obj, DocField field, SolrInputDocument doc) { Object val = field.get(obj); if (val == null) return; if (val instanceof Collection) { + 
@SuppressWarnings({"rawtypes"}) Collection collection = (Collection) val; for (Object o : collection) { SolrInputDocument child = toSolrInputDocument(o); @@ -116,7 +119,7 @@ private void addChild(Object obj, DocField field, SolrInputDocument doc) { } } - private List getDocFields(Class clazz) { + private List getDocFields(@SuppressWarnings({"rawtypes"})Class clazz) { List fields = infocache.get(clazz); if (fields == null) { synchronized(infocache) { @@ -127,8 +130,9 @@ private List getDocFields(Class clazz) { } @SuppressForbidden(reason = "Needs access to possibly private @Field annotated fields/methods") - private List collectInfo(Class clazz) { + private List collectInfo(@SuppressWarnings({"rawtypes"})Class clazz) { List fields = new ArrayList<>(); + @SuppressWarnings({"rawtypes"}) Class superClazz = clazz; List members = new ArrayList<>(); @@ -159,6 +163,7 @@ private class DocField { private java.lang.reflect.Field field; private Method setter; private Method getter; + @SuppressWarnings({"rawtypes"}) private Class type; private boolean isArray; private boolean isList; @@ -230,6 +235,7 @@ private void storeType() { if (field != null) { type = field.getType(); } else { + @SuppressWarnings({"rawtypes"}) Class[] params = setter.getParameterTypes(); if (params.length != 1) { throw new BindingException("Invalid setter method (" + setter + @@ -325,7 +331,7 @@ private void populateChild(Type typ) { * Returns SolrDocument.getFieldValue for regular fields, * and Map> for a dynamic field. The key is all matching fieldName's. */ - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) private Object getFieldValue(SolrDocument solrDocument) { if (child != null) { List children = solrDocument.getChildDocuments(); @@ -406,6 +412,7 @@ private Object getFieldValue(SolrDocument solrDocument) { } } + @SuppressWarnings({"unchecked", "rawtypes"}) void inject(T obj, SolrDocument sdoc) { Object val = getFieldValue(sdoc); if(val == null) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/SolrCloudManager.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/SolrCloudManager.java index 4ce87d905c2e..dc844400ed32 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/SolrCloudManager.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/SolrCloudManager.java @@ -48,7 +48,7 @@ public interface SolrCloudManager extends SolrCloseable { // Solr-like methods - SolrResponse request(SolrRequest req) throws IOException; + SolrResponse request(@SuppressWarnings({"rawtypes"})SolrRequest req) throws IOException; byte[] httpRequest(String url, SolrRequest.METHOD method, Map headers, String payload, int timeout, boolean followRedirects) throws IOException; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/AddReplicaSuggester.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/AddReplicaSuggester.java index 87b831acb2c0..58ddb5cac1f7 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/AddReplicaSuggester.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/AddReplicaSuggester.java @@ -31,13 +31,16 @@ class AddReplicaSuggester extends Suggester { + @SuppressWarnings({"rawtypes"}) SolrRequest init() { SolrRequest operation = tryEachNode(true); if (operation == null) operation = tryEachNode(false); return operation; } + @SuppressWarnings({"rawtypes"}) SolrRequest tryEachNode(boolean strict) { + @SuppressWarnings({"unchecked"}) Set> shards = (Set>) 
hints.getOrDefault(Hint.COLL_SHARD, Collections.emptySet()); if (shards.isEmpty()) { throw new RuntimeException("add-replica requires 'collection' and 'shard'"); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/AutoScalingConfig.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/AutoScalingConfig.java index 366b891e1b3b..300b24a64096 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/AutoScalingConfig.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/AutoScalingConfig.java @@ -42,6 +42,7 @@ * Bean representation of autoscaling.json, which parses data * lazily. */ +@SuppressWarnings({"overrides"}) public class AutoScalingConfig implements MapWriter { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -58,6 +59,7 @@ public class AutoScalingConfig implements MapWriter { /** * Bean representation of trigger listener config. */ + @SuppressWarnings({"overrides"}) public static class TriggerListenerConfig implements MapWriter { public final String name; public final String trigger; @@ -138,6 +140,11 @@ public boolean equals(Object o) { return properties.equals(that.properties); } +// @Override +// public int hashCode() { +// throw new UnsupportedOperationException("TODO unimplemented"); +// } +// @Override public String toString() { return Utils.toJSONString(this); @@ -147,6 +154,7 @@ public String toString() { /** * Bean representation of trigger config. */ + @SuppressWarnings({"overrides"}) public static class TriggerConfig implements MapWriter { /** Trigger name. */ public final String name; @@ -183,6 +191,7 @@ public TriggerConfig(String name, Map properties) { } enabled = Boolean.parseBoolean(String.valueOf(this.properties.getOrDefault("enabled", "true"))); + @SuppressWarnings({"unchecked"}) List> newActions = (List>)this.properties.get("actions"); if (newActions != null) { this.actions = newActions.stream().map(ActionConfig::new).collect(collectingAndThen(toList(), Collections::unmodifiableList)); @@ -225,6 +234,10 @@ public boolean equals(Object o) { if (event != that.event) return false; return properties.equals(that.properties); } +// @Override +// public int hashCode() { +// throw new UnsupportedOperationException("TODO unimplemented"); +// } @Override public void writeMap(EntryWriter ew) throws IOException { @@ -249,6 +262,7 @@ public String toString() { /** * Bean representation of trigger action configuration. */ + @SuppressWarnings({"overrides"}) public static class ActionConfig implements MapWriter { /** Action name. */ public final String name; @@ -291,6 +305,11 @@ public boolean equals(Object o) { return properties.equals(that.properties); } +// @Override +// public int hashCode() { +// throw new UnsupportedOperationException("TODO unimplemented"); +// } + @Override public String toString() { return Utils.toJSONString(this); @@ -301,6 +320,7 @@ public String toString() { * Construct from bytes that represent a UTF-8 JSON string. * @param utf8 config data */ + @SuppressWarnings({"unchecked"}) public AutoScalingConfig(byte[] utf8) { this(utf8 != null && utf8.length > 0 ? (Map)Utils.fromJSON(utf8) : Collections.emptyMap()); } @@ -362,6 +382,7 @@ public Policy getPolicy() { /** * Get trigger configurations. */ + @SuppressWarnings({"unchecked"}) public Map getTriggerConfigs() { if (triggers == null) { if (jsonMap != null) { @@ -405,6 +426,7 @@ public boolean hasTriggerForEvents(TriggerEventType... 
types) { /** * Get listener configurations. */ + @SuppressWarnings({"unchecked"}) public Map getTriggerListenerConfigs() { if (listeners == null) { if (jsonMap != null) { @@ -428,6 +450,7 @@ public Map getTriggerListenerConfigs() { public Map getProperties() { if (properties == null) { if (jsonMap != null) { + @SuppressWarnings({"unchecked"}) Map map = (Map) jsonMap.get("properties"); if (map == null) { this.properties = Collections.emptyMap(); @@ -564,11 +587,17 @@ public boolean equals(Object o) { if (!getTriggerListenerConfigs().equals(that.getTriggerListenerConfigs())) return false; return getProperties().equals(that.getProperties()); } +// @Override +// public int hashCode() { +// throw new UnsupportedOperationException("TODO unimplemented"); +// } + private static List getList(String key, Map properties) { return getList(key, properties, null); } + @SuppressWarnings({"unchecked", "rawtypes"}) private static List getList(String key, Map properties, List defaultList) { if (defaultList == null) { defaultList = Collections.emptyList(); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java index 68d30b5c886a..b5c956a1197b 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java @@ -101,6 +101,7 @@ protected Clause(Clause clause, Function computedValueEvaluat this.nodeSetPresent = nodeSetPresent; } + @SuppressWarnings({"unchecked"}) private Clause(Map m) { derivedFrom = (Clause) m.remove(Clause.class.getName()); this.original = Utils.getDeepCopy(m, 10); @@ -154,6 +155,7 @@ private boolean parseNodeset(Map m) { String key = validateObjectInNodeset(m, (Map) o); parseCondition(key, o, m); } else if (o instanceof List) { + @SuppressWarnings({"rawtypes"}) List l = (List) o; if(l.size()<2) throwExp(m, "nodeset [] must have atleast 2 items"); if( checkMapArray(l, m)) return true; @@ -168,7 +170,8 @@ private boolean parseNodeset(Map m) { return true; } - private String validateObjectInNodeset(Map m, Map map) { + private String validateObjectInNodeset(@SuppressWarnings({"rawtypes"})Map m, + @SuppressWarnings({"rawtypes"})Map map) { if (map.size() != 1) { throwExp(m, "nodeset must only have one and only one key"); } @@ -180,7 +183,8 @@ private String validateObjectInNodeset(Map m, Map map) { return key; } - private boolean checkMapArray(List l, Map m) { + private boolean checkMapArray(@SuppressWarnings({"rawtypes"})List l, Map m) { + @SuppressWarnings({"rawtypes"}) List maps = null; for (Object o : l) { if (o instanceof Map) { @@ -192,7 +196,7 @@ private boolean checkMapArray(List l, Map m) { if (maps != null) { if (maps.size() != l.size()) throwExp(m, "all elements of nodeset must be Objects"); List tags = new ArrayList<>(maps.size()); - for (Map map : maps) { + for (@SuppressWarnings({"rawtypes"})Map map : maps) { String s = validateObjectInNodeset(m, map); if(key == null) key = s; if(!Objects.equals(key, s)){ @@ -222,6 +226,7 @@ private void doPostValidate(Condition... 
conditions) { } } + @SuppressWarnings({"unchecked"}) public static Clause create(String json) { return create((Map) Utils.fromJSONString(json)); } @@ -276,7 +281,8 @@ public boolean isPerCollectiontag() { return globalTag == null; } - void parseCondition(String s, Object o, Map m) { + @SuppressWarnings({"unchecked"}) + void parseCondition(String s, Object o, @SuppressWarnings({"rawtypes"})Map m) { if (IGNORE_TAGS.contains(s)) return; if (tag != null) { throwExp(m, "Only one tag other than collection, shard, replica is possible"); @@ -401,10 +407,11 @@ Condition parse(String s, Map m) { } } - public static void throwExp(Map clause, String msg, Object... args) { + public static void throwExp(@SuppressWarnings({"rawtypes"})Map clause, String msg, Object... args) { throw new IllegalArgumentException("syntax error in clause :" + toJSONString(clause) + " , msg: " + formatString(msg, args)); } + @SuppressWarnings({"unchecked", "rawtypes"}) private static List readListVal(Map m, List val, Type varType, String conditionName) { List list = val; list = (List) list.stream() @@ -466,6 +473,7 @@ List testGroupNodes(Policy.Session session, double[] deviations) { eval.collName = (String) collection.getValue(); Violation.Ctx ctx = new Violation.Ctx(this, session.matrix, eval); + @SuppressWarnings({"rawtypes"}) Set tags = getUniqueTags(session, eval); if (tags.isEmpty()) return Collections.emptyList(); @@ -505,6 +513,7 @@ List testGroupNodes(Policy.Session session, double[] deviations) { return ctx.allViolations; } + @SuppressWarnings({"unchecked", "rawtypes"}) private Set getUniqueTags(Policy.Session session, ComputedValueEvaluator eval) { Set tags = new HashSet(); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/DelegatingCloudManager.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/DelegatingCloudManager.java index 8f3b08b3116e..aa0e62e20804 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/DelegatingCloudManager.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/DelegatingCloudManager.java @@ -77,7 +77,7 @@ public TimeSource getTimeSource() { } @Override - public SolrResponse request(SolrRequest req) throws IOException { + public SolrResponse request(@SuppressWarnings({"rawtypes"})SolrRequest req) throws IOException { return delegate.request(req); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/DeleteNodeSuggester.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/DeleteNodeSuggester.java index cfff49ef5e5b..62bfbdef9824 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/DeleteNodeSuggester.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/DeleteNodeSuggester.java @@ -33,7 +33,9 @@ public CollectionParams.CollectionAction getAction() { } @Override + @SuppressWarnings({"rawtypes"}) SolrRequest init() { + @SuppressWarnings({"unchecked"}) Set srcNodes = (Set) hints.get(Hint.SRC_NODE); if (srcNodes.isEmpty()) { throw new RuntimeException("delete-node requires 'src_node' hint"); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/DeleteReplicaSuggester.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/DeleteReplicaSuggester.java index 9a942ad2f7da..20f38275007c 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/DeleteReplicaSuggester.java +++ 
b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/DeleteReplicaSuggester.java @@ -36,7 +36,9 @@ public CollectionParams.CollectionAction getAction() { } @Override + @SuppressWarnings({"rawtypes"}) SolrRequest init() { + @SuppressWarnings({"unchecked"}) Set> shards = (Set>) hints.getOrDefault(Hint.COLL_SHARD, Collections.emptySet()); if (shards.isEmpty()) { throw new RuntimeException("delete-replica requires 'collection' and 'shard'"); @@ -45,6 +47,7 @@ SolrRequest init() { throw new RuntimeException("delete-replica requires exactly one pair of 'collection' and 'shard'"); } Pair collShard = shards.iterator().next(); + @SuppressWarnings({"unchecked"}) Set counts = (Set) hints.getOrDefault(Hint.NUMBER, Collections.emptySet()); Integer count = null; if (!counts.isEmpty()) { @@ -54,6 +57,7 @@ SolrRequest init() { Number n = counts.iterator().next(); count = n.intValue(); } + @SuppressWarnings({"unchecked"}) Set replicas = (Set) hints.getOrDefault(Hint.REPLICA, Collections.emptySet()); String replica = null; if (!replicas.isEmpty()) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/MoveReplicaSuggester.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/MoveReplicaSuggester.java index d529922da391..49be0e3c62db 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/MoveReplicaSuggester.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/MoveReplicaSuggester.java @@ -34,12 +34,14 @@ public class MoveReplicaSuggester extends Suggester { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @Override + @SuppressWarnings({"rawtypes"}) SolrRequest init() { SolrRequest operation = tryEachNode(true); if (operation == null) operation = tryEachNode(false); return operation; } + @SuppressWarnings({"rawtypes"}) SolrRequest tryEachNode(boolean strict) { //iterate through elements and identify the least loaded List leastSeriousViolation = null; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/NoneSuggester.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/NoneSuggester.java index 2f6c3691058f..51bfc70e426e 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/NoneSuggester.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/NoneSuggester.java @@ -28,11 +28,13 @@ public static NoneSuggester get(Policy.Session session) { } @Override + @SuppressWarnings({"rawtypes"}) SolrRequest init() { return null; } @Override + @SuppressWarnings({"rawtypes"}) public SolrRequest getSuggestion() { return null; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Operand.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Operand.java index 58b72bb01099..dac28c654b1b 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Operand.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Operand.java @@ -80,6 +80,7 @@ public double _delta(double expected, double actual) { IN("", 0) { @Override public TestStatus match(Object ruleVal, Object testVal) { + @SuppressWarnings({"rawtypes"}) List l = (List) ruleVal; return (l.contains(testVal)) ? 
PASS: FAIL; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java index c137100ccf11..1c494f059e27 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java @@ -70,6 +70,7 @@ * Create a fresh new session for each use * */ +@SuppressWarnings({"overrides"}) public class Policy implements MapWriter { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -80,6 +81,7 @@ public class Policy implements MapWriter { public static final String CLUSTER_POLICY = "cluster-policy"; public static final String CLUSTER_PREFERENCES = "cluster-preferences"; public static final Set GLOBAL_ONLY_TAGS = Set.of("cores", CollectionAdminParams.WITH_COLLECTION); + @SuppressWarnings({"unchecked"}) public static final List DEFAULT_PREFERENCES = Collections.unmodifiableList( Arrays.asList( // NOTE - if you change this, make sure to update the solrcloud-autoscaling-overview.adoc which @@ -286,6 +288,11 @@ public boolean equals(Object o) { return getClusterPreferences().equals(policy.getClusterPreferences()); } +// @Override +// public int hashCode() { +// throw new UnsupportedOperationException("TODO unimplemented"); +// } + public static Map> clausesFromMap(Map>> map, List newParams) { Map> newPolicies = new HashMap<>(); map.forEach((s, l1) -> @@ -328,6 +335,7 @@ static void setApproxValuesAndSortNodes(List clusterPreferences, Lis }); } catch (Exception e) { try { + @SuppressWarnings({"rawtypes"}) Map m = Collections.singletonMap("diagnostics", (MapWriter) ew -> { PolicyHelper.writeNodes(ew, matrixCopy); ew.put("config", matrix.get(0).session.getPolicy()); @@ -382,7 +390,7 @@ static List insertColl(String coll, Collection conditions) { } public Session createSession(SolrCloudManager cloudManager) { - return new Session(cloudManager, this, null); + return createSession(cloudManager, null); } public Session createSession(SolrCloudManager cloudManager, Transaction tx) { @@ -550,15 +558,18 @@ public static class Session implements MapWriter { final SolrCloudManager cloudManager; final List matrix; final NodeStateProvider nodeStateProvider; - final int znodeVersion; - Set collections = new HashSet<>(); + final Set collections; final Policy policy; List expandedClauses; List violations = new ArrayList<>(); Transaction transaction; + /** + * This constructor creates a Session from the current Zookeeper collection, replica and node states. + */ Session(SolrCloudManager cloudManager, Policy policy, Transaction transaction) { + collections = new HashSet<>(); this.transaction = transaction; this.policy = policy; ClusterState state = null; @@ -569,7 +580,6 @@ public static class Session implements MapWriter { } catch (Exception e) { log.trace("-- session created, can't obtain cluster state", e); } - this.znodeVersion = state != null ? 
state.getZNodeVersion() : -1; this.nodes = new ArrayList<>(cloudManager.getClusterStateProvider().getLiveNodes()); this.cloudManager = cloudManager; for (String node : nodes) { @@ -584,8 +594,10 @@ public static class Session implements MapWriter { //if any collection has 'withCollection' irrespective of the node, the NodeStateProvider returns a map value Map vals = nodeStateProvider.getNodeValues(nodes.get(0), Collections.singleton("withCollection")); if (!vals.isEmpty() && vals.get("withCollection") != null) { + @SuppressWarnings({"unchecked"}) Map withCollMap = (Map) vals.get("withCollection"); if (!withCollMap.isEmpty()) { + @SuppressWarnings({"unchecked"}) Clause withCollClause = new Clause((Map)Utils.fromJSONString("{withCollection:'*' , node: '#ANY'}") , new Condition(NODE.tagName, "#ANY", Operand.EQUAL, null, null), new Condition(WITH_COLLECTION.tagName,"*" , Operand.EQUAL, null, null), true, null, false @@ -607,38 +619,108 @@ public static class Session implements MapWriter { applyRules(); } + /** + * Creates a new Session and updates the Rows in the internal matrix to reference this session. + */ private Session(List nodes, SolrCloudManager cloudManager, - List matrix, List expandedClauses, int znodeVersion, + List matrix, Set collections, List expandedClauses, NodeStateProvider nodeStateProvider, Policy policy, Transaction transaction) { this.transaction = transaction; this.policy = policy; this.nodes = nodes; this.cloudManager = cloudManager; + this.collections = collections; this.matrix = matrix; this.expandedClauses = expandedClauses; - this.znodeVersion = znodeVersion; this.nodeStateProvider = nodeStateProvider; for (Row row : matrix) row.session = this; } + /** + * Given a session (this one), creates a new one for placement simulations that retains all the relevant information, + * whether or not that info already made it to Zookeeper. + */ + public Session cloneToNewSession(SolrCloudManager cloudManager) { + NodeStateProvider nodeStateProvider = cloudManager.getNodeStateProvider(); + ClusterStateProvider clusterStateProvider = cloudManager.getClusterStateProvider(); + + List nodes = new ArrayList<>(clusterStateProvider.getLiveNodes()); + + // Copy all collections from old session, even those not yet in ZK state + Set collections = new HashSet<>(this.collections); + + // (shallow) copy the expanded clauses + List expandedClauses = new ArrayList<>(this.expandedClauses); + + List matrix = new ArrayList<>(nodes.size()); + Map copyNodes = new HashMap<>(); + for (Row oldRow: this.matrix) { + copyNodes.put(oldRow.node, oldRow.copy()); + } + for (String node : nodes) { + // Do we have a row for that node in this session? If yes, reuse without trying to fetch from cluster state (latest changes might not be there) + Row newRow = copyNodes.get(node); + if (newRow == null) { + // Dealing with a node that doesn't exist in this Session. Need to create related data from scratch. + // We pass null for the Session on purpose. The current (this) session is not the correct one for this Row. + // The correct session will be set when we build the new Session instance at the end of this method.
+ newRow = new Row(node, this.policy.getParams(), this.policy.getPerReplicaAttributes(), null, nodeStateProvider, cloudManager); + // Get info for collections on that node + Set collectionsOnNewNode = nodeStateProvider.getReplicaInfo(node, Collections.emptyList()).keySet(); + collections.addAll(collectionsOnNewNode); + + // Adjust policies to take into account new collections + for (String collection : collectionsOnNewNode) { + // We pass this.policy but it is not modified so will not impact this session being cloned + addClausesForCollection(this.policy, expandedClauses, clusterStateProvider, collection); + } + } + matrix.add(newRow); + } + + if (nodes.size() > 0) { + //if any collection has 'withCollection' irrespective of the node, the NodeStateProvider returns a map value + Map vals = nodeStateProvider.getNodeValues(nodes.get(0), Collections.singleton("withCollection")); + if (!vals.isEmpty() && vals.get("withCollection") != null) { + @SuppressWarnings({"unchecked"}) + Map withCollMap = (Map) vals.get("withCollection"); + if (!withCollMap.isEmpty()) { + @SuppressWarnings({"unchecked"}) + Clause withCollClause = new Clause((Map)Utils.fromJSONString("{withCollection:'*' , node: '#ANY'}") , + new Condition(NODE.tagName, "#ANY", Operand.EQUAL, null, null), + new Condition(WITH_COLLECTION.tagName,"*" , Operand.EQUAL, null, null), true, null, false + ); + expandedClauses.add(withCollClause); + } + } + } + + Collections.sort(expandedClauses); + + Session newSession = new Session(nodes, cloudManager, matrix, collections, expandedClauses, + nodeStateProvider, this.policy, this.transaction); + newSession.applyRules(); + + return newSession; + } void addClausesForCollection(ClusterStateProvider stateProvider, String collection) { addClausesForCollection(policy, expandedClauses, stateProvider, collection); } - public static void addClausesForCollection(Policy policy, List clauses, ClusterStateProvider stateProvider, String c) { - String p = stateProvider.getPolicyNameByCollection(c); + public static void addClausesForCollection(Policy policy, List clauses, ClusterStateProvider stateProvider, String collectionName) { + String p = stateProvider.getPolicyNameByCollection(collectionName); if (p != null) { List perCollPolicy = policy.getPolicies().get(p); if (perCollPolicy == null) { return; } } - clauses.addAll(mergePolicies(c, policy.getPolicies().getOrDefault(p, emptyList()), policy.getClusterPolicy())); + clauses.addAll(mergePolicies(collectionName, policy.getPolicies().getOrDefault(p, emptyList()), policy.getClusterPolicy())); } Session copy() { - return new Session(nodes, cloudManager, getMatrixCopy(), expandedClauses, znodeVersion, nodeStateProvider, policy, transaction); + return new Session(nodes, cloudManager, getMatrixCopy(), new HashSet<>(), expandedClauses, nodeStateProvider, policy, transaction); } public Row getNode(String node) { @@ -648,7 +730,7 @@ public Row getNode(String node) { List getMatrixCopy() { return matrix.stream() - .map(row -> row.copy(this)) + .map(row -> row.copy()) .collect(Collectors.toList()); } @@ -687,7 +769,6 @@ public Suggester getSuggester(CollectionAction action) { @Override public void writeMap(EntryWriter ew) throws IOException { - ew.put("znodeVersion", znodeVersion); for (Row row : matrix) { ew.put(row.node, row); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/PolicyHelper.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/PolicyHelper.java index be2f65d9cb5b..52ad5406f5cd 100644 --- 
a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/PolicyHelper.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/PolicyHelper.java @@ -75,6 +75,7 @@ public class PolicyHelper { private static final String POLICY_MAPPING_KEY = "PolicyHelper.policyMapping"; + @SuppressWarnings({"unchecked"}) private static ThreadLocal> getPolicyMapping(SolrCloudManager cloudManager) { return (ThreadLocal>) cloudManager.getObjectCache() .computeIfAbsent(POLICY_MAPPING_KEY, k -> new ThreadLocal<>()); @@ -121,17 +122,21 @@ public AutoScalingConfig getAutoScalingConfig() { policyMapping.set(optionalPolicyMapping); SessionWrapper sessionWrapper = null; - Policy.Session origSession = null; + try { try { SESSION_WRAPPPER_REF.set(sessionWrapper = getSession(delegatingManager)); } catch (Exception e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "unable to get autoscaling policy session", e); - } - origSession = sessionWrapper.session; + + Policy.Session origSession = sessionWrapper.session; // new session needs to be created to avoid side-effects from per-collection policies - Policy.Session session = new Policy.Session(delegatingManager, origSession.policy, origSession.transaction); + // TODO: refactor so cluster state cache is separate from storage of policies to avoid per cluster vs per collection interactions + // Need a Session that has all previous history of the original session, NOT filtered by what's present or not in Zookeeper + // (as does constructor Session(SolrCloudManager, Policy, Transaction)). + Policy.Session newSession = origSession.cloneToNewSession(delegatingManager); + Map diskSpaceReqd = new HashMap<>(); try { DocCollection coll = cloudManager.getClusterStateProvider().getCollection(collName); @@ -165,7 +170,7 @@ public AutoScalingConfig getAutoScalingConfig() { int idx = 0; for (Map.Entry e : typeVsCount.entrySet()) { for (int i = 0; i < e.getValue(); i++) { - Suggester suggester = session.getSuggester(ADDREPLICA) + Suggester suggester = newSession.getSuggester(ADDREPLICA) .hint(Hint.REPLICATYPE, e.getKey()) .hint(Hint.COLL_SHARD, new Pair<>(collName, shardName)); if (nodesList != null) { @@ -176,6 +181,7 @@ public AutoScalingConfig getAutoScalingConfig() { if (diskSpaceReqd.get(shardName) != null) { suggester.hint(Hint.MINFREEDISK, diskSpaceReqd.get(shardName)); } + @SuppressWarnings({"rawtypes"}) SolrRequest op = suggester.getSuggestion(); if (op == null) { String errorId = "AutoScaling.error.diagnostics." + System.nanoTime(); @@ -184,18 +190,23 @@ public AutoScalingConfig getAutoScalingConfig() { , handleExp(log, "", () -> Utils.writeJson(getDiagnostics(sessionCopy), new StringWriter(), true).toString())); // logOk throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, " No node can satisfy the rules " + - Utils.toJSONString(Utils.getDeepCopy(session.expandedClauses, 4, true) + " More details from logs in node : " + Utils.toJSONString(Utils.getDeepCopy(newSession.expandedClauses, 4, true) + " More details from logs in node : " + Utils.getMDCNode() + ", errorId : " + errorId)); } - session = suggester.getSession(); + newSession = suggester.getSession(); positions.add(new ReplicaPosition(shardName, ++idx, e.getKey(), op.getParams().get(NODE))); } } } + + // We're happy with the updated session based on the original one, so let's update what the wrapper would hand + // to the next computation that wants a session. 
+ sessionWrapper.update(newSession); } finally { policyMapping.remove(); + // We mark the wrapper (and its session) as being available to others. if (sessionWrapper != null) { - sessionWrapper.returnSession(origSession); + sessionWrapper.returnSession(); } } return positions; @@ -256,6 +267,7 @@ public static List getSuggestions(AutoScalingConfig au ctx.max = max; ctx.session = policy.createSession(cloudManager); String[] t = params == null ? null : params.getParams("type"); + @SuppressWarnings({"unchecked"}) List types = t == null? Collections.EMPTY_LIST: Arrays.asList(t); if(types.isEmpty() || types.contains(violation.name())) { @@ -311,11 +323,13 @@ private static void addMissingReplicas(SolrCloudManager cloudManager, Suggestion )); } + @SuppressWarnings({"unchecked"}) private static void addMissingReplicas(ReplicaCount count, DocCollection coll, String shard, Replica.Type type, Suggestion.Ctx ctx) { int delta = count.delta(coll.getExpectedReplicaCount(type, 0), type); for (; ; ) { if (!ctx.needMore()) return; if (delta >= 0) break; + @SuppressWarnings({"rawtypes"}) SolrRequest suggestion = ctx.addSuggestion( ctx.session.getSuggester(ADDREPLICA) .hint(Hint.REPLICATYPE, type) @@ -600,16 +614,21 @@ public int getRefCount() { */ public void returnSession(Policy.Session session) { this.update(session); + this.returnSession(); + } + + /** + * return this for later use without updating the internal Session for cases where it's easier to update separately + */ + public void returnSession() { refCount.incrementAndGet(); ref.returnSession(this); - } //all ops are executed now it can be destroyed public void release() { refCount.decrementAndGet(); ref.release(this); - } } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Preference.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Preference.java index e5e9599c300d..a464168c79d9 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Preference.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Preference.java @@ -29,6 +29,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +@SuppressWarnings({"overrides"}) public class Preference implements MapWriter { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -37,6 +38,7 @@ public class Preference implements MapWriter { final Policy.Sort sort; Preference next; final int idx; + @SuppressWarnings({"rawtypes"}) private final Map original; public Preference(Map m) { @@ -110,6 +112,7 @@ void setApproxVal(List tmpMatrix) { @Override public void writeMap(EntryWriter ew) throws IOException { for (Object o : original.entrySet()) { + @SuppressWarnings({"rawtypes"}) Map.Entry e = (Map.Entry) o; ew.put(String.valueOf(e.getKey()), e.getValue()); } @@ -130,6 +133,11 @@ public boolean equals(Object o) { return original.equals(that.original); } +// @Override +// public int hashCode() { +// throw new UnsupportedOperationException("TODO unimplemented"); +// } + public Policy.SortParam getName() { return name; } @@ -142,6 +150,7 @@ public String toString() { /** * @return an unmodifiable copy of the original map from which this object was constructed */ + @SuppressWarnings({"unchecked", "rawtypes"}) public Map getOriginal() { return Collections.unmodifiableMap(original); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaCount.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaCount.java 
index 8f39b641d5d5..1d40211ee19d 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaCount.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaCount.java @@ -24,6 +24,7 @@ import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.util.Utils; +@SuppressWarnings({"overrides"}) class ReplicaCount implements MapWriter { long nrt, tlog, pull; @@ -106,6 +107,11 @@ public boolean equals(Object obj) { return false; } +// @Override +// public int hashCode() { +// throw new UnsupportedOperationException("TODO unimplemented"); +// } + @Override public String toString() { return Utils.toJSONString(this); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaInfo.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaInfo.java index 09b8bfa68529..f3a2cde929f1 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaInfo.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaInfo.java @@ -35,7 +35,7 @@ import static org.apache.solr.common.ConditionalMapWriter.dedupeKeyPredicate; import static org.apache.solr.common.cloud.ZkStateReader.LEADER_PROP; - +@SuppressWarnings({"overrides"}) public class ReplicaInfo implements MapWriter { private final String name; private final String core, collection, shard; @@ -75,9 +75,10 @@ public ReplicaInfo(String name, String core, String coll, String shard, Replica. validate(); } + @SuppressWarnings({"unchecked"}) public ReplicaInfo(Map map) { this.name = map.keySet().iterator().next(); - Map details = (Map) map.get(name); + @SuppressWarnings({"rawtypes"})Map details = (Map) map.get(name); details = Utils.getDeepCopy(details, 4); this.collection = (String) details.remove("collection"); this.shard = (String) details.remove("shard"); @@ -208,6 +209,11 @@ public boolean equals(Object o) { } } +// @Override +// public int hashCode() { +// throw new UnsupportedOperationException("TODO unimplemented"); +// } + @Override public String toString() { return Utils.toJSONString(this); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaVariable.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaVariable.java index 675382a94757..5163d3a0c207 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaVariable.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaVariable.java @@ -156,6 +156,7 @@ public Object computeValue(Policy.Session session, Condition cv, String collecti } } + @SuppressWarnings({"unchecked", "rawtypes"}) private int getNumBuckets(Policy.Session session, Clause clause) { if (clause.getTag().getOperand() == Operand.IN) { return ((Collection) clause.getTag().val).size(); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Row.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Row.java index 2cc48ea55eb4..e2b6a55b57bf 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Row.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Row.java @@ -27,12 +27,15 @@ import java.util.List; import java.util.Map; import java.util.Random; +import java.util.Set; import java.util.concurrent.atomic.AtomicLong; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; import java.util.stream.Collectors; +import 
org.apache.solr.client.solrj.cloud.NodeStateProvider; +import org.apache.solr.client.solrj.cloud.SolrCloudManager; import org.apache.solr.common.MapWriter; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.ZkStateReader; @@ -57,18 +60,34 @@ public class Row implements MapWriter { boolean anyValueMissing = false; boolean isLive = true; Policy.Session session; + @SuppressWarnings({"rawtypes"}) Map globalCache; + @SuppressWarnings({"rawtypes"}) Map perCollCache; public Row(String node, List> params, List perReplicaAttributes, Policy.Session session) { + this(node, params, perReplicaAttributes, session, session.nodeStateProvider, session.cloudManager); + } + + /** + * Constructor that allows explicitly passing a {@link NodeStateProvider} and a {@link SolrCloudManager} in order not to + * use those obtained through the passed session. + *

+ * <p>Note the resulting row has a {@link Policy.Session} that may not be consistent with the rest of the Row's state. When rows are copied + * as part of a {@link Policy.Session} copy, the copied rows' sessions are eventually updated in + * {@link org.apache.solr.client.solrj.cloud.autoscaling.Policy.Session#Session(List, SolrCloudManager, List, Set, List, NodeStateProvider, Policy, Policy.Transaction)} + * once the new {@link Policy.Session} instance is available.</p>

    + */ + @SuppressWarnings({"rawtypes"}) + Row(String node, List> params, List perReplicaAttributes, Policy.Session session, + NodeStateProvider nsp, SolrCloudManager cloudManager) { this.session = session; - collectionVsShardVsReplicas = session.nodeStateProvider.getReplicaInfo(node, perReplicaAttributes); + collectionVsShardVsReplicas = nsp.getReplicaInfo(node, perReplicaAttributes); if (collectionVsShardVsReplicas == null) collectionVsShardVsReplicas = new HashMap<>(); this.node = node; cells = new Cell[params.size()]; - isLive = session.cloudManager.getClusterStateProvider().getLiveNodes().contains(node); + isLive = cloudManager.getClusterStateProvider().getLiveNodes().contains(node); List paramNames = params.stream().map(Pair::first).collect(Collectors.toList()); - Map vals = isLive ? session.nodeStateProvider.getNodeValues(node, paramNames) : Collections.emptyMap(); + Map vals = isLive ? nsp.getNodeValues(node, paramNames) : Collections.emptyMap(); for (int i = 0; i < params.size(); i++) { Pair pair = params.get(i); cells[i] = new Cell(i, pair.first(), Clause.validate(pair.first(), vals.get(pair.first()), false), null, pair.second(), this); @@ -80,7 +99,6 @@ public Row(String node, List> params, List p isAlreadyCopied = true; } - public static final Map cacheStats = new HashMap<>(); static class CacheEntry implements MapWriter { @@ -119,6 +137,7 @@ public void forEachShard(String collection, BiConsumer } + @SuppressWarnings({"unchecked"}) public R computeCacheIfAbsent(String cacheName, Function supplier) { R result = (R) globalCache.get(cacheName); if (result != null) { @@ -131,6 +150,7 @@ public R computeCacheIfAbsent(String cacheName, Function supplier } } + @SuppressWarnings({"unchecked", "rawtypes"}) public R computeCacheIfAbsent(String coll, String shard, String cacheName, Object key, Function supplier) { Map collMap = (Map) this.perCollCache.get(coll); if (collMap == null) this.perCollCache.put(coll, collMap = new HashMap()); @@ -150,9 +170,11 @@ public R computeCacheIfAbsent(String coll, String shard, String cacheName, O } - - public Row(String node, Cell[] cells, boolean anyValueMissing, Map>> collectionVsShardVsReplicas, boolean isLive, Policy.Session session, Map perRowCache, Map globalCache) { + public Row(String node, Cell[] cells, boolean anyValueMissing, + @SuppressWarnings({"rawtypes"}) Map>> collectionVsShardVsReplicas, boolean isLive, Policy.Session session, + @SuppressWarnings({"rawtypes"}) Map perRowCache, + @SuppressWarnings({"rawtypes"})Map globalCache) { this.session = session; this.node = node; this.isLive = isLive; @@ -175,7 +197,7 @@ public void writeMap(EntryWriter ew) throws IOException { ew.put("attributes", Arrays.asList(cells)); } - Row copy(Policy.Session session) { + Row copy() { return new Row(node, cells, anyValueMissing, collectionVsShardVsReplicas, isLive, session, this.globalCache, this.perCollCache); } @@ -251,6 +273,7 @@ Row addReplica(String coll, String shard, Replica.Type type, int recursionCount, boolean isAlreadyCopied = false; + @SuppressWarnings({"unchecked", "rawtypes"}) private void lazyCopyReplicas(String coll, String shard) { globalCache = new HashMap(); Map cacheCopy = new HashMap<>(perCollCache); @@ -271,6 +294,7 @@ boolean hasColl(String coll) { return collectionVsShardVsReplicas.containsKey(coll); } + @SuppressWarnings({"unchecked"}) public void createCollShard(Pair collShard) { Map> shardInfo = collectionVsShardVsReplicas.computeIfAbsent(collShard.first(), Utils.NEW_HASHMAP_FUN); if (collShard.second() != null) 
shardInfo.computeIfAbsent(collShard.second(), Utils.NEW_ARRAYLIST_FUN); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/SplitShardSuggester.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/SplitShardSuggester.java index a244a10dbbf6..b483033c0090 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/SplitShardSuggester.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/SplitShardSuggester.java @@ -37,7 +37,9 @@ public CollectionParams.CollectionAction getAction() { } @Override + @SuppressWarnings({"rawtypes"}) SolrRequest init() { + @SuppressWarnings({"unchecked"}) Set> shards = (Set>) hints.getOrDefault(Hint.COLL_SHARD, Collections.emptySet()); if (shards.isEmpty()) { throw new RuntimeException("split-shard requires 'collection' and 'shard'"); @@ -46,6 +48,7 @@ SolrRequest init() { throw new RuntimeException("split-shard requires exactly one pair of 'collection' and 'shard'"); } Pair collShard = shards.iterator().next(); + @SuppressWarnings({"unchecked"}) Map params = (Map)hints.getOrDefault(Hint.PARAMS, Collections.emptyMap()); Float splitFuzz = (Float)params.get(CommonAdminParams.SPLIT_FUZZ); CollectionAdminRequest.SplitShard req = CollectionAdminRequest.splitShard(collShard.first()).setShardName(collShard.second()); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggester.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggester.java index 26f1a9d640f6..28460cdba514 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggester.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggester.java @@ -62,6 +62,7 @@ public abstract class Suggester implements MapWriter { protected final EnumMap hints = new EnumMap<>(Hint.class); Policy.Session session; + @SuppressWarnings({"rawtypes"}) SolrRequest operation; boolean force; protected List originalViolations = new ArrayList<>(); @@ -88,6 +89,7 @@ boolean isLessDeviant() { } return false; } + @SuppressWarnings({"unchecked", "rawtypes"}) public Suggester hint(Hint hint, Object value) { hint.validator.accept(value); if (hint.multiValued) { @@ -143,9 +145,10 @@ protected boolean isNodeSuitableForReplicaAddition(Row targetRow, Row srcRow) { return true; } + @SuppressWarnings({"rawtypes"}) abstract SolrRequest init(); - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) public SolrRequest getSuggestion() { if (!isInitialized) { Set collections = (Set) hints.getOrDefault(Hint.COLL, Collections.emptySet()); @@ -226,14 +229,16 @@ List getMatrix() { public static class SuggestionInfo implements MapWriter { Suggestion.Type type; Violation violation; + @SuppressWarnings({"rawtypes"}) SolrRequest operation; - public SuggestionInfo(Violation violation, SolrRequest op, Suggestion.Type type) { + public SuggestionInfo(Violation violation, @SuppressWarnings({"rawtypes"})SolrRequest op, Suggestion.Type type) { this.violation = violation; this.operation = op; this.type = type; } + @SuppressWarnings({"rawtypes"}) public SolrRequest getOperation() { return operation; } @@ -344,6 +349,7 @@ protected boolean isAllowed(Object v, Hint hint) { Object hintVal = hints.get(hint); if (hintVal == null) return true; if (hint.multiValued) { + @SuppressWarnings({"rawtypes"}) Set set = (Set) hintVal; return set == null || set.contains(v); } else { @@ -356,11 +362,13 @@ public enum Hint { // collection shard pair // this should be a 
Pair , (collection,shard) COLL_SHARD(true, v -> { + @SuppressWarnings({"rawtypes"}) Collection c = v instanceof Collection ? (Collection) v : Collections.singleton(v); for (Object o : c) { if (!(o instanceof Pair)) { throw new RuntimeException("COLL_SHARD hint must use a Pair"); } + @SuppressWarnings({"rawtypes"}) Pair p = (Pair) o; if (p.first() == null || p.second() == null) { throw new RuntimeException("Both collection and shard must not be null"); @@ -371,6 +379,7 @@ public enum Hint { @Override public Object parse(Object v) { if (v instanceof Map) { + @SuppressWarnings({"rawtypes"}) Map map = (Map) v; return Pair.parse(map); } @@ -409,6 +418,7 @@ public Object parse(Object v) { Hint(boolean multiValued) { this(multiValued, v -> { + @SuppressWarnings({"rawtypes"}) Collection c = v instanceof Collection ? (Collection) v : Collections.singleton(v); for (Object o : c) { if (!(o instanceof String)) throw new RuntimeException("hint must be of type String"); @@ -453,6 +463,7 @@ public void writeMap(EntryWriter ew) throws IOException { ew.put("hints", (MapWriter) ew1 -> hints.forEach((hint, o) -> ew1.putNoEx(hint.toString(), o))); } + @SuppressWarnings({"rawtypes"}) protected Collection setupWithCollectionTargetNodes(Set collections, Set> s, String withCollection) { Collection originalTargetNodesCopy = null; if (withCollection != null) { @@ -477,6 +488,7 @@ protected Collection setupWithCollectionTargetNodes(Set collections, Set if (originalTargetNodesCopy != null && !originalTargetNodesCopy.isEmpty()) { // find intersection of the set of target nodes with the set of 'withCollection' nodes + @SuppressWarnings({"unchecked"}) Set set = (Set) hints.computeIfAbsent(Hint.TARGET_NODE, h -> new HashSet<>()); set.retainAll(withCollectionNodes); if (set.isEmpty()) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggestion.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggestion.java index dd83eb4d085d..29fea84c538d 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggestion.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggestion.java @@ -42,11 +42,14 @@ static class Ctx { public Policy.Session session; public Violation violation; List suggestions = new ArrayList<>(); + @SuppressWarnings({"rawtypes"}) SolrRequest addSuggestion(Suggester suggester) { return addSuggestion(suggester, Type.violation); } + @SuppressWarnings({"rawtypes"}) SolrRequest addSuggestion(Suggester suggester, Type type) { + @SuppressWarnings({"rawtypes"}) SolrRequest op = suggester.getSuggestion(); if (op != null) { session = suggester.getSession(); @@ -93,6 +96,7 @@ static void suggestNegativeViolations(Suggestion.Ctx ctx, Function, Suggester suggester = ctx.session.getSuggester(MOVEREPLICA) .hint(Suggester.Hint.COLL_SHARD, new Pair<>(ctx.violation.coll, shard)) .forceOperation(true); + @SuppressWarnings({"rawtypes"}) SolrRequest op = ctx.addSuggestion(suggester); if (op == null) continue; totalSuggestions++; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/UnsupportedSuggester.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/UnsupportedSuggester.java index ab5c28b07b61..0ba49ba178f0 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/UnsupportedSuggester.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/UnsupportedSuggester.java @@ -47,12 +47,14 @@ public CollectionParams.CollectionAction 
getAction() { } @Override + @SuppressWarnings({"rawtypes"}) SolrRequest init() { log.warn("Unsupported suggester for action {} with hings {} - no suggestion available", action, hints); return null; } @Override + @SuppressWarnings({"rawtypes"}) public SolrRequest getSuggestion() { return null; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Variable.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Variable.java index 22fd7a55ef02..4b4510751162 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Variable.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Variable.java @@ -237,6 +237,7 @@ public enum Type implements Variable { DISKTYPE; public final String tagName; + @SuppressWarnings({"rawtypes"}) public final Class type; public Meta meta; @@ -375,6 +376,7 @@ static Type get(String name) { @interface Meta { String name(); + @SuppressWarnings({"rawtypes"}) Class type(); String[] associatedPerNodeValue() default NULL; @@ -399,6 +401,7 @@ static Type get(String name) { String metricsKey() default NULL; + @SuppressWarnings({"rawtypes"}) Class implementation() default void.class; ComputedType[] computedValues() default ComputedType.NULL; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VariableBase.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VariableBase.java index 3001f11ca5c7..746faca3565f 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VariableBase.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VariableBase.java @@ -98,7 +98,9 @@ public static Type getTagType(String name) { return info; } + @SuppressWarnings({"unchecked"}) static Variable loadImpl(Meta meta, Type t) { + @SuppressWarnings({"rawtypes"}) Class implementation = meta.implementation(); if (implementation == void.class) implementation = VariableBase.class; try { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VersionedData.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VersionedData.java index 2aa4a9b2f23b..8fad7cb1fe79 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VersionedData.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/VersionedData.java @@ -28,6 +28,7 @@ /** * Immutable representation of binary data with version. 
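 * <p>(Editor's note, illustrative: like Preference, ReplicaCount and ReplicaInfo
 * earlier in this patch, this class overrides {@code equals} without
 * {@code hashCode}; the patch makes that explicit with
 * {@code @SuppressWarnings({"overrides"})} and a commented-out {@code hashCode}
 * stub rather than inventing a hash function.)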
*/ +@SuppressWarnings({"overrides"}) public class VersionedData implements MapWriter { private final int version; private final byte[] data; @@ -91,4 +92,10 @@ public boolean equals(Object o) { Objects.equals(owner, that.owner) && mode == that.mode; } + +// @Override +// public int hashCode() { +// throw new UnsupportedOperationException("TODO unimplemented"); +// } + } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/WithCollectionVariable.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/WithCollectionVariable.java index db507263b196..8cab9017a259 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/WithCollectionVariable.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/WithCollectionVariable.java @@ -42,6 +42,7 @@ public WithCollectionVariable(Type type) { @Override public boolean match(Object inputVal, Operand op, Object val, String name, Row row) { + @SuppressWarnings({"unchecked"}) Map withCollectionMap = (Map) inputVal; if (withCollectionMap == null || withCollectionMap.isEmpty()) return true; @@ -61,6 +62,7 @@ public void projectAddReplica(Cell cell, ReplicaInfo ri, Consumer withCollectionMap = (Map) cell.val; if (withCollectionMap == null || withCollectionMap.isEmpty()) return; @@ -86,6 +88,7 @@ public boolean addViolatingReplicas(Violation.Ctx ctx) { String node = ctx.currentViolation.node; for (Row row : ctx.allRows) { if (node.equals(row.node)) { + @SuppressWarnings({"unchecked"}) Map withCollectionMap = (Map) row.getVal("withCollection"); if (withCollectionMap != null) { row.forEachReplica(r -> { @@ -111,6 +114,7 @@ public void getSuggestions(Suggestion.Ctx ctx) { if (ctx.violation.getViolatingReplicas().isEmpty()) return; Map nodeValues = ctx.session.nodeStateProvider.getNodeValues(ctx.violation.node, Collections.singleton("withCollection")); + @SuppressWarnings({"unchecked"}) Map withCollectionsMap = (Map) nodeValues.get("withCollection"); if (withCollectionsMap == null) return; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseCloudSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseCloudSolrClient.java index 73dc6c170d6a..6a38f7c6a955 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseCloudSolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseCloudSolrClient.java @@ -227,7 +227,7 @@ protected BaseCloudSolrClient(boolean updatesToLeaders, boolean parallelUpdates, this.requestRLTGenerator = new RequestReplicaListTransformerGenerator(); } - /** Sets the cache ttl for DocCollection Objects cached . This is only applicable for collections which are persisted outside of clusterstate.json + /** Sets the cache ttl for DocCollection Objects cached. 
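 * <p>(Editor's illustration, not in the original javadoc: a hypothetical
 * {@code client.setCollectionCacheTTl(60)} would keep a cached DocCollection
 * for 60 seconds before a lookup treats it as stale.)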
* @param seconds ttl value in seconds */ public void setCollectionCacheTTl(int seconds){ @@ -463,6 +463,7 @@ public void registerDocCollectionWatcher(String collection, DocCollectionWatcher assertZKStateProvider().zkStateReader.registerDocCollectionWatcher(collection, watcher); } + @SuppressWarnings({"unchecked"}) private NamedList directUpdate(AbstractUpdateRequest request, String collection) throws SolrServerException { UpdateRequest updateRequest = (UpdateRequest) request; SolrParams params = request.getParams(); @@ -522,6 +523,7 @@ private NamedList directUpdate(AbstractUpdateRequest request, String col } final NamedList exceptions = new NamedList<>(); + @SuppressWarnings({"rawtypes"}) final NamedList shardResponses = new NamedList<>(routes.size()+1); // +1 for deleteQuery long start = System.nanoTime(); @@ -612,6 +614,7 @@ private NamedList directUpdate(AbstractUpdateRequest request, String col long end = System.nanoTime(); + @SuppressWarnings({"rawtypes"}) RouteResponse rr = condenseResponse(shardResponses, (int) TimeUnit.MILLISECONDS.convert(end - start, TimeUnit.NANOSECONDS)); rr.setRouteResponses(shardResponses); rr.setRoutes(routes); @@ -670,6 +673,7 @@ private Map> buildUrlMap(DocCollection col, ReplicaListTrans return urlMap; } + @SuppressWarnings({"unchecked", "rawtypes"}) protected T condenseResponse(NamedList response, int timeMillis, Supplier supplier) { T condensed = supplier.get(); int status = 0; @@ -766,18 +770,22 @@ protected T condenseResponse(NamedList response, int t return condensed; } + @SuppressWarnings({"rawtypes"}) public RouteResponse condenseResponse(NamedList response, int timeMillis) { return condenseResponse(response, timeMillis, RouteResponse::new); } + @SuppressWarnings({"rawtypes"}) public static class RouteResponse extends NamedList { + @SuppressWarnings({"rawtypes"}) private NamedList routeResponses; private Map routes; - public void setRouteResponses(NamedList routeResponses) { + public void setRouteResponses(@SuppressWarnings({"rawtypes"})NamedList routeResponses) { this.routeResponses = routeResponses; } + @SuppressWarnings({"rawtypes"}) public NamedList getRouteResponses() { return routeResponses; } @@ -829,7 +837,7 @@ public NamedList getThrowables() { } @Override - public NamedList request(SolrRequest request, String collection) throws SolrServerException, IOException { + public NamedList request(@SuppressWarnings({"rawtypes"})SolrRequest request, String collection) throws SolrServerException, IOException { // the collection parameter of the request overrides that of the parameter to this method String requestCollection = request.getCollection(); if (requestCollection != null) { @@ -847,7 +855,7 @@ public NamedList request(SolrRequest request, String collection) throws * there's a chance that the request will fail due to cached stale state, * which means the state must be refreshed from ZK and retried. 
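 * <p>(Editor's illustration: the state-version parameter assembled below is a
 * pipe-separated list of {@code collection:znodeVersion} pairs, e.g.
 * {@code "collA:12|collB:7"}, which lets the server detect that the client's
 * cached state is stale.)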
*/ - protected NamedList requestWithRetryOnStaleState(SolrRequest request, int retryCount, List inputCollections) + protected NamedList requestWithRetryOnStaleState(@SuppressWarnings({"rawtypes"})SolrRequest request, int retryCount, List inputCollections) throws SolrServerException, IOException { connect(); // important to call this before you start working with the ZkStateReader @@ -877,18 +885,16 @@ protected NamedList requestWithRetryOnStaleState(SolrRequest request, in throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Collection not found: " + requestedCollection); } int collVer = coll.getZNodeVersion(); - if (coll.getStateFormat()>1) { - if(requestedCollections == null) requestedCollections = new ArrayList<>(requestedCollectionNames.size()); - requestedCollections.add(coll); + if(requestedCollections == null) requestedCollections = new ArrayList<>(requestedCollectionNames.size()); + requestedCollections.add(coll); - if (stateVerParamBuilder == null) { - stateVerParamBuilder = new StringBuilder(); - } else { - stateVerParamBuilder.append("|"); // hopefully pipe is not an allowed char in a collection name - } - - stateVerParamBuilder.append(coll.getName()).append(":").append(collVer); + if (stateVerParamBuilder == null) { + stateVerParamBuilder = new StringBuilder(); + } else { + stateVerParamBuilder.append("|"); // hopefully pipe is not an allowed char in a collection name } + + stateVerParamBuilder.append(coll.getName()).append(":").append(collVer); } if (stateVerParamBuilder != null) { @@ -913,8 +919,10 @@ protected NamedList requestWithRetryOnStaleState(SolrRequest request, in if(o != null && o instanceof Map) { //remove this because no one else needs this and tests would fail if they are comparing responses resp.remove(resp.size()-1); + @SuppressWarnings({"rawtypes"}) Map invalidStates = (Map) o; for (Object invalidEntries : invalidStates.entrySet()) { + @SuppressWarnings({"rawtypes"}) Map.Entry e = (Map.Entry) invalidEntries; getDocCollection((String) e.getKey(), (Integer) e.getValue()); } @@ -1032,7 +1040,7 @@ protected NamedList requestWithRetryOnStaleState(SolrRequest request, in return resp; } - protected NamedList sendRequest(SolrRequest request, List inputCollections) + protected NamedList sendRequest(@SuppressWarnings({"rawtypes"})SolrRequest request, List inputCollections) throws SolrServerException, IOException { connect(); @@ -1207,6 +1215,7 @@ protected DocCollection getDocCollection(String collection, Integer expectedVers //it is readily available just return it return ref.get(); } + @SuppressWarnings({"rawtypes"}) List locks = this.locks; final Object lock = locks.get(Math.abs(Hash.murmurhash3_x86_32(collection, 0, collection.length(), 0) % locks.size())); DocCollection fetchedCol = null; @@ -1226,8 +1235,7 @@ protected DocCollection getDocCollection(String collection, Integer expectedVers cacheEntry.setRetriedAt();//we retried and found that it is the same version cacheEntry.maybeStale = false; } else { - if (fetchedCol.getStateFormat() > 1) - collectionStateCache.put(collection, new ExpiringCachedDocCollection(fetchedCol)); + collectionStateCache.put(collection, new ExpiringCachedDocCollection(fetchedCol)); } return fetchedCol; } @@ -1265,7 +1273,7 @@ public int getMinAchievedReplicationFactor(String collection, NamedList resp) { * the replication factor that was achieved in each shard involved in the request. * For single doc updates, there will be only one shard in the return value. 
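 * <p>(Editor's illustration: for an update that touched two shards, each
 * acknowledged by two replicas, the returned map would look like
 * {@code {shard1=2, shard2=2}}; a single-document update yields one entry.)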
*/ - @SuppressWarnings("rawtypes") + @SuppressWarnings({"unchecked", "rawtypes"}) public Map getShardReplicationFactor(String collection, NamedList resp) { connect(); @@ -1285,6 +1293,7 @@ public Map getShardReplicationFactor(String collection, NamedLis } } + @SuppressWarnings({"unchecked"}) Iterator> routeIter = routes.iterator(); while (routeIter.hasNext()) { Map.Entry next = routeIter.next(); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseHttpClusterStateProvider.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseHttpClusterStateProvider.java index 03fb2aabc81a..55fa3b5aea5b 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseHttpClusterStateProvider.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseHttpClusterStateProvider.java @@ -44,6 +44,7 @@ import static org.apache.solr.client.solrj.impl.BaseHttpSolrClient.*; +@SuppressWarnings({"unchecked"}) public abstract class BaseHttpClusterStateProvider implements ClusterStateProvider { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -108,7 +109,7 @@ public ClusterState.CollectionRef getState(String collection) { + " solrUrl(s) or zkHost(s)."); } - @SuppressWarnings({"rawtypes", "unchecked"}) + @SuppressWarnings({"rawtypes"}) private ClusterState fetchClusterState(SolrClient client, String collection, Map clusterProperties) throws SolrServerException, IOException, NotACollectionException { ModifiableSolrParams params = new ModifiableSolrParams(); if (collection != null) { @@ -138,8 +139,7 @@ private ClusterState fetchClusterState(SolrClient client, String collection, Map Set liveNodes = new HashSet((List)(cluster.get("live_nodes"))); this.liveNodes = liveNodes; liveNodesTimestamp = System.nanoTime(); - //TODO SOLR-11877 we don't know the znode path; CLUSTER_STATE is probably wrong leading to bad stateFormat - ClusterState cs = ClusterState.load(znodeVersion, collectionsMap, liveNodes, ZkStateReader.CLUSTER_STATE); + ClusterState cs = ClusterState.createFromCollectionMap(znodeVersion, collectionsMap, liveNodes); if (clusterProperties != null) { Map properties = (Map) cluster.get("properties"); if (properties != null) { @@ -180,6 +180,7 @@ public Set getLiveNodes() { } } + @SuppressWarnings({"rawtypes"}) private static Set fetchLiveNodes(SolrClient client) throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", "CLUSTERSTATUS"); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseHttpSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseHttpSolrClient.java index 96443201f072..6a5edc96d655 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseHttpSolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseHttpSolrClient.java @@ -49,15 +49,16 @@ public RemoteSolrException(String remoteHost, int code, String msg, Throwable th * it sends a proper payload back to the client */ public static class RemoteExecutionException extends RemoteSolrException { + @SuppressWarnings({"rawtypes"}) private NamedList meta; - public RemoteExecutionException(String remoteHost, int code, String msg, NamedList meta) { + public RemoteExecutionException(String remoteHost, int code, String msg, @SuppressWarnings({"rawtypes"})NamedList meta) { super(remoteHost, code, msg, null); this.meta = meta; } - public static RemoteExecutionException create(String host, NamedList errResponse) { + public static RemoteExecutionException 
create(String host, @SuppressWarnings({"rawtypes"})NamedList errResponse) { Object errObj = errResponse.get("error"); if (errObj != null) { Number code = (Number) getObjectByPath(errObj, true, Collections.singletonList("code")); @@ -71,6 +72,7 @@ public static RemoteExecutionException create(String host, NamedList errResponse } + @SuppressWarnings({"rawtypes"}) public NamedList getMetaData() { return meta; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java index 542c876f0c92..9906ddf4cc26 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java @@ -39,7 +39,7 @@ public class BinaryRequestWriter extends RequestWriter { @Override - public ContentWriter getContentWriter(SolrRequest req) { + public ContentWriter getContentWriter(@SuppressWarnings({"rawtypes"})SolrRequest req) { if (req instanceof UpdateRequest) { UpdateRequest updateRequest = (UpdateRequest) req; if (isEmpty(updateRequest)) return null; @@ -60,7 +60,7 @@ public String getContentType() { } @Override - public Collection getContentStreams(SolrRequest req) throws IOException { + public Collection getContentStreams(@SuppressWarnings({"rawtypes"})SolrRequest req) throws IOException { if (req instanceof UpdateRequest) { UpdateRequest updateRequest = (UpdateRequest) req; if (isEmpty(updateRequest) ) return null; @@ -77,7 +77,7 @@ public String getUpdateContentType() { } @Override - public void write(SolrRequest request, OutputStream os) throws IOException { + public void write(@SuppressWarnings({"rawtypes"})SolrRequest request, OutputStream os) throws IOException { if (request instanceof UpdateRequest) { UpdateRequest updateRequest = (UpdateRequest) request; new JavaBinUpdateRequestCodec().marshal(updateRequest, os); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryResponseParser.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryResponseParser.java index e033abb12a79..b146dcd9ee82 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryResponseParser.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryResponseParser.java @@ -45,6 +45,7 @@ public String getWriterType() { } @Override + @SuppressWarnings({"unchecked"}) public NamedList processResponse(InputStream body, String encoding) { try { return (NamedList) createCodec().unmarshal(body); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java index 24748caba5f9..cb9e7c2e80c7 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java @@ -135,7 +135,7 @@ public void setParallelUpdates(boolean parallelUpdates) { * @deprecated since Solr 8.0 */ @Deprecated - public RouteResponse condenseResponse(NamedList response, int timeMillis) { + public RouteResponse condenseResponse(@SuppressWarnings({"rawtypes"})NamedList response, int timeMillis) { return condenseResponse(response, timeMillis, RouteResponse::new); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ClusterStateProvider.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ClusterStateProvider.java index a7ce278e2c00..fcd9e29e9f02 100644 --- 
a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ClusterStateProvider.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ClusterStateProvider.java @@ -89,6 +89,7 @@ default DocCollection getCollection(String name) throws IOException{ * Obtain a cluster property, or the default value if it doesn't exist. */ default T getClusterProperty(String key, T defaultValue) { + @SuppressWarnings({"unchecked"}) T value = (T) getClusterProperties().get(key); if (value == null) return defaultValue; @@ -98,6 +99,7 @@ default T getClusterProperty(String key, T defaultValue) { /** * Obtain a cluster property, or null if it doesn't exist. */ + @SuppressWarnings({"unchecked"}) default T getClusterProperty(String propertyName) { return (T) getClusterProperties().get(propertyName); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java index ea0f773edc8b..a1334cbf1c37 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java @@ -209,6 +209,7 @@ public void run() { // Pull from the queue multiple times and streams over a single connection. // Exits on exception, interruption, or an empty queue to pull from. // + @SuppressWarnings({"unchecked"}) void sendUpdateStream() throws Exception { try { @@ -329,7 +330,7 @@ private void notifyQueueAndRunnersIfEmptyQueue() { // *must* be called with runners monitor held, e.g. synchronized(runners){ addRunner() } private void addRunner() { - MDC.put("ConcurrentUpdateHttp2SolrClient.url", client.getBaseURL()); + MDC.put("ConcurrentUpdateHttp2SolrClient.url", String.valueOf(client.getBaseURL())); // MDC can't have null value try { Runner r = new Runner(); runners.add(r); @@ -345,7 +346,7 @@ private void addRunner() { } @Override - public NamedList request(final SolrRequest request, String collection) + public NamedList request(@SuppressWarnings({"rawtypes"})final SolrRequest request, String collection) throws SolrServerException, IOException { if (!(request instanceof UpdateRequest)) { request.setBasePath(basePath); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java index c95cbd7991b4..2d0b75d3940a 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java @@ -171,6 +171,7 @@ public void setQueryParams(Set queryParams) { /** * Opens a connection and sends everything... */ + @SuppressWarnings({"unchecked"}) class Runner implements Runnable { volatile Thread thread = null; volatile boolean inPoll = false; @@ -225,6 +226,7 @@ public void interruptPoll() { // Pull from the queue multiple times and streams over a single connection. // Exits on exception, interruption, or an empty queue to pull from. 
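// (Editor's sketch, illustrative only: the loop described by this comment
// amounts to draining the queue over one open connection, roughly
//
//   while (!queue.isEmpty()) {
//     Update update = queue.poll(pollQueueTime, TimeUnit.MILLISECONDS);
//     if (update == null) break;      // nothing left to pull: runner exits
//     write(update);                  // hypothetical helper; the real body
//   }                                 // streams the request entity directly
//
// so a runner terminates naturally once the queue stays empty.)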
// + @SuppressWarnings({"unchecked"}) void sendUpdateStream() throws Exception { while (!queue.isEmpty()) { @@ -481,7 +483,7 @@ public void setCollection(String collection) { } @Override - public NamedList request(final SolrRequest request, String collection) + public NamedList request(@SuppressWarnings({"rawtypes"})final SolrRequest request, String collection) throws SolrServerException, IOException { if (!(request instanceof UpdateRequest)) { return client.request(request, collection); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/DelegationTokenHttpSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/DelegationTokenHttpSolrClient.java index 1b51f3bd6f27..25792aed65be 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/DelegationTokenHttpSolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/DelegationTokenHttpSolrClient.java @@ -88,7 +88,7 @@ protected DelegationTokenHttpSolrClient(String baseURL, } @Override - protected HttpRequestBase createMethod(final SolrRequest request, String collection) throws IOException, SolrServerException { + protected HttpRequestBase createMethod(@SuppressWarnings({"rawtypes"})final SolrRequest request, String collection) throws IOException, SolrServerException { SolrParams params = request.getParams(); if (params != null && params.getParams(DELEGATION_TOKEN_PARAM) != null) { throw new IllegalArgumentException(DELEGATION_TOKEN_PARAM + " parameter not supported"); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2ClusterStateProvider.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2ClusterStateProvider.java index 335684a4df5c..1f801c980750 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2ClusterStateProvider.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2ClusterStateProvider.java @@ -22,6 +22,7 @@ import org.apache.solr.client.solrj.SolrClient; +@SuppressWarnings({"unchecked"}) public class Http2ClusterStateProvider extends BaseHttpClusterStateProvider { final Http2SolrClient httpClient; final boolean closeClient; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java index 6a08816265fd..a8b7207a22e3 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java @@ -20,11 +20,11 @@ import java.io.Closeable; import java.io.IOException; import java.io.InputStream; -import java.io.UnsupportedEncodingException; import java.lang.invoke.MethodHandles; import java.net.ConnectException; import java.net.MalformedURLException; import java.net.URL; +import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Collection; @@ -69,6 +69,7 @@ import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.ObjectReleaseTracker; import org.apache.solr.common.util.SolrNamedThreadFactory; +import org.apache.solr.common.util.Utils; import org.eclipse.jetty.client.HttpClient; import org.eclipse.jetty.client.HttpClientTransport; import org.eclipse.jetty.client.ProtocolHandlers; @@ -96,7 +97,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.client.solrj.impl.BaseHttpSolrClient.*; +import static org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteExecutionException; +import static 
org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteSolrException; import static org.apache.solr.common.util.Utils.getObjectByPath; /** @@ -116,7 +118,7 @@ public class Http2SolrClient extends SolrClient { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final String AGENT = "Solr[" + Http2SolrClient.class.getName() + "] 2.0"; - private static final String UTF_8 = StandardCharsets.UTF_8.name(); + private static final Charset FALLBACK_CHARSET = StandardCharsets.UTF_8; private static final String DEFAULT_PATH = "/select"; private static final List errPath = Arrays.asList("metadata", "error-class"); @@ -242,7 +244,7 @@ public void close() { assert ObjectReleaseTracker.release(this); } - public boolean isV2ApiRequest(final SolrRequest request) { + public boolean isV2ApiRequest(@SuppressWarnings({"rawtypes"})final SolrRequest request) { return request instanceof V2Request || request.getPath().contains("/____v2"); } @@ -266,7 +268,7 @@ public OutStream(String origCollection, ModifiableSolrParams origParams, this.isXml = isXml; } - boolean belongToThisStream(SolrRequest solrRequest, String collection) { + boolean belongToThisStream(@SuppressWarnings({"rawtypes"})SolrRequest solrRequest, String collection) { ModifiableSolrParams solrParams = new ModifiableSolrParams(solrRequest.getParams()); if (!origParams.toNamedList().equals(solrParams.toNamedList()) || !StringUtils.equals(origCollection, collection)) { return false; @@ -285,7 +287,7 @@ public void flush() throws IOException { @Override public void close() throws IOException { if (isXml) { - write("".getBytes(StandardCharsets.UTF_8)); + write("".getBytes(FALLBACK_CHARSET)); } this.outProvider.getOutputStream().close(); } @@ -329,12 +331,12 @@ public OutStream initOutStream(String baseUrl, OutStream outStream = new OutStream(collection, origParams, provider, responseListener, isXml); if (isXml) { - outStream.write("".getBytes(StandardCharsets.UTF_8)); + outStream.write("".getBytes(FALLBACK_CHARSET)); } return outStream; } - public void send(OutStream outStream, SolrRequest req, String collection) throws IOException { + public void send(OutStream outStream, @SuppressWarnings({"rawtypes"})SolrRequest req, String collection) throws IOException { assert outStream.belongToThisStream(req, collection); this.requestWriter.write(req, outStream.outProvider.getOutputStream()); if (outStream.isXml) { @@ -351,7 +353,7 @@ public void send(OutStream outStream, SolrRequest req, String collection) throws byte[] content = String.format(Locale.ROOT, fmt, params.getBool(UpdateParams.WAIT_SEARCHER, false) + "") - .getBytes(StandardCharsets.UTF_8); + .getBytes(FALLBACK_CHARSET); outStream.write(content); } } @@ -359,7 +361,7 @@ public void send(OutStream outStream, SolrRequest req, String collection) throws outStream.flush(); } - public NamedList request(SolrRequest solrRequest, + public NamedList request(@SuppressWarnings({"rawtypes"})SolrRequest solrRequest, String collection, OnComplete onComplete) throws IOException, SolrServerException { Request req = makeRequest(solrRequest, collection); @@ -383,7 +385,7 @@ public void onComplete(Result result) { InputStream is = getContentAsInputStream(); assert ObjectReleaseTracker.track(is); rsp = processErrorsAndResponse(result.getResponse(), - parser, is, getEncoding(), isV2ApiRequest(solrRequest)); + parser, is, getMediaType(), getEncoding(), isV2ApiRequest(solrRequest)); onComplete.onSuccess(rsp); } catch (Exception e) { onComplete.onFailure(e); @@ 
-398,7 +400,15 @@ public void onComplete(Result result) { Response response = listener.get(idleTimeout, TimeUnit.MILLISECONDS); InputStream is = listener.getInputStream(); assert ObjectReleaseTracker.track(is); - return processErrorsAndResponse(response, parser, is, getEncoding(response), isV2ApiRequest(solrRequest)); + + ContentType contentType = getContentType(response); + String mimeType = null; + String encoding = null; + if (contentType != null) { + mimeType = contentType.getMimeType(); + encoding = contentType.getCharset() != null? contentType.getCharset().name() : null; + } + return processErrorsAndResponse(response, parser, is, mimeType, encoding, isV2ApiRequest(solrRequest)); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); @@ -421,43 +431,27 @@ public void onComplete(Result result) { } } - private String getEncoding(Response response) { + private ContentType getContentType(Response response) { String contentType = response.getHeaders().get(HttpHeader.CONTENT_TYPE); - if (contentType != null) { - String charset = "charset="; - int index = contentType.toLowerCase(Locale.ENGLISH).indexOf(charset); - if (index > 0) { - String encoding = contentType.substring(index + charset.length()); - // Sometimes charsets arrive with an ending semicolon. - int semicolon = encoding.indexOf(';'); - if (semicolon > 0) - encoding = encoding.substring(0, semicolon).trim(); - // Sometimes charsets are quoted. - int lastIndex = encoding.length() - 1; - if (encoding.charAt(0) == '"' && encoding.charAt(lastIndex) == '"') - encoding = encoding.substring(1, lastIndex).trim(); - return encoding; - } - } - return null; + return StringUtils.isEmpty(contentType)? null : ContentType.parse(contentType); } - private void setBasicAuthHeader(SolrRequest solrRequest, Request req) { + private void setBasicAuthHeader(@SuppressWarnings({"rawtypes"})SolrRequest solrRequest, Request req) { if (solrRequest.getBasicAuthUser() != null && solrRequest.getBasicAuthPassword() != null) { String userPass = solrRequest.getBasicAuthUser() + ":" + solrRequest.getBasicAuthPassword(); - String encoded = Base64.byteArrayToBase64(userPass.getBytes(StandardCharsets.UTF_8)); + String encoded = Base64.byteArrayToBase64(userPass.getBytes(FALLBACK_CHARSET)); req.header("Authorization", "Basic " + encoded); } } - private Request makeRequest(SolrRequest solrRequest, String collection) + private Request makeRequest(@SuppressWarnings({"rawtypes"})SolrRequest solrRequest, String collection) throws SolrServerException, IOException { Request req = createRequest(solrRequest, collection); decorateRequest(req, solrRequest); return req; } - private void decorateRequest(Request req, SolrRequest solrRequest) { + private void decorateRequest(Request req, @SuppressWarnings({"rawtypes"})SolrRequest solrRequest) { req.header(HttpHeader.ACCEPT_ENCODING, null); if (solrRequest.getUserPrincipal() != null) { req.attribute(REQ_PRINCIPAL_KEY, solrRequest.getUserPrincipal()); @@ -471,6 +465,7 @@ private void decorateRequest(Request req, SolrRequest solrRequest) { req.onComplete(listener); } + @SuppressWarnings({"unchecked"}) Map headers = solrRequest.getHeaders(); if (headers != null) { for (Map.Entry entry : headers.entrySet()) { @@ -485,7 +480,8 @@ private String changeV2RequestEndpoint(String basePath) throws MalformedURLExcep return new URL(oldURL.getProtocol(), oldURL.getHost(), oldURL.getPort(), newPath).toString(); } - private Request createRequest(SolrRequest solrRequest, String collection) throws 
IOException, SolrServerException { + @SuppressWarnings({"unchecked"}) + private Request createRequest(@SuppressWarnings({"rawtypes"})SolrRequest solrRequest, String collection) throws IOException, SolrServerException { if (solrRequest.getBasePath() == null && serverBaseUrl == null) throw new IllegalArgumentException("Destination node is not provided!"); @@ -626,7 +622,7 @@ private Request fillContentStream(Request req, Collection streams } } } - req.content(new FormContentProvider(fields, StandardCharsets.UTF_8)); + req.content(new FormContentProvider(fields, FALLBACK_CHARSET)); } return req; @@ -636,9 +632,11 @@ private boolean wantStream(final ResponseParser processor) { return processor == null || processor instanceof InputStreamResponseParser; } + @SuppressWarnings({"unchecked", "rawtypes"}) private NamedList processErrorsAndResponse(Response response, final ResponseParser processor, InputStream is, + String mimeType, String encoding, final boolean isV2Api) throws SolrServerException { @@ -647,10 +645,6 @@ private NamedList processErrorsAndResponse(Response response, // handle some http level checks before trying to parse the response int httpStatus = response.getStatus(); - String contentType; - contentType = response.getHeaders().get("content-type"); - if (contentType == null) contentType = ""; - switch (httpStatus) { case HttpStatus.SC_OK: case HttpStatus.SC_BAD_REQUEST: @@ -664,7 +658,7 @@ private NamedList processErrorsAndResponse(Response response, } break; default: - if (processor == null || "".equals(contentType)) { + if (processor == null || mimeType == null) { throw new RemoteSolrException(serverBaseUrl, httpStatus, "non ok status: " + httpStatus + ", message:" + response.getReason(), null); @@ -683,14 +677,14 @@ private NamedList processErrorsAndResponse(Response response, String procCt = processor.getContentType(); if (procCt != null) { String procMimeType = ContentType.parse(procCt).getMimeType().trim().toLowerCase(Locale.ROOT); - String mimeType = ContentType.parse(contentType).getMimeType().trim().toLowerCase(Locale.ROOT); if (!procMimeType.equals(mimeType)) { // unexpected mime type String msg = "Expected mime type " + procMimeType + " but got " + mimeType + "."; + String exceptionEncoding = encoding != null? 
encoding : FALLBACK_CHARSET.name(); try { - msg = msg + " " + IOUtils.toString(is, encoding); + msg = msg + " " + IOUtils.toString(is, exceptionEncoding); } catch (IOException e) { - throw new RemoteSolrException(serverBaseUrl, httpStatus, "Could not parse response with encoding " + encoding, e); + throw new RemoteSolrException(serverBaseUrl, httpStatus, "Could not parse response with encoding " + exceptionEncoding, e); } throw new RemoteSolrException(serverBaseUrl, httpStatus, msg, null); } @@ -711,13 +705,24 @@ private NamedList processErrorsAndResponse(Response response, NamedList metadata = null; String reason = null; try { - NamedList err = (NamedList) rsp.get("error"); - if (err != null) { - reason = (String) err.get("msg"); - if (reason == null) { - reason = (String) err.get("trace"); + if (error != null) { + reason = (String) Utils.getObjectByPath(error, false, Collections.singletonList("msg")); + if(reason == null) { + reason = (String) Utils.getObjectByPath(error, false, Collections.singletonList("trace")); + } + Object metadataObj = Utils.getObjectByPath(error, false, Collections.singletonList("metadata")); + if (metadataObj instanceof NamedList) { + metadata = (NamedList) metadataObj; + } else if (metadataObj instanceof List) { + // NamedList parsed as List convert to NamedList again + List list = (List) metadataObj; + metadata = new NamedList<>(list.size()/2); + for (int i = 0; i < list.size(); i+=2) { + metadata.add((String)list.get(i), (String) list.get(i+1)); + } + } else if (metadataObj instanceof Map) { + metadata = new NamedList((Map) metadataObj); } - metadata = (NamedList) err.get("metadata"); } } catch (Exception ex) {} if (reason == null) { @@ -726,10 +731,7 @@ private NamedList processErrorsAndResponse(Response response, .append("\n\n") .append("request: ") .append(response.getRequest().getMethod()); - try { - reason = java.net.URLDecoder.decode(msg.toString(), UTF_8); - } catch (UnsupportedEncodingException e) { - } + reason = java.net.URLDecoder.decode(msg.toString(), FALLBACK_CHARSET); } RemoteSolrException rss = new RemoteSolrException(serverBaseUrl, httpStatus, reason, null); if (metadata != null) rss.setMetadata(metadata); @@ -749,7 +751,7 @@ private NamedList processErrorsAndResponse(Response response, } @Override - public NamedList request(SolrRequest request, String collection) throws SolrServerException, IOException { + public NamedList request(@SuppressWarnings({"rawtypes"})SolrRequest request, String collection) throws SolrServerException, IOException { return request(request, collection, null); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClusterStateProvider.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClusterStateProvider.java index 07fd8f826ba9..947e4e7b0108 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClusterStateProvider.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClusterStateProvider.java @@ -22,6 +22,7 @@ import org.apache.http.client.HttpClient; import org.apache.solr.client.solrj.SolrClient; +@SuppressWarnings({"unchecked"}) public class HttpClusterStateProvider extends BaseHttpClusterStateProvider { private final HttpClient httpClient; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java index 32c192d7c2e1..78106295e19a 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java +++ 
b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java @@ -25,6 +25,7 @@ import java.net.MalformedURLException; import java.net.SocketTimeoutException; import java.net.URL; +import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.security.Principal; import java.util.Arrays; @@ -67,7 +68,6 @@ import org.apache.http.entity.mime.content.StringBody; import org.apache.http.message.BasicHeader; import org.apache.http.message.BasicNameValuePair; -import org.apache.http.util.EntityUtils; import org.apache.solr.client.solrj.ResponseParser; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrServerException; @@ -95,7 +95,7 @@ */ public class HttpSolrClient extends BaseHttpSolrClient { - private static final String UTF_8 = StandardCharsets.UTF_8.name(); + private static final Charset FALLBACK_CHARSET = StandardCharsets.UTF_8; private static final String DEFAULT_PATH = "/select"; private static final long serialVersionUID = -946812319974801896L; @@ -239,7 +239,7 @@ public void setQueryParams(Set queryParams) { * org.apache.solr.client.solrj.ResponseParser) */ @Override - public NamedList request(final SolrRequest request, String collection) + public NamedList request(@SuppressWarnings({"rawtypes"})final SolrRequest request, String collection) throws SolrServerException, IOException { ResponseParser responseParser = request.getResponseParser(); if (responseParser == null) { @@ -248,15 +248,16 @@ public NamedList request(final SolrRequest request, String collection) return request(request, responseParser, collection); } - public NamedList request(final SolrRequest request, final ResponseParser processor) throws SolrServerException, IOException { + public NamedList request(@SuppressWarnings({"rawtypes"})final SolrRequest request, final ResponseParser processor) throws SolrServerException, IOException { return request(request, processor, null); } - public NamedList request(final SolrRequest request, final ResponseParser processor, String collection) + public NamedList request(@SuppressWarnings({"rawtypes"})final SolrRequest request, final ResponseParser processor, String collection) throws SolrServerException, IOException { HttpRequestBase method = createMethod(request, collection); setBasicAuthHeader(request, method); if (request.getHeaders() != null) { + @SuppressWarnings({"unchecked"}) Map headers = request.getHeaders(); for (Map.Entry entry : headers.entrySet()) { method.setHeader(entry.getKey(), entry.getValue()); @@ -265,14 +266,14 @@ public NamedList request(final SolrRequest request, final ResponseParser return executeMethod(method, request.getUserPrincipal(), processor, isV2ApiRequest(request)); } - private boolean isV2ApiRequest(final SolrRequest request) { + private boolean isV2ApiRequest(@SuppressWarnings({"rawtypes"})final SolrRequest request) { return request instanceof V2Request || request.getPath().contains("/____v2"); } - private void setBasicAuthHeader(SolrRequest request, HttpRequestBase method) throws UnsupportedEncodingException { + private void setBasicAuthHeader(@SuppressWarnings({"rawtypes"})SolrRequest request, HttpRequestBase method) throws UnsupportedEncodingException { if (request.getBasicAuthUser() != null && request.getBasicAuthPassword() != null) { String userPass = request.getBasicAuthUser() + ":" + request.getBasicAuthPassword(); - String encoded = Base64.byteArrayToBase64(userPass.getBytes(UTF_8)); + String encoded = Base64.byteArrayToBase64(userPass.getBytes(FALLBACK_CHARSET)); 
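// (Editor's illustration: this is standard HTTP Basic auth per RFC 7617 -- the
// header value is "Basic " + base64(user + ":" + password). For example, the
// stock documentation credentials "solr:SolrRocks" encode to
// "c29scjpTb2xyUm9ja3M=". Encoding the bytes with the UTF-8 FALLBACK_CHARSET
// introduced above matches the previous behavior for ASCII credentials.)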
method.setHeader(new BasicHeader("Authorization", "Basic " + encoded)); } } @@ -288,7 +289,7 @@ public static class HttpUriRequestResponse { /** * @lucene.experimental */ - public HttpUriRequestResponse httpUriRequest(final SolrRequest request) + public HttpUriRequestResponse httpUriRequest(@SuppressWarnings({"rawtypes"})final SolrRequest request) throws SolrServerException, IOException { ResponseParser responseParser = request.getResponseParser(); if (responseParser == null) { @@ -300,7 +301,7 @@ public HttpUriRequestResponse httpUriRequest(final SolrRequest request) /** * @lucene.experimental */ - public HttpUriRequestResponse httpUriRequest(final SolrRequest request, final ResponseParser processor) throws SolrServerException, IOException { + public HttpUriRequestResponse httpUriRequest(@SuppressWarnings({"rawtypes"})final SolrRequest request, final ResponseParser processor) throws SolrServerException, IOException { HttpUriRequestResponse mrr = new HttpUriRequestResponse(); final HttpRequestBase method = createMethod(request, null); ExecutorService pool = ExecutorUtil.newMDCAwareFixedThreadPool(1, new SolrNamedThreadFactory("httpUriRequest")); @@ -340,7 +341,8 @@ static String changeV2RequestEndpoint(String basePath) throws MalformedURLExcept return new URL(oldURL.getProtocol(), oldURL.getHost(), oldURL.getPort(), newPath).toString(); } - protected HttpRequestBase createMethod(SolrRequest request, String collection) throws IOException, SolrServerException { + @SuppressWarnings({"unchecked"}) + protected HttpRequestBase createMethod(@SuppressWarnings({"rawtypes"})SolrRequest request, String collection) throws IOException, SolrServerException { if (request instanceof V2RequestSupport) { request = ((V2RequestSupport) request).getV2Request(); } @@ -474,7 +476,11 @@ public boolean isRepeatable() { } - private HttpEntityEnclosingRequestBase fillContentStream(SolrRequest request, Collection streams, ModifiableSolrParams wparams, boolean isMultipart, LinkedList postOrPutParams, String fullQueryUrl) throws IOException { + private HttpEntityEnclosingRequestBase fillContentStream( + @SuppressWarnings({"rawtypes"})SolrRequest request, + Collection streams, ModifiableSolrParams wparams, + boolean isMultipart, LinkedList postOrPutParams, + String fullQueryUrl) throws IOException { HttpEntityEnclosingRequestBase postOrPut = SolrRequest.METHOD.POST == request.getMethod() ? 
new HttpPost(fullQueryUrl) : new HttpPut(fullQueryUrl); @@ -533,6 +539,7 @@ private HttpEntityEnclosingRequestBase fillContentStream(SolrRequest request, Co private static final List errPath = Arrays.asList("metadata", "error-class");//Utils.getObjectByPath(err, false,"metadata/error-class") + @SuppressWarnings({"unchecked", "rawtypes"}) protected NamedList executeMethod(HttpRequestBase method, Principal userPrincipal, final ResponseParser processor, final boolean isV2Api) throws SolrServerException { method.addHeader("User-Agent", AGENT); @@ -568,12 +575,18 @@ protected NamedList executeMethod(HttpRequestBase method, Principal user // Read the contents entity = response.getEntity(); respBody = entity.getContent(); - Header ctHeader = response.getLastHeader("content-type"); - String contentType; - if (ctHeader != null) { - contentType = ctHeader.getValue(); - } else { - contentType = ""; + String mimeType = null; + Charset charset = null; + String charsetName = null; + + ContentType contentType = ContentType.get(entity); + if (contentType != null) { + mimeType = contentType.getMimeType().trim().toLowerCase(Locale.ROOT); + charset = contentType.getCharset(); + + if (charset != null) { + charsetName = charset.name(); + } } // handle some http level checks before trying to parse the response @@ -590,7 +603,7 @@ protected NamedList executeMethod(HttpRequestBase method, Principal user } break; default: - if (processor == null || "".equals(contentType)) { + if (processor == null || contentType == null) { throw new RemoteSolrException(baseUrl, httpStatus, "non ok status: " + httpStatus + ", message:" + response.getStatusLine().getReasonPhrase(), null); @@ -606,34 +619,26 @@ protected NamedList executeMethod(HttpRequestBase method, Principal user shouldClose = false; return rsp; } - + String procCt = processor.getContentType(); if (procCt != null) { String procMimeType = ContentType.parse(procCt).getMimeType().trim().toLowerCase(Locale.ROOT); - String mimeType = ContentType.parse(contentType).getMimeType().trim().toLowerCase(Locale.ROOT); if (!procMimeType.equals(mimeType)) { // unexpected mime type String msg = "Expected mime type " + procMimeType + " but got " + mimeType + "."; - Header encodingHeader = response.getEntity().getContentEncoding(); - String encoding; - if (encodingHeader != null) { - encoding = encodingHeader.getValue(); - } else { - encoding = "UTF-8"; // try UTF-8 - } + Charset exceptionCharset = charset != null? 
charset : FALLBACK_CHARSET; try { - msg = msg + " " + IOUtils.toString(respBody, encoding); + msg = msg + " " + IOUtils.toString(respBody, exceptionCharset); } catch (IOException e) { - throw new RemoteSolrException(baseUrl, httpStatus, "Could not parse response with encoding " + encoding, e); + throw new RemoteSolrException(baseUrl, httpStatus, "Could not parse response with encoding " + exceptionCharset, e); } throw new RemoteSolrException(baseUrl, httpStatus, msg, null); } } NamedList rsp = null; - String charset = EntityUtils.getContentCharSet(response.getEntity()); try { - rsp = processor.processResponse(respBody, charset); + rsp = processor.processResponse(respBody, charsetName); } catch (Exception e) { throw new RemoteSolrException(baseUrl, httpStatus, e.getMessage(), e); } @@ -645,13 +650,24 @@ protected NamedList executeMethod(HttpRequestBase method, Principal user NamedList metadata = null; String reason = null; try { - NamedList err = (NamedList) rsp.get("error"); - if (err != null) { - reason = (String) err.get("msg"); + if (error != null) { + reason = (String) Utils.getObjectByPath(error, false, Collections.singletonList("msg")); if(reason == null) { - reason = (String) err.get("trace"); + reason = (String) Utils.getObjectByPath(error, false, Collections.singletonList("trace")); + } + Object metadataObj = Utils.getObjectByPath(error, false, Collections.singletonList("metadata")); + if (metadataObj instanceof NamedList) { + metadata = (NamedList) metadataObj; + } else if (metadataObj instanceof List) { + // NamedList parsed as List convert to NamedList again + List list = (List) metadataObj; + metadata = new NamedList<>(list.size()/2); + for (int i = 0; i < list.size(); i+=2) { + metadata.add((String)list.get(i), (String) list.get(i+1)); + } + } else if (metadataObj instanceof Map) { + metadata = new NamedList((Map) metadataObj); } - metadata = (NamedList)err.get("metadata"); } } catch (Exception ex) {} if (reason == null) { @@ -660,7 +676,7 @@ protected NamedList executeMethod(HttpRequestBase method, Principal user .append("\n\n") .append("request: ") .append(method.getURI()); - reason = java.net.URLDecoder.decode(msg.toString(), UTF_8); + reason = java.net.URLDecoder.decode(msg.toString(), FALLBACK_CHARSET); } RemoteSolrException rss = new RemoteSolrException(baseUrl, httpStatus, reason, null); if (metadata != null) rss.setMetadata(metadata); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Krb5HttpClientBuilder.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Krb5HttpClientBuilder.java index a6b121775194..6c4b44a24ca9 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Krb5HttpClientBuilder.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Krb5HttpClientBuilder.java @@ -208,6 +208,7 @@ private static class SolrJaasConfiguration extends javax.security.auth.login.Con private javax.security.auth.login.Configuration baseConfig; // the com.sun.security.jgss appNames + @SuppressWarnings({"unchecked", "rawtypes"}) private Set initiateAppNames = new HashSet( Arrays.asList("com.sun.security.jgss.krb5.initiate", "com.sun.security.jgss.initiate")); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrClient.java index 63add2c5f561..bc4efbbf9f40 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrClient.java @@ -80,11 
+80,11 @@ public class LBHttpSolrClient extends LBSolrClient { */ @Deprecated public static class Req extends LBSolrClient.Req { - public Req(SolrRequest request, List servers) { + public Req(@SuppressWarnings({"rawtypes"})SolrRequest request, List servers) { super(request, servers); } - public Req(SolrRequest request, List servers, Integer numServersToTry) { + public Req(@SuppressWarnings({"rawtypes"})SolrRequest request, List servers, Integer numServersToTry) { super(request, servers, numServersToTry); } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBSolrClient.java index 176f07d32af7..1654e32a595e 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBSolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBSolrClient.java @@ -138,22 +138,24 @@ public boolean equals(Object obj) { public static class Req { + @SuppressWarnings({"rawtypes"}) protected SolrRequest request; protected List servers; protected int numDeadServersToTry; private final Integer numServersToTry; - public Req(SolrRequest request, List servers) { + public Req(@SuppressWarnings({"rawtypes"})SolrRequest request, List servers) { this(request, servers, null); } - public Req(SolrRequest request, List servers, Integer numServersToTry) { + public Req(@SuppressWarnings({"rawtypes"})SolrRequest request, List servers, Integer numServersToTry) { this.request = request; this.servers = servers; this.numDeadServersToTry = servers.size(); this.numServersToTry = numServersToTry; } + @SuppressWarnings({"rawtypes"}) public SolrRequest getRequest() { return request; } @@ -349,7 +351,7 @@ else if (skipped.size() < numDeadServersToTry) { /** * @return time allowed in nanos, returns -1 if no time_allowed is specified. */ - private long getTimeAllowedInNanos(final SolrRequest req) { + private long getTimeAllowedInNanos(@SuppressWarnings({"rawtypes"})final SolrRequest req) { SolrParams reqParams = req.getParams(); return reqParams == null ? -1 : TimeUnit.NANOSECONDS.convert(reqParams.getInt(CommonParams.TIME_ALLOWED, -1), TimeUnit.MILLISECONDS); @@ -576,12 +578,12 @@ public String removeSolrServer(String server) { * @throws IOException If there is a low-level I/O error. 
*/ @Override - public NamedList request(final SolrRequest request, String collection) + public NamedList request(@SuppressWarnings({"rawtypes"})final SolrRequest request, String collection) throws SolrServerException, IOException { return request(request, collection, null); } - public NamedList request(final SolrRequest request, String collection, + public NamedList request(@SuppressWarnings({"rawtypes"})final SolrRequest request, String collection, final Integer numServersToTry) throws SolrServerException, IOException { Exception ex = null; ServerWrapper[] serverList = aliveServerList; @@ -678,7 +680,8 @@ public NamedList request(final SolrRequest request, String collection, * @param request the request will be sent to the picked server * @return the picked server */ - protected ServerWrapper pickServer(ServerWrapper[] aliveServerList, SolrRequest request) { + protected ServerWrapper pickServer(ServerWrapper[] aliveServerList, + @SuppressWarnings({"rawtypes"})SolrRequest request) { int count = counter.incrementAndGet() & Integer.MAX_VALUE; return aliveServerList[count % aliveServerList.length]; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrClientCloudManager.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrClientCloudManager.java index fcefc2f0c31c..5ad7ff49272e 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrClientCloudManager.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrClientCloudManager.java @@ -61,22 +61,36 @@ public class SolrClientCloudManager implements SolrCloudManager { private final ZkStateReader zkStateReader; private final SolrZkClient zkClient; private final ObjectCache objectCache; + private final boolean closeObjectCache; private volatile boolean isClosed; public SolrClientCloudManager(DistributedQueueFactory queueFactory, CloudSolrClient solrClient) { + this(queueFactory, solrClient, null); + } + + public SolrClientCloudManager(DistributedQueueFactory queueFactory, CloudSolrClient solrClient, + ObjectCache objectCache) { this.queueFactory = queueFactory; this.solrClient = solrClient; this.zkStateReader = solrClient.getZkStateReader(); this.zkClient = zkStateReader.getZkClient(); this.stateManager = new ZkDistribStateManager(zkClient); this.isClosed = false; - this.objectCache = new ObjectCache(); + if (objectCache == null) { + this.objectCache = new ObjectCache(); + closeObjectCache = true; + } else { + this.objectCache = objectCache; + this.closeObjectCache = false; + } } @Override public void close() { isClosed = true; - IOUtils.closeQuietly(objectCache); + if (closeObjectCache) { + IOUtils.closeQuietly(objectCache); + } } @Override @@ -110,7 +124,7 @@ public DistribStateManager getDistribStateManager() { } @Override - public SolrResponse request(SolrRequest req) throws IOException { + public SolrResponse request(@SuppressWarnings({"rawtypes"})SolrRequest req) throws IOException { try { return req.process(solrClient); } catch (SolrServerException e) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrClientNodeStateProvider.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrClientNodeStateProvider.java index 4f63525b4d5e..1b394eec64c4 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrClientNodeStateProvider.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrClientNodeStateProvider.java @@ -66,7 +66,7 @@ import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.WITH_COLLECTION; /** - * + * The 
real {@link NodeStateProvider}, which communicates with Solr via SolrJ. */ public class SolrClientNodeStateProvider implements NodeStateProvider, MapWriter { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -78,6 +78,7 @@ public class SolrClientNodeStateProvider implements NodeStateProvider, MapWriter private final CloudSolrClient solrClient; protected final Map>>> nodeVsCollectionVsShardVsReplicaInfo = new HashMap<>(); private Map snitchSession = new HashMap<>(); + @SuppressWarnings({"rawtypes"}) private Map nodeVsTags = new HashMap<>(); private Map withCollectionsMap = new HashMap<>(); @@ -147,6 +148,7 @@ public void forEachReplica(String node, Consumer consumer){ @Override public Map>> getReplicaInfo(String node, Collection keys) { + @SuppressWarnings({"unchecked"}) Map>> result = nodeVsCollectionVsShardVsReplicaInfo.computeIfAbsent(node, Utils.NEW_HASHMAP_FUN); if (!keys.isEmpty()) { Map> metricsKeyVsTagReplica = new HashMap<>(); @@ -197,6 +199,7 @@ static void fetchReplicaMetrics(String solrNode, ClientSnitchCtx ctx, Map { Object v = Utils.getObjectByPath(rsp.nl, true, Arrays.asList("metrics", key)); if (tag instanceof Function) { + @SuppressWarnings({"unchecked"}) Pair p = (Pair) ((Function) tag).apply(v); ctx.getTags().put(p.first(), p.second()); } else { @@ -254,8 +257,8 @@ protected void getRemoteInfo(String solrNode, Set requestedTags, SnitchC prefixes.add("CONTAINER.fs.totalSpace"); } if (requestedTags.contains(CORES)) { - groups.add("solr.core"); - prefixes.add("CORE.coreName"); + groups.add("solr.node"); + prefixes.add("CONTAINER.cores"); } if (requestedTags.contains(SYSLOADAVG)) { groups.add("solr.jvm"); @@ -273,30 +276,31 @@ protected void getRemoteInfo(String solrNode, Set requestedTags, SnitchC try { SimpleSolrResponse rsp = snitchContext.invokeWithRetry(solrNode, CommonParams.METRICS_PATH, params); + NamedList metrics = (NamedList) rsp.nl.get("metrics"); - Map m = rsp.nl.asMap(4); if (requestedTags.contains(FREEDISK.tagName)) { - Object n = Utils.getObjectByPath(m, true, "metrics/solr.node/CONTAINER.fs.usableSpace"); + Object n = Utils.getObjectByPath(metrics, true, "solr.node/CONTAINER.fs.usableSpace"); if (n != null) ctx.getTags().put(FREEDISK.tagName, FREEDISK.convertVal(n)); } if (requestedTags.contains(TOTALDISK.tagName)) { - Object n = Utils.getObjectByPath(m, true, "metrics/solr.node/CONTAINER.fs.totalSpace"); + Object n = Utils.getObjectByPath(metrics, true, "solr.node/CONTAINER.fs.totalSpace"); if (n != null) ctx.getTags().put(TOTALDISK.tagName, TOTALDISK.convertVal(n)); } if (requestedTags.contains(CORES)) { + NamedList node = (NamedList) metrics.get("solr.node"); int count = 0; - Map cores = (Map) m.get("metrics"); - for (Object o : cores.keySet()) { - if (o.toString().startsWith("solr.core.")) count++; + for (String leafCoreMetricName : new String[]{"lazy", "loaded", "unloaded"}) { + Number n = (Number) node.get("CONTAINER.cores." 
+ leafCoreMetricName); + if (n != null) count += n.intValue(); } ctx.getTags().put(CORES, count); } if (requestedTags.contains(SYSLOADAVG)) { - Number n = (Number) Utils.getObjectByPath(m, true, "metrics/solr.jvm/os.systemLoadAverage"); + Number n = (Number) Utils.getObjectByPath(metrics, true, "solr.jvm/os.systemLoadAverage"); if (n != null) ctx.getTags().put(SYSLOADAVG, n.doubleValue() * 100.0d); } if (requestedTags.contains(HEAPUSAGE)) { - Number n = (Number) Utils.getObjectByPath(m, true, "metrics/solr.jvm/memory.heap.usage"); + Number n = (Number) Utils.getObjectByPath(metrics, true, "solr.jvm/memory.heap.usage"); if (n != null) ctx.getTags().put(HEAPUSAGE, n.doubleValue() * 100.0d); } } catch (Exception e) { @@ -333,6 +337,7 @@ public ClientSnitchCtx(SnitchInfo perSnitch, @Override + @SuppressWarnings({"rawtypes"}) public Map getZkJson(String path) throws KeeperException, InterruptedException { return Utils.getJson(zkClientClusterStateProvider.getZkStateReader().getZkClient(), path, true); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/StreamingBinaryResponseParser.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/StreamingBinaryResponseParser.java index 5c41f6b81a0a..75609090e6a4 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/StreamingBinaryResponseParser.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/StreamingBinaryResponseParser.java @@ -101,6 +101,7 @@ public void entry(DataEntry e) { EntryImpl entry = (EntryImpl) e; if( !entry.type().isContainer) return; if (e.isKeyValEntry() && entry.getTag() == Tag._SOLRDOCLST) { + @SuppressWarnings({"rawtypes"}) List l = (List) e.metadata(); e.listenContainer(fastCallback.initDocList( (Long) l.get(0), @@ -120,6 +121,7 @@ public void entry(DataEntry e) { private EntryListener docListener; + @SuppressWarnings({"unchecked"}) private NamedList streamDocs(InputStream body) { try (JavaBinCodec codec = new JavaBinCodec() { @@ -143,6 +145,7 @@ public SolrDocument readSolrDocument(DataInputInputStream dis) throws IOExceptio @Override public SolrDocumentList readSolrDocumentList(DataInputInputStream dis) throws IOException { SolrDocumentList solrDocs = new SolrDocumentList(); + @SuppressWarnings({"rawtypes"}) List list = (List) readVal(dis); solrDocs.setNumFound((Long) list.get(0)); solrDocs.setStart((Long) list.get(1)); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ZkClientClusterStateProvider.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ZkClientClusterStateProvider.java index 407349613e3f..3020c1aced53 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ZkClientClusterStateProvider.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ZkClientClusterStateProvider.java @@ -36,6 +36,7 @@ import org.slf4j.LoggerFactory; +@SuppressWarnings({"unchecked"}) public class ZkClientClusterStateProvider implements ClusterStateProvider { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ZkDistribStateManager.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ZkDistribStateManager.java index e2834db0476a..b15445b75eca 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ZkDistribStateManager.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ZkDistribStateManager.java @@ -181,6 +181,7 @@ public List multi(Iterable ops) throws BadVersionException, Alread } @Override + 
@SuppressWarnings({"unchecked"}) public AutoScalingConfig getAutoScalingConfig(Watcher watcher) throws InterruptedException, IOException { Map map = new HashMap<>(); Stat stat = new Stat(); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/ClassificationEvaluation.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/ClassificationEvaluation.java index f1b0bf98bda8..0096f55e3207 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/ClassificationEvaluation.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/ClassificationEvaluation.java @@ -36,26 +36,28 @@ public void count(int actual, int predicted) { } } - public void putToMap(Map map) { + @SuppressWarnings({"unchecked"}) + public void putToMap(@SuppressWarnings({"rawtypes"})Map map) { map.put("truePositive_i",truePositive); map.put("trueNegative_i",trueNegative); map.put("falsePositive_i",falsePositive); map.put("falseNegative_i",falseNegative); } + @SuppressWarnings({"rawtypes"}) public Map toMap() { HashMap map = new HashMap(); putToMap(map); return map; } - public static ClassificationEvaluation create(Map map) { + public static ClassificationEvaluation create(@SuppressWarnings({"rawtypes"})Map map) { ClassificationEvaluation evaluation = new ClassificationEvaluation(); evaluation.addEvaluation(map); return evaluation; } - public void addEvaluation(Map map) { + public void addEvaluation(@SuppressWarnings({"rawtypes"})Map map) { this.truePositive += (long) map.get("truePositive_i"); this.trueNegative += (long) map.get("trueNegative_i"); this.falsePositive += (long) map.get("falsePositive_i"); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java index 05ba98f28e1c..e2008be78095 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java @@ -28,6 +28,8 @@ import org.apache.solr.client.solrj.io.stream.metrics.MaxMetric; import org.apache.solr.client.solrj.io.stream.metrics.MeanMetric; import org.apache.solr.client.solrj.io.stream.metrics.MinMetric; +import org.apache.solr.client.solrj.io.stream.metrics.PercentileMetric; +import org.apache.solr.client.solrj.io.stream.metrics.StdMetric; import org.apache.solr.client.solrj.io.stream.metrics.SumMetric; public class Lang { @@ -103,6 +105,8 @@ public static void register(StreamFactory streamFactory) { .withFunctionName("max", MaxMetric.class) .withFunctionName("avg", MeanMetric.class) .withFunctionName("sum", SumMetric.class) + .withFunctionName("per", PercentileMetric.class) + .withFunctionName("std", StdMetric.class) .withFunctionName("count", CountMetric.class) // tuple manipulation operations diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/ModelCache.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/ModelCache.java index 1d7e46fa4ef2..a77380a1ccac 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/ModelCache.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/ModelCache.java @@ -136,7 +136,7 @@ public LRU(int maxSize) { this.maxSize = maxSize; } - public boolean removeEldestEntry(Map.Entry eldest) { + public boolean removeEldestEntry(@SuppressWarnings({"rawtypes"})Map.Entry eldest) { if(size()> maxSize) { return true; } else { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/Tuple.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/Tuple.java index 56d86fe260df..de05a69020cd 100644 --- 
a/solr/solrj/src/java/org/apache/solr/client/solrj/io/Tuple.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/Tuple.java @@ -25,6 +25,7 @@ import java.util.Map; import org.apache.solr.common.MapWriter; +import org.apache.solr.common.params.StreamParams; /** * A simple abstraction of a record containing key/value pairs. @@ -40,28 +41,61 @@ public class Tuple implements Cloneable, MapWriter { * The EOF Tuple will not contain a record from the stream, but it may contain * metrics/aggregates gathered by underlying streams. * */ - public boolean EOF; + /** + * When EXCEPTION field is true the Tuple marks an exception in the stream + * and the corresponding "EXCEPTION" field contains a related message. + */ public boolean EXCEPTION; - public Map fields = new HashMap(); + /** + * Tuple fields. + * @deprecated use {@link #getFields()} instead of this public field. + */ + @Deprecated + public Map fields = new HashMap<>(2); + /** + * External serializable field names. + * @deprecated use {@link #getFieldNames()} instead of this public field. + */ + @Deprecated public List fieldNames; + /** + * Mapping of external field names to internal tuple field names. + * @deprecated use {@link #getFieldLabels()} instead of this public field. + */ + @Deprecated public Map fieldLabels; - public Tuple(){ + public Tuple() { // just an empty tuple } - - public Tuple(Map fields) { - if(fields.containsKey("EOF")) { - EOF = true; - } - if(fields.containsKey("EXCEPTION")){ - EXCEPTION = true; + /** + * A copy constructor. + * @param fields map containing keys and values to be copied to this tuple + */ + public Tuple(Map fields) { + for (Map.Entry entry : fields.entrySet()) { + put(entry.getKey(), entry.getValue()); } + } - this.fields.putAll(fields); + /** + * Constructor that accepts an even number of arguments as key / value pairs. + * @param fields a list of key / value pairs, with keys at odd and values at + * even positions. + */ + public Tuple(Object... 
fields) { + if (fields == null) { + return; + } + if ((fields.length % 2) != 0) { + throw new RuntimeException("must have a matching number of key-value pairs"); + } + for (int i = 0; i < fields.length; i += 2) { + put(fields[i], fields[i + 1]); + } } public Object get(Object key) { @@ -70,9 +104,14 @@ public Object get(Object key) { public void put(Object key, Object value) { this.fields.put(key, value); + if (key.equals(StreamParams.EOF)) { + EOF = true; + } else if (key.equals(StreamParams.EXCEPTION)) { + EXCEPTION = true; + } } - - public void remove(Object key){ + + public void remove(Object key) { this.fields.remove(key); } @@ -80,16 +119,16 @@ public String getString(Object key) { return String.valueOf(this.fields.get(key)); } - public String getException(){ return (String)this.fields.get("EXCEPTION"); } + public String getException() { return (String)this.fields.get(StreamParams.EXCEPTION); } public Long getLong(Object key) { Object o = this.fields.get(key); - if(o == null) { + if (o == null) { return null; } - if(o instanceof Long) { + if (o instanceof Long) { return (Long) o; } else if (o instanceof Number) { return ((Number)o).longValue(); @@ -115,6 +154,7 @@ public Boolean getBool(Object key) { } } + @SuppressWarnings({"unchecked"}) public List getBools(Object key) { return (List) this.fields.get(key); } @@ -135,6 +175,7 @@ public Date getDate(Object key) { } } + @SuppressWarnings({"unchecked"}) public List getDates(Object key) { List vals = (List) this.fields.get(key); if (vals == null) return null; @@ -149,11 +190,11 @@ public List getDates(Object key) { public Double getDouble(Object key) { Object o = this.fields.get(key); - if(o == null) { + if (o == null) { return null; } - if(o instanceof Double) { + if (o instanceof Double) { return (Double)o; } else { //Attempt to parse the double @@ -161,51 +202,97 @@ public Double getDouble(Object key) { } } + @SuppressWarnings({"unchecked"}) public List getStrings(Object key) { return (List)this.fields.get(key); } + @SuppressWarnings({"unchecked"}) public List getLongs(Object key) { return (List)this.fields.get(key); } + @SuppressWarnings({"unchecked"}) public List getDoubles(Object key) { return (List)this.fields.get(key); } + /** + * Return all tuple fields and their values. + */ + public Map getFields() { + return this.fields; + } + + /** + * Return all tuple fields. + * @deprecated use {@link #getFields()} instead. + */ + @Deprecated(since = "8.6.0") + @SuppressWarnings({"rawtypes"}) public Map getMap() { return this.fields; } + /** + * This represents the mapping of external field labels to the tuple's + * internal field names if they are different from field names. + * @return field labels or null + */ + public Map getFieldLabels() { + return fieldLabels; + } + + public void setFieldLabels(Map fieldLabels) { + this.fieldLabels = fieldLabels; + } + + /** + * A list of field names to serialize. This list (together with + * the mapping in {@link #getFieldLabels()}) determines what tuple values + * are serialized and their external (serialized) names.
+ * @return list of external field names or null + */ + public List getFieldNames() { + return fieldNames; + } + + public void setFieldNames(List fieldNames) { + this.fieldNames = fieldNames; + } + + @SuppressWarnings({"unchecked", "rawtypes"}) public List getMaps(Object key) { - return (List)this.fields.get(key); + return (List) this.fields.get(key); } - public void setMaps(Object key, List maps) { + public void setMaps(Object key, @SuppressWarnings({"rawtypes"})List maps) { this.fields.put(key, maps); } - public Map getMetrics() { - return (Map)this.fields.get("_METRICS_"); + @SuppressWarnings({"unchecked", "rawtypes"}) + public Map getMetrics() { + return (Map) this.fields.get(StreamParams.METRICS); } + @SuppressWarnings({"rawtypes"}) public void setMetrics(Map metrics) { - this.fields.put("_METRICS_", metrics); + this.fields.put(StreamParams.METRICS, metrics); } public Tuple clone() { - HashMap m = new HashMap(fields); - Tuple clone = new Tuple(m); + Tuple clone = new Tuple(); + clone.fields.putAll(fields); return clone; } - public void merge(Tuple other){ - fields.putAll(other.getMap()); + public void merge(Tuple other) { + fields.putAll(other.getFields()); } @Override public void writeMap(EntryWriter ew) throws IOException { - if(fieldNames == null) { + if (fieldNames == null) { fields.forEach((k, v) -> { try { ew.put((String) k, v); @@ -214,10 +301,33 @@ public void writeMap(EntryWriter ew) throws IOException { } }); } else { - for(String fieldName : fieldNames) { + for (String fieldName : fieldNames) { String label = fieldLabels.get(fieldName); ew.put(label, fields.get(label)); } } } + + /** + * Create a new empty tuple marked as EOF. + */ + public static Tuple EOF() { + Tuple tuple = new Tuple(); + tuple.put(StreamParams.EOF, true); + return tuple; + } + + /** + * Create a new empty tuple marked as EXCEPTION, and optionally EOF. + * @param msg exception message + * @param eof if true the tuple will be marked as EOF + */ + public static Tuple EXCEPTION(String msg, boolean eof) { + Tuple tuple = new Tuple(); + tuple.put(StreamParams.EXCEPTION, msg); + if (eof) { + tuple.put(StreamParams.EOF, true); + } + return tuple; + } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/FieldComparator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/FieldComparator.java index f2be53e8dfaf..36ecc5e20c31 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/FieldComparator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/FieldComparator.java @@ -108,10 +108,13 @@ public Explanation toExplanation(StreamFactory factory) throws IOException { * check only once - we can do that in the constructor of this class, create a lambda, and then execute * that lambda in the compare function. A little bit of branch prediction savings right here. */ + @SuppressWarnings({"unchecked"}) private void assignComparator(){ if(ComparatorOrder.DESCENDING == order){ comparator = (leftTuple, rightTuple) -> { + @SuppressWarnings({"rawtypes"}) Comparable leftComp = (Comparable)leftTuple.get(leftFieldName); + @SuppressWarnings({"rawtypes"}) Comparable rightComp = (Comparable)rightTuple.get(rightFieldName); if(leftComp == rightComp){ return 0; } // if both null then they are equal. if both are same ref then are equal @@ -124,7 +127,9 @@ private void assignComparator(){ else{ // See above for black magic reasoning. 
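// A minimal sketch of the branch-hoisting idiom used in assignComparator,
// under assumed simplified names ("field" for the field name, "compare"
// for a hypothetical null-safe Comparable helper): the ascending/descending
// check runs once, and the stored lambda does no per-compare branching.
//
//   Comparator<Tuple> cmp = (ComparatorOrder.DESCENDING == order)
//       ? (l, r) -> compare(r.get(field), l.get(field))  // descending
//       : (l, r) -> compare(l.get(field), r.get(field)); // ascending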
comparator = (leftTuple, rightTuple) -> { + @SuppressWarnings({"rawtypes"}) Comparable leftComp = (Comparable)leftTuple.get(leftFieldName); + @SuppressWarnings({"rawtypes"}) Comparable rightComp = (Comparable)rightTuple.get(rightFieldName); if(leftComp == rightComp){ return 0; } // if both null then they are equal. if both are same ref then are equal diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eq/FieldEqualitor.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eq/FieldEqualitor.java index e7e207edb3d8..2d188cea6869 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eq/FieldEqualitor.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eq/FieldEqualitor.java @@ -73,9 +73,12 @@ public Explanation toExplanation(StreamFactory factory) throws IOException { .withExpression(toExpression(factory).toString()); } + @SuppressWarnings({"unchecked"}) public boolean test(Tuple leftTuple, Tuple rightTuple) { + @SuppressWarnings({"rawtypes"}) Comparable leftComp = (Comparable)leftTuple.get(leftFieldName); + @SuppressWarnings({"rawtypes"}) Comparable rightComp = (Comparable)rightTuple.get(rightFieldName); if(leftComp == rightComp){ return true; } // if both null then they are equal. if both are same ref then are equal diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/AkimaEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/AkimaEvaluator.java index ff68963bbd47..5837b32b043d 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/AkimaEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/AkimaEvaluator.java @@ -33,6 +33,7 @@ public AkimaEvaluator(StreamExpression expression, StreamFactory factory) throws } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object... objects) throws IOException{ Object first = objects[0]; @@ -56,7 +57,7 @@ public Object doWork(Object... objects) throws IOException{ AkimaSplineInterpolator interpolator = new AkimaSplineInterpolator(); PolynomialSplineFunction spline = interpolator.interpolate(x, y); - List list = new ArrayList(); + List list = new ArrayList<>(); for(double xvalue : x) { list.add(spline.value(xvalue)); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/AnovaEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/AnovaEvaluator.java index 197e4e2f0278..b570712818d2 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/AnovaEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/AnovaEvaluator.java @@ -18,16 +18,15 @@ import java.io.IOException; import java.util.Arrays; -import java.util.HashMap; import java.util.List; import java.util.Locale; -import java.util.Map; import java.util.stream.Collectors; import org.apache.commons.math3.stat.inference.OneWayAnova; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.stream.expr.StreamExpression; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; +import org.apache.solr.common.params.StreamParams; public class AnovaEvaluator extends RecursiveNumericListEvaluator implements ManyValueWorker { protected static final long serialVersionUID = 1L; @@ -45,6 +44,7 @@ public Object doWork(Object... 
values) throws IOException { // at this point we know every incoming value is an array of BigDecimals + @SuppressWarnings({"unchecked"}) List anovaInput = Arrays.stream(values) // for each List, convert to double[] .map(value -> ((List)value).stream().mapToDouble(Number::doubleValue).toArray()) @@ -54,10 +54,10 @@ public Object doWork(Object... values) throws IOException { OneWayAnova anova = new OneWayAnova(); double p = anova.anovaPValue(anovaInput); double f = anova.anovaFValue(anovaInput); - Map m = new HashMap<>(); - m.put("p-value", p); - m.put("f-ratio", f); - return new Tuple(m); + Tuple tuple = new Tuple(); + tuple.put(StreamParams.P_VALUE, p); + tuple.put("f-ratio", f); + return tuple; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ArrayEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ArrayEvaluator.java index 67dbf1aced2f..e6fdf6f868bc 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ArrayEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ArrayEvaluator.java @@ -31,8 +31,10 @@ public class ArrayEvaluator extends RecursiveObjectEvaluator implements ManyValueWorker { protected static final long serialVersionUID = 1L; + @SuppressWarnings({"rawtypes"}) private Comparator sortComparator; + @SuppressWarnings({"unchecked"}) public ArrayEvaluator(StreamExpression expression, StreamFactory factory) throws IOException{ super(expression, factory, Arrays.asList("sort")); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/AscEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/AscEvaluator.java index 0d7321c12bd6..23e6fc5d9f3f 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/AscEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/AscEvaluator.java @@ -32,6 +32,7 @@ public AscEvaluator(StreamExpression expression, StreamFactory factory) throws I } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object value) throws IOException { if(null == value){ return value; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/Attributes.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/Attributes.java index 10f3a330c066..355abd9ccdac 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/Attributes.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/Attributes.java @@ -22,5 +22,6 @@ public interface Attributes { Object getAttribute(String key); void setAttribute(String key, Object value); + @SuppressWarnings({"rawtypes"}) Map getAttributes(); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/BicubicSplineEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/BicubicSplineEvaluator.java index 34ee5b240033..26f9dcaa7b21 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/BicubicSplineEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/BicubicSplineEvaluator.java @@ -47,6 +47,7 @@ public Object doWork(Object... objects) throws IOException { double[][] grid = null; if(first instanceof List && second instanceof List && third instanceof Matrix) { + @SuppressWarnings({"unchecked"}) List xlist = (List) first; x = new double[xlist.size()]; @@ -54,6 +55,7 @@ public Object doWork(Object... 
objects) throws IOException { x[i]=xlist.get(i).doubleValue(); } + @SuppressWarnings({"unchecked"}) List ylist = (List) second; y = new double[ylist.size()]; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ChebyshevDistanceEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ChebyshevDistanceEvaluator.java index 3a9294e8b5b4..b332451e9664 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ChebyshevDistanceEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ChebyshevDistanceEvaluator.java @@ -33,6 +33,7 @@ public ChebyshevDistanceEvaluator(StreamExpression expression, StreamFactory fac } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object first, Object second) throws IOException{ if(null == first){ throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - null found for the first value",toExpression(constructingFactory))); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ChiSquareDataSetEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ChiSquareDataSetEvaluator.java index 9eb963a7df66..26ab319ce327 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ChiSquareDataSetEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ChiSquareDataSetEvaluator.java @@ -18,14 +18,13 @@ package org.apache.solr.client.solrj.io.eval; import java.io.IOException; -import java.util.HashMap; import java.util.List; -import java.util.Map; import org.apache.commons.math3.stat.inference.ChiSquareTest; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.stream.expr.StreamExpression; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; +import org.apache.solr.common.params.StreamParams; public class ChiSquareDataSetEvaluator extends RecursiveNumericListEvaluator implements TwoValueWorker { @@ -38,7 +37,9 @@ public ChiSquareDataSetEvaluator(StreamExpression expression, StreamFactory fact @Override public Object doWork(Object value1, Object value2) throws IOException { + @SuppressWarnings({"unchecked"}) List listA = (List) value1; + @SuppressWarnings({"unchecked"}) List listB = (List) value2; long[] sampleA = new long[listA.size()]; @@ -56,10 +57,10 @@ public Object doWork(Object value1, Object value2) throws IOException { double chiSquare = chiSquareTest.chiSquareDataSetsComparison(sampleA, sampleB); double p = chiSquareTest.chiSquareTestDataSetsComparison(sampleA, sampleB); - Map m = new HashMap<>(); - m.put("chisquare-statistic", chiSquare); - m.put("p-value", p); - return new Tuple(m); + Tuple tuple = new Tuple(); + tuple.put("chisquare-statistic", chiSquare); + tuple.put(StreamParams.P_VALUE, p); + return tuple; } } \ No newline at end of file diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ColumnAtEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ColumnAtEvaluator.java index 5714096c559b..1e37d540c1a4 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ColumnAtEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ColumnAtEvaluator.java @@ -43,7 +43,7 @@ public Object doWork(Object value1, Object value2) throws IOException { Matrix matrix = (Matrix) value1; Number index = (Number) value2; double[][] data = matrix.getData(); - List list = new ArrayList(); + List list = new ArrayList<>(); for(double[] row : data) { list.add(row[index.intValue()]); } diff --git 
a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ConvexHullEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ConvexHullEvaluator.java index 2be2ee382f1a..efff30b45678 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ConvexHullEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ConvexHullEvaluator.java @@ -45,7 +45,7 @@ public Object doWork(Object... objects) throws IOException{ public static ConvexHull2D getConvexHull(Matrix matrix) throws IOException { double[][] data = matrix.getData(); - List points = new ArrayList(data.length); + List points = new ArrayList<>(data.length); if(data[0].length == 2) { for(double[] row : data) { points.add(new Vector2D(row[0], row[1])); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ConvolutionEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ConvolutionEvaluator.java index 1f77b6eb7490..0ca8a988587a 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ConvolutionEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ConvolutionEvaluator.java @@ -34,6 +34,7 @@ public ConvolutionEvaluator(StreamExpression expression, StreamFactory factory) } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object first, Object second) throws IOException{ if(null == first){ throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - null found for the first value",toExpression(constructingFactory))); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CorrelationEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CorrelationEvaluator.java index c8c72f414d00..a8d0cf95c592 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CorrelationEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CorrelationEvaluator.java @@ -60,6 +60,7 @@ public CorrelationEvaluator(StreamExpression expression, StreamFactory factory) } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object ... values) throws IOException{ if(values.length == 2) { @@ -150,7 +151,7 @@ public static List getColumnLabels(List labels, int length) { if(labels != null) { return labels; } else { - List l = new ArrayList(); + List l = new ArrayList<>(); for(int i=0; i ((Number) value).doubleValue()).toArray(); + @SuppressWarnings({"unchecked"}) double[] d2 = ((List) second).stream().mapToDouble(value -> ((Number) value).doubleValue()).toArray(); return cosineSimilarity(d1, d2); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CovarianceEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CovarianceEvaluator.java index 3cb316138a00..83988db33711 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CovarianceEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CovarianceEvaluator.java @@ -32,6 +32,7 @@ public CovarianceEvaluator(StreamExpression expression, StreamFactory factory) t } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object ... 
values) throws IOException{ if(values.length == 2) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DbscanEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DbscanEvaluator.java index 5467e25bb28a..52ad7a662b35 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DbscanEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DbscanEvaluator.java @@ -41,6 +41,7 @@ public DbscanEvaluator(StreamExpression expression, StreamFactory factory) throw } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object... values) throws IOException { Matrix matrix = null; @@ -74,8 +75,9 @@ public Object doWork(Object... values) throws IOException { distanceMeasure = (DistanceMeasure)values[3]; } + @SuppressWarnings({"rawtypes"}) DBSCANClusterer dbscan = new DBSCANClusterer(e, minPoints, distanceMeasure); - List points = new ArrayList(); + List points = new ArrayList<>(); double[][] data = matrix.getData(); List ids = matrix.getRowLabels(); @@ -88,6 +90,7 @@ public Object doWork(Object... values) throws IOException { } } + @SuppressWarnings({"rawtypes"}) Map fields = new HashMap(); fields.put("e", e); @@ -121,7 +124,7 @@ public static class ClusterTuple extends Tuple { private List columnLabels; private List> clusters; - public ClusterTuple(Map fields, + public ClusterTuple(@SuppressWarnings({"rawtypes"})Map fields, List> clusters, List columnLabels) { super(fields); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DensityEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DensityEvaluator.java index 4910e6f891c9..17d3bf602d7f 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DensityEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DensityEvaluator.java @@ -42,6 +42,7 @@ public Object doWork(Object first, Object second) throws IOException{ } MultivariateRealDistribution multivariateRealDistribution = (MultivariateRealDistribution) first; + @SuppressWarnings({"unchecked"}) List nums = (List) second; double[] vec = new double[nums.size()]; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DescribeEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DescribeEvaluator.java index 2fce7a0a6a84..27ef0de392c9 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DescribeEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DescribeEvaluator.java @@ -17,10 +17,8 @@ package org.apache.solr.client.solrj.io.eval; import java.io.IOException; -import java.util.HashMap; import java.util.List; import java.util.Locale; -import java.util.Map; import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics; import org.apache.solr.client.solrj.io.Tuple; @@ -49,20 +47,20 @@ public Object doWork(Object value) throws IOException { DescriptiveStatistics descriptiveStatistics = new DescriptiveStatistics(); ((List)value).stream().mapToDouble(innerValue -> ((Number)innerValue).doubleValue()).forEach(innerValue -> descriptiveStatistics.addValue(innerValue)); - Map map = new HashMap<>(); - map.put("max", descriptiveStatistics.getMax()); - map.put("mean", descriptiveStatistics.getMean()); - map.put("min", descriptiveStatistics.getMin()); - map.put("stdev", descriptiveStatistics.getStandardDeviation()); - map.put("sum", descriptiveStatistics.getSum()); - map.put("N", descriptiveStatistics.getN()); - map.put("var", descriptiveStatistics.getVariance()); - 
map.put("kurtosis", descriptiveStatistics.getKurtosis()); - map.put("skewness", descriptiveStatistics.getSkewness()); - map.put("popVar", descriptiveStatistics.getPopulationVariance()); - map.put("geometricMean", descriptiveStatistics.getGeometricMean()); - map.put("sumsq", descriptiveStatistics.getSumsq()); + Tuple tuple = new Tuple(); + tuple.put("max", descriptiveStatistics.getMax()); + tuple.put("mean", descriptiveStatistics.getMean()); + tuple.put("min", descriptiveStatistics.getMin()); + tuple.put("stdev", descriptiveStatistics.getStandardDeviation()); + tuple.put("sum", descriptiveStatistics.getSum()); + tuple.put("N", descriptiveStatistics.getN()); + tuple.put("var", descriptiveStatistics.getVariance()); + tuple.put("kurtosis", descriptiveStatistics.getKurtosis()); + tuple.put("skewness", descriptiveStatistics.getSkewness()); + tuple.put("popVar", descriptiveStatistics.getPopulationVariance()); + tuple.put("geometricMean", descriptiveStatistics.getGeometricMean()); + tuple.put("sumsq", descriptiveStatistics.getSumsq()); - return new Tuple(map); + return tuple; } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DistanceEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DistanceEvaluator.java index 888e145ab833..684a651dda06 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DistanceEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DistanceEvaluator.java @@ -38,6 +38,7 @@ public DistanceEvaluator(StreamExpression expression, StreamFactory factory) thr } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object ... values) throws IOException{ if(values.length == 1) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DotProductEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DotProductEvaluator.java index 86d13d030447..7874c3285257 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DotProductEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DotProductEvaluator.java @@ -48,7 +48,9 @@ public Object doWork(Object first, Object second) throws IOException{ throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - found type %s for the second value, expecting a list of numbers",toExpression(constructingFactory), first.getClass().getSimpleName())); } + @SuppressWarnings({"unchecked", "rawtypes"}) RealVector v = new ArrayRealVector(((List) first).stream().mapToDouble(value -> ((Number) value).doubleValue()).toArray()); + @SuppressWarnings({"unchecked", "rawtypes"}) RealVector v2 = new ArrayRealVector(((List) second).stream().mapToDouble(value -> ((Number) value).doubleValue()).toArray()); return v.dotProduct(v2); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EBEAddEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EBEAddEvaluator.java index 9409f15cc176..0c7f2d5e5106 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EBEAddEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EBEAddEvaluator.java @@ -34,6 +34,7 @@ public EBEAddEvaluator(StreamExpression expression, StreamFactory factory) throw } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object first, Object second) throws IOException{ if(null == first){ throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - null found for the first value",toExpression(constructingFactory))); @@ -48,7 +49,7 @@ public 
Object doWork(Object first, Object second) throws IOException{ ((List) second).stream().mapToDouble(value -> ((Number) value).doubleValue()).toArray() ); - List numbers = new ArrayList(); + List numbers = new ArrayList<>(); for (double d : result) { numbers.add(d); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EBEDivideEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EBEDivideEvaluator.java index e80dfc50593c..bf89b49bd492 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EBEDivideEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EBEDivideEvaluator.java @@ -47,12 +47,13 @@ public Object doWork(Object first, Object second) throws IOException{ throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - found type %s for the second value, expecting a list of numbers",toExpression(constructingFactory), first.getClass().getSimpleName())); } + @SuppressWarnings({"unchecked"}) double[] result = MathArrays.ebeDivide( ((List) first).stream().mapToDouble(value -> ((Number) value).doubleValue()).toArray(), ((List) second).stream().mapToDouble(value -> ((Number) value).doubleValue()).toArray() ); - List numbers = new ArrayList(); + List numbers = new ArrayList<>(); for(double d : result) { numbers.add(d); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EBEMultiplyEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EBEMultiplyEvaluator.java index f8c06e955ab1..0c1f14e0f048 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EBEMultiplyEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EBEMultiplyEvaluator.java @@ -33,6 +33,7 @@ public EBEMultiplyEvaluator(StreamExpression expression, StreamFactory factory) } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object first, Object second) throws IOException{ if(null == first){ throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - null found for the first value",toExpression(constructingFactory))); @@ -52,7 +53,7 @@ public Object doWork(Object first, Object second) throws IOException{ ((List) second).stream().mapToDouble(value -> ((Number) value).doubleValue()).toArray() ); - List numbers = new ArrayList(); + List numbers = new ArrayList<>(); for(double d : result) { numbers.add(d); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EBESubtractEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EBESubtractEvaluator.java index 87857de6a982..d52dd0602afb 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EBESubtractEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EBESubtractEvaluator.java @@ -34,6 +34,7 @@ public EBESubtractEvaluator(StreamExpression expression, StreamFactory factory) } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object first, Object second) throws IOException{ if(null == first){ throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - null found for the first value",toExpression(constructingFactory))); @@ -47,7 +48,7 @@ public Object doWork(Object first, Object second) throws IOException{ ((List) second).stream().mapToDouble(value -> ((Number) value).doubleValue()).toArray() ); - List numbers = new ArrayList(); + List numbers = new ArrayList<>(); for (double d : result) { numbers.add(d); } diff --git 
a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EnclosingDiskEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EnclosingDiskEvaluator.java index eb5d33f90647..1dbf183bf629 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EnclosingDiskEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EnclosingDiskEvaluator.java @@ -45,14 +45,17 @@ public Object doWork(Object... objects) throws IOException{ } } + @SuppressWarnings({"rawtypes"}) public static EnclosingBall getEnclosingDisk(Matrix matrix) throws IOException { double[][] data = matrix.getData(); - List points = new ArrayList(data.length); + List points = new ArrayList<>(data.length); if(data[0].length == 2) { for(double[] row : data) { points.add(new Vector2D(row[0], row[1])); } + + @SuppressWarnings({"unchecked"}) WelzlEncloser welzlEncloser = new WelzlEncloser(.001, new DiskGenerator()); EnclosingBall enclosingBall = welzlEncloser.enclose(points); return enclosingBall; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EnumeratedDistributionEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EnumeratedDistributionEvaluator.java index d88c76dfc419..ed6fe9f5351d 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EnumeratedDistributionEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EnumeratedDistributionEvaluator.java @@ -39,13 +39,19 @@ public Object doWork(Object... values) throws IOException{ } if(values.length == 1) { + @SuppressWarnings({"unchecked"}) List first = (List)values[0]; + @SuppressWarnings({"unchecked", "rawtypes"}) int[] samples = ((List) first).stream().mapToInt(value -> ((Number) value).intValue()).toArray(); return new EnumeratedIntegerDistribution(samples); } else { + @SuppressWarnings({"unchecked"}) List first = (List)values[0]; + @SuppressWarnings({"unchecked"}) List second = (List)values[1]; + @SuppressWarnings({"unchecked", "rawtypes"}) int[] singletons = ((List) first).stream().mapToInt(value -> ((Number) value).intValue()).toArray(); + @SuppressWarnings({"unchecked", "rawtypes"}) double[] probs = ((List) second).stream().mapToDouble(value -> ((Number) value).doubleValue()).toArray(); return new EnumeratedIntegerDistribution(singletons, probs); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EuclideanDistanceEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EuclideanDistanceEvaluator.java index 3fa0c121892e..1f692dc90217 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EuclideanDistanceEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EuclideanDistanceEvaluator.java @@ -33,6 +33,7 @@ public EuclideanDistanceEvaluator(StreamExpression expression, StreamFactory fac } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object first, Object second) throws IOException{ if(null == first){ throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - null found for the first value",toExpression(constructingFactory))); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FFTEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FFTEvaluator.java index 94fecbf77ce6..8320146d2a6a 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FFTEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FFTEvaluator.java @@ -31,7 +31,7 @@ public class FFTEvaluator extends 
RecursiveNumericEvaluator implements OneValueWorker { protected static final long serialVersionUID = 1L; - private static List clabels = new ArrayList(); + private static List clabels = new ArrayList<>(); static { clabels.add("real"); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FeatureSelectEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FeatureSelectEvaluator.java index b3c06d824c55..10da5c771b15 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FeatureSelectEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FeatureSelectEvaluator.java @@ -44,10 +44,10 @@ public Object doWork(Object value1, Object value2) throws IOException { double[][] data = matrix.getData(); List labels = matrix.getColumnLabels(); - Set features = new HashSet(); + Set features = new HashSet<>(); loadFeatures(value2, features); - List newColumnLabels = new ArrayList(); + List newColumnLabels = new ArrayList<>(); for(String label : labels) { if(features.contains(label)) { @@ -81,6 +81,7 @@ public Object doWork(Object value1, Object value2) throws IOException { } private void loadFeatures(Object o, Set features) { + @SuppressWarnings({"rawtypes"}) List list = (List)o; for(Object v : list) { if(v instanceof List) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FieldValueEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FieldValueEvaluator.java index 73bc0c76441e..065f3d4b6a30 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FieldValueEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FieldValueEvaluator.java @@ -42,6 +42,7 @@ public FieldValueEvaluator(String fieldName) { } @Override + @SuppressWarnings({"unchecked"}) public Object evaluate(Tuple tuple) throws IOException { Object value = tuple.get(fieldName); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FindDelayEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FindDelayEvaluator.java index 630327202ba1..ef0b44752506 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FindDelayEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FindDelayEvaluator.java @@ -52,7 +52,9 @@ public Object doWork(Object first, Object second) throws IOException{ } // Get first and second lists as arrays, where second is in reverse order + @SuppressWarnings({"unchecked", "rawtypes"}) double[] firstArray = ((List)first).stream().mapToDouble(value -> ((Number)value).doubleValue()).toArray(); + @SuppressWarnings({"unchecked", "rawtypes"}) double[] secondArray = StreamSupport.stream(Spliterators.spliteratorUnknownSize( ((LinkedList)((List)second).stream().collect(Collectors.toCollection(LinkedList::new))).descendingIterator(), Spliterator.ORDERED), false).mapToDouble(value -> ((Number)value).doubleValue()).toArray(); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FrequencyTableEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FrequencyTableEvaluator.java index b648e3f54930..200017870d42 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FrequencyTableEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FrequencyTableEvaluator.java @@ -19,11 +19,9 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; 
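The hunks above and below repeatedly swap raw-type allocations such as new ArrayList() for the diamond form new ArrayList<>(). A minimal standalone sketch (plain Java, not code from this patch) of what that buys: the raw form trips javac's -Xlint:rawtypes check and erases the element type, while the diamond form infers the type argument from the declaration.

import java.util.ArrayList;
import java.util.List;

public class DiamondSketch {
  public static void main(String[] args) {
    // Raw type: compiles, but javac -Xlint:rawtypes warns and every
    // element read back is just Object.
    List raw = new ArrayList();
    raw.add(1.5d);

    // Diamond operator: the element type is inferred from the left-hand
    // side, so reads need no cast and the lint warning disappears.
    List<Double> numbers = new ArrayList<>();
    numbers.add(1.5d);
    double d = numbers.get(0);

    System.out.println(d + " / raw size " + raw.size());
  }
}

Where the surrounding Commons Math calls themselves stay raw (WelzlEncloser, the clusterers), the patch instead adds a rawtypes suppression rather than forcing type arguments onto those call sites.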
-import java.util.Map; import org.apache.commons.math3.stat.Frequency; @@ -67,17 +65,18 @@ public Object doWork(Object... values) throws IOException { List histogramBins = new ArrayList<>(); + @SuppressWarnings({"rawtypes"}) Iterator iterator = frequency.valuesIterator(); while(iterator.hasNext()){ Long value = (Long)iterator.next(); - Map map = new HashMap<>(); - map.put("value", value.longValue()); - map.put("count", frequency.getCount(value)); - map.put("cumFreq", frequency.getCumFreq(value)); - map.put("cumPct", frequency.getCumPct(value)); - map.put("pct", frequency.getPct(value)); - histogramBins.add(new Tuple(map)); + Tuple tuple = new Tuple(); + tuple.put("value", value.longValue()); + tuple.put("count", frequency.getCount(value)); + tuple.put("cumFreq", frequency.getCumFreq(value)); + tuple.put("cumPct", frequency.getCumPct(value)); + tuple.put("pct", frequency.getPct(value)); + histogramBins.add(tuple); } return histogramBins; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FuzzyKmeansEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FuzzyKmeansEvaluator.java index fbd5561ed9c0..355f32f9eafe 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FuzzyKmeansEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FuzzyKmeansEvaluator.java @@ -56,6 +56,7 @@ public FuzzyKmeansEvaluator(StreamExpression expression, StreamFactory factory) } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object value1, Object value2) throws IOException { @@ -75,11 +76,12 @@ public Object doWork(Object value1, Object value2) throws IOException { throw new IOException("The second parameter for fuzzyKmeans should be k."); } + @SuppressWarnings({"rawtypes"}) FuzzyKMeansClusterer kmeans = new FuzzyKMeansClusterer(k, fuzziness, maxIterations, new EuclideanDistance()); - List points = new ArrayList(); + List points = new ArrayList<>(); double[][] data = matrix.getData(); List ids = matrix.getRowLabels(); @@ -89,6 +91,7 @@ public Object doWork(Object value1, Object value2) throws IOException { points.add(new KmeansEvaluator.ClusterPoint(ids.get(i), vec)); } + @SuppressWarnings({"rawtypes"}) Map fields = new HashMap(); fields.put("k", k); @@ -101,7 +104,7 @@ public Object doWork(Object value1, Object value2) throws IOException { double[][] mmData = realMatrix.getData(); Matrix mmMatrix = new Matrix(mmData); mmMatrix.setRowLabels(matrix.getRowLabels()); - List clusterCols = new ArrayList(); + List clusterCols = new ArrayList<>(); for(int i=0; i listA = (List) value1; + @SuppressWarnings({"unchecked"}) List listB = (List) value2; long[] sampleA = new long[listA.size()]; @@ -56,9 +57,9 @@ public Object doWork(Object value1, Object value2) throws IOException { double g = gTest.gDataSetsComparison(sampleA, sampleB); double p = gTest.gTestDataSetsComparison(sampleA, sampleB); - Map m = new HashMap<>(); - m.put("G-statistic", g); - m.put("p-value", p); - return new Tuple(m); + Tuple tuple = new Tuple(); + tuple.put("G-statistic", g); + tuple.put(StreamParams.P_VALUE, p); + return tuple; } } \ No newline at end of file diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GaussFitEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GaussFitEvaluator.java index a26c8d70d1f5..8b5161bd06d1 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GaussFitEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GaussFitEvaluator.java @@ -37,6 +37,7 @@ 
public GaussFitEvaluator(StreamExpression expression, StreamFactory factory) thr } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public Object doWork(Object... objects) throws IOException{ if(objects.length >= 3) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GetBaryCenterEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GetBaryCenterEvaluator.java index fd2f4e51f89a..dc6417fc1f35 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GetBaryCenterEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GetBaryCenterEvaluator.java @@ -41,7 +41,7 @@ public Object doWork(Object value) throws IOException { } else { ConvexHull2D convexHull2D = (ConvexHull2D)value; Vector2D vector2D = (Vector2D)convexHull2D.createRegion().getBarycenter(); - List vec = new ArrayList(); + List vec = new ArrayList<>(); vec.add(vector2D.getX()); vec.add(vector2D.getY()); return vec; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GetCacheEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GetCacheEvaluator.java index e340d80845ba..16b8050b920d 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GetCacheEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GetCacheEvaluator.java @@ -37,12 +37,14 @@ public GetCacheEvaluator(StreamExpression expression, StreamFactory factory) thr @Override public Object doWork(Object... values) throws IOException { + @SuppressWarnings({"rawtypes"}) ConcurrentMap objectCache = this.streamContext.getObjectCache(); if(values.length == 2) { String space = (String)values[0]; String key = (String)values[1]; space = space.replace("\"", ""); key = key.replace("\"", ""); + @SuppressWarnings({"rawtypes"}) ConcurrentMap spaceCache = (ConcurrentMap)objectCache.get(space); if(spaceCache != null) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GetCenterEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GetCenterEvaluator.java index 45ecea1dfe0c..2fc1ae8cb3d3 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GetCenterEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GetCenterEvaluator.java @@ -41,9 +41,10 @@ public Object doWork(Object value) throws IOException { if(!(value instanceof EnclosingBall)){ throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - found type %s for value, expecting an EnclosingBall",toExpression(constructingFactory), value.getClass().getSimpleName())); } else { + @SuppressWarnings({"rawtypes"}) EnclosingBall enclosingBall = (EnclosingBall)value; Vector2D vec = (Vector2D)enclosingBall.getCenter(); - List center = new ArrayList(); + List center = new ArrayList<>(); center.add(vec.getX()); center.add(vec.getY()); return center; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GetClusterEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GetClusterEvaluator.java index 903670d96143..a840967620bd 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GetClusterEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/GetClusterEvaluator.java @@ -44,9 +44,11 @@ public Object doWork(Object value1, Object value2) throws IOException { List> clusters = clusterTuple.getClusters(); Number index = (Number)value2; + @SuppressWarnings({"rawtypes"}) CentroidCluster cluster = clusters.get(index.intValue()); + 
@SuppressWarnings({"rawtypes"}) List points = cluster.getPoints(); - List rowLabels = new ArrayList(); + List rowLabels = new ArrayList<>(); double[][] data = new double[points.size()][]; for(int i=0; i 3) { @@ -76,12 +77,14 @@ public Object doWork(Object... objects) throws IOException{ double[] coef = curveFitter.fit(points.toList()); HarmonicOscillator pf = new HarmonicOscillator(coef[0], coef[1], coef[2]); + @SuppressWarnings({"rawtypes"}) List list = new ArrayList(); for(double xvalue : x) { double yvalue= pf.value(xvalue); list.add(yvalue); } + @SuppressWarnings({"unchecked"}) VectorFunction vectorFunction = new VectorFunction(pf, list); vectorFunction.addToContext("amplitude", coef[0]); vectorFunction.addToContext("angularFrequency", coef[1]); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/HistogramEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/HistogramEvaluator.java index 8d2761469f9f..fd6fcf65c84c 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/HistogramEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/HistogramEvaluator.java @@ -19,10 +19,8 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.HashMap; import java.util.List; import java.util.Locale; -import java.util.Map; import org.apache.commons.math3.random.EmpiricalDistribution; import org.apache.commons.math3.stat.descriptive.SummaryStatistics; @@ -71,17 +69,17 @@ public Object doWork(Object... values) throws IOException { List histogramBins = new ArrayList<>(); for(SummaryStatistics binSummary : distribution.getBinStats()) { - Map map = new HashMap<>(); - map.put("max", binSummary.getMax()); - map.put("mean", binSummary.getMean()); - map.put("min", binSummary.getMin()); - map.put("stdev", binSummary.getStandardDeviation()); - map.put("sum", binSummary.getSum()); - map.put("N", binSummary.getN()); - map.put("var", binSummary.getVariance()); - map.put("cumProb", distribution.cumulativeProbability(binSummary.getMean())); - map.put("prob", distribution.probability(binSummary.getMin(), binSummary.getMax())); - histogramBins.add(new Tuple(map)); + Tuple tuple = new Tuple(); + tuple.put("max", binSummary.getMax()); + tuple.put("mean", binSummary.getMean()); + tuple.put("min", binSummary.getMin()); + tuple.put("stdev", binSummary.getStandardDeviation()); + tuple.put("sum", binSummary.getSum()); + tuple.put("N", binSummary.getN()); + tuple.put("var", binSummary.getVariance()); + tuple.put("cumProb", distribution.cumulativeProbability(binSummary.getMean())); + tuple.put("prob", distribution.probability(binSummary.getMin(), binSummary.getMax())); + histogramBins.add(tuple); } return histogramBins; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/IFFTEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/IFFTEvaluator.java index 2ca230f66b1b..d812dc94d781 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/IFFTEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/IFFTEvaluator.java @@ -58,7 +58,7 @@ public Object doWork(Object v) throws IOException { FastFourierTransformer fastFourierTransformer = new FastFourierTransformer(DftNormalization.STANDARD); Complex[] result = fastFourierTransformer.transform(complex, TransformType.INVERSE); - List realResult = new ArrayList(); + List realResult = new ArrayList<>(); for (int i = 0; i < result.length; ++i) { realResult.add(result[i].getReal()); } diff --git 
a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/IndexOfEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/IndexOfEvaluator.java index 60136ffd29ba..81714a23d742 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/IndexOfEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/IndexOfEvaluator.java @@ -36,6 +36,7 @@ public Object doWork(Object value1, Object value2) throws IOException { if(!(value1 instanceof List)){ throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - found type %s for value, expecting an array",toExpression(constructingFactory), value1.getClass().getSimpleName())); } else { + @SuppressWarnings({"rawtypes"}) List list = (List)value1; String find = value2.toString().replace("\"",""); for(int i=0; i out = new ArrayList(); + ArrayList out = new ArrayList<>(); out.add(0); for(int i=1; i kmeans = new KMeansPlusPlusClusterer(k, maxIterations); - List points = new ArrayList(); + List points = new ArrayList<>(); double[][] data = matrix.getData(); List ids = matrix.getRowLabels(); @@ -85,6 +87,7 @@ public Object doWork(Object value1, Object value2) throws IOException { } } + @SuppressWarnings({"rawtypes"}) Map fields = new HashMap(); fields.put("k", k); @@ -119,7 +122,7 @@ public static class ClusterTuple extends Tuple { private List> clusters; private Matrix membershipMatrix; - public ClusterTuple(Map fields, + public ClusterTuple(@SuppressWarnings({"rawtypes"})Map fields, List> clusters, List columnLabels) { super(fields); @@ -127,7 +130,7 @@ public ClusterTuple(Map fields, this.columnLabels = columnLabels; } - public ClusterTuple(Map fields, + public ClusterTuple(@SuppressWarnings({"rawtypes"})Map fields, List> clusters, List columnLabels, Matrix membershipMatrix) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/KnnEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/KnnEvaluator.java index 17fb0110af28..1479ed3b3469 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/KnnEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/KnnEvaluator.java @@ -52,6 +52,7 @@ public Object doWork(Object... values) throws IOException { } if(values[1] instanceof List) { + @SuppressWarnings({"unchecked"}) List nums = (List)values[1]; vec = new double[nums.size()]; for(int i=0; i neighbors = new TreeSet(); + TreeSet neighbors = new TreeSet<>(); for(int i=0; i rowLabels = observations.getRowLabels(); - List newRowLabels = new ArrayList(); - List indexes = new ArrayList(); - List distances = new ArrayList(); + List newRowLabels = new ArrayList<>(); + List indexes = new ArrayList<>(); + List distances = new ArrayList<>(); int i=-1; while(neighbors.size() > 0) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/KnnRegressionEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/KnnRegressionEvaluator.java index e16e60e68c40..80cd64030ed9 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/KnnRegressionEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/KnnRegressionEvaluator.java @@ -54,6 +54,7 @@ public KnnRegressionEvaluator(StreamExpression expression, StreamFactory factory } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object ... values) throws IOException { if(values.length < 3) { @@ -105,6 +106,7 @@ public Object doWork(Object ... 
values) throws IOException { outcomeData[i] = outcomes.get(i).doubleValue(); } + @SuppressWarnings({"rawtypes"}) Map map = new HashMap(); map.put("k", k); map.put("observations", observations.getRowCount()); @@ -222,6 +224,7 @@ public double predict(double[] values) { Matrix obs = scaledObservations != null ? scaledObservations : observations; Matrix knn = KnnEvaluator.search(obs, values, k, distanceMeasure); + @SuppressWarnings({"unchecked"}) List indexes = (List)knn.getAttribute("indexes"); if(robust) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/KolmogorovSmirnovEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/KolmogorovSmirnovEvaluator.java index 58e783e7a299..27256b14d575 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/KolmogorovSmirnovEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/KolmogorovSmirnovEvaluator.java @@ -17,16 +17,15 @@ package org.apache.solr.client.solrj.io.eval; import java.io.IOException; -import java.util.HashMap; import java.util.List; import java.util.Locale; -import java.util.Map; import org.apache.commons.math3.distribution.RealDistribution; import org.apache.commons.math3.stat.inference.KolmogorovSmirnovTest; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.stream.expr.StreamExpression; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; +import org.apache.solr.common.params.StreamParams; public class KolmogorovSmirnovEvaluator extends RecursiveObjectEvaluator implements TwoValueWorker { @@ -54,17 +53,17 @@ public Object doWork(Object first, Object second) throws IOException{ if(first instanceof RealDistribution){ RealDistribution realDistribution = (RealDistribution)first; - Map m = new HashMap<>(); - m.put("p-value", ks.kolmogorovSmirnovTest(realDistribution, data)); - m.put("d-statistic", ks.kolmogorovSmirnovStatistic(realDistribution, data)); - return new Tuple(m); + Tuple tuple = new Tuple(); + tuple.put(StreamParams.P_VALUE, ks.kolmogorovSmirnovTest(realDistribution, data)); + tuple.put("d-statistic", ks.kolmogorovSmirnovStatistic(realDistribution, data)); + return tuple; } else if(first instanceof List && ((List) first).stream().noneMatch(item -> !(item instanceof Number))){ double[] data2 = ((List)first).stream().mapToDouble(item -> ((Number)item).doubleValue()).toArray(); - - Map m = new HashMap<>(); - m.put("d-statistic", ks.kolmogorovSmirnovTest(data, data2)); - return new Tuple(m); + + Tuple tuple = new Tuple(); + tuple.put("d-statistic", ks.kolmogorovSmirnovTest(data, data2)); + return tuple; } else{ throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - found type %s for the first value, expecting a RealDistribution or list of numbers",toExpression(constructingFactory), first.getClass().getSimpleName())); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/L1NormEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/L1NormEvaluator.java index 8ab316e6a6c1..3d71c2260a4e 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/L1NormEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/L1NormEvaluator.java @@ -41,6 +41,7 @@ public Object doWork(Object value) throws IOException{ throw new IOException(String.format(Locale.ROOT, "Unable to find %s(...) 
because the value is null", constructingFactory.getFunctionName(getClass()))); } else if(value instanceof List){ + @SuppressWarnings({"unchecked"}) List c = (List) value; double[] data = new double[c.size()]; for(int i=0; i< c.size(); i++) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/LInfNormEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/LInfNormEvaluator.java index e354e4e7f958..bbb0de9623d7 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/LInfNormEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/LInfNormEvaluator.java @@ -41,6 +41,7 @@ public Object doWork(Object value) throws IOException{ throw new IOException(String.format(Locale.ROOT, "Unable to find %s(...) because the value is null", constructingFactory.getFunctionName(getClass()))); } else if(value instanceof List){ + @SuppressWarnings({"unchecked"}) List c = (List) value; double[] data = new double[c.size()]; for(int i=0; i< c.size(); i++) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/LatLonVectorsEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/LatLonVectorsEvaluator.java index de8168a127c6..39212b3df880 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/LatLonVectorsEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/LatLonVectorsEvaluator.java @@ -67,6 +67,7 @@ public Object doWork(Object... objects) throws IOException { if(!(objects[0] instanceof List)) { throw new IOException("The latlonVectors function expects a list of Tuples as a parameter."); } else { + @SuppressWarnings({"rawtypes"}) List list = (List)objects[0]; if(list.size() > 0) { Object o = list.get(0); @@ -78,14 +79,15 @@ public Object doWork(Object... objects) throws IOException { } } + @SuppressWarnings({"unchecked"}) List tuples = (List) objects[0]; double[][] locationVectors = new double[tuples.size()][2]; - List features = new ArrayList(); + List features = new ArrayList<>(); features.add("lat"); features.add("lon"); - List rowLabels = new ArrayList(); + List rowLabels = new ArrayList<>(); for(int i=0; i< tuples.size(); i++) { Tuple tuple = tuples.get(i); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/LerpEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/LerpEvaluator.java index 5df9ca9289e2..e43992f7aa2d 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/LerpEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/LerpEvaluator.java @@ -33,6 +33,7 @@ public LerpEvaluator(StreamExpression expression, StreamFactory factory) throws } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object... objects) throws IOException{ Object first = objects[0]; @@ -56,7 +57,7 @@ public Object doWork(Object... 
objects) throws IOException{ LinearInterpolator interpolator = new LinearInterpolator(); PolynomialSplineFunction spline = interpolator.interpolate(x, y); - List list = new ArrayList(); + List list = new ArrayList<>(); for(double xvalue : x) { list.add(spline.value(xvalue)); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ListCacheEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ListCacheEvaluator.java index 99c25e7e9e89..05048379d462 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ListCacheEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ListCacheEvaluator.java @@ -41,12 +41,17 @@ public ListCacheEvaluator(StreamExpression expression, StreamFactory factory) th } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object... values) throws IOException { + @SuppressWarnings({"rawtypes"}) ConcurrentMap objectCache = this.streamContext.getObjectCache(); + @SuppressWarnings({"rawtypes"}) List list = new ArrayList(); if(values.length == 0) { + @SuppressWarnings({"rawtypes"}) ConcurrentHashMap m = (ConcurrentHashMap)objectCache; + @SuppressWarnings({"rawtypes"}) Enumeration en = m.keys(); while(en.hasMoreElements()) { list.add(en.nextElement()); @@ -55,9 +60,12 @@ public Object doWork(Object... values) throws IOException { } else if(values.length == 1) { String space = (String)values[0]; space = space.replace("\"", ""); + @SuppressWarnings({"rawtypes"}) ConcurrentMap spaceCache = (ConcurrentMap)objectCache.get(space); if(spaceCache != null) { + @SuppressWarnings({"rawtypes"}) ConcurrentHashMap spaceMap = (ConcurrentHashMap)objectCache.get(space); + @SuppressWarnings({"rawtypes"}) Enumeration en = spaceMap.keys(); while(en.hasMoreElements()) { list.add(en.nextElement()); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/LoessEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/LoessEvaluator.java index 31f6962fe399..436958890132 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/LoessEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/LoessEvaluator.java @@ -52,6 +52,7 @@ public LoessEvaluator(StreamExpression expression, StreamFactory factory) throws } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object... objects) throws IOException{ Object first = objects[0]; @@ -75,7 +76,7 @@ public Object doWork(Object... 
objects) throws IOException{ LoessInterpolator interpolator = new LoessInterpolator(bandwidth, robustIterations); double[] smooth = interpolator.smooth(x, y); - List list = new ArrayList(); + List list = new ArrayList<>(); for(double yvalue : smooth) { list.add(yvalue); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MannWhitneyUEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MannWhitneyUEvaluator.java index defa91924969..6c6e278724b2 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MannWhitneyUEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MannWhitneyUEvaluator.java @@ -19,16 +19,15 @@ import java.io.IOException; import java.util.Arrays; -import java.util.HashMap; import java.util.List; import java.util.Locale; -import java.util.Map; import java.util.stream.Collectors; import org.apache.commons.math3.stat.inference.MannWhitneyUTest; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.stream.expr.StreamExpression; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; +import org.apache.solr.common.params.StreamParams; public class MannWhitneyUEvaluator extends RecursiveNumericListEvaluator implements ManyValueWorker { @@ -44,6 +43,7 @@ public MannWhitneyUEvaluator(StreamExpression expression, StreamFactory factory) @Override public Object doWork(Object... values) throws IOException { + @SuppressWarnings({"unchecked"}) List mannWhitneyUInput = Arrays.stream(values) .map(value -> ((List) value).stream().mapToDouble(Number::doubleValue).toArray()) .collect(Collectors.toList()); @@ -51,10 +51,10 @@ public Object doWork(Object... values) throws IOException { MannWhitneyUTest mannwhitneyutest = new MannWhitneyUTest(); double u = mannwhitneyutest.mannWhitneyU(mannWhitneyUInput.get(0), mannWhitneyUInput.get(1)); double p = mannwhitneyutest.mannWhitneyUTest(mannWhitneyUInput.get(0), mannWhitneyUInput.get(1)); - Map m = new HashMap<>(); - m.put("u-statistic", u); - m.put("p-value", p); - return new Tuple(m); + Tuple tuple = new Tuple(); + tuple.put("u-statistic", u); + tuple.put(StreamParams.P_VALUE, p); + return tuple; }else{ throw new IOException(String.format(Locale.ROOT,"%s(...) 
only works with a list of 2 arrays but a list of %d array(s) was provided.", constructingFactory.getFunctionName(getClass()), mannWhitneyUInput.size())); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/Matrix.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/Matrix.java index ed10a8c3e4d5..87940931eff6 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/Matrix.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/Matrix.java @@ -23,18 +23,20 @@ import java.util.Iterator; +@SuppressWarnings({"rawtypes"}) public class Matrix implements Iterable, Attributes { private double[][] data; private List columnLabels; private List rowLabels; - private Map attributes = new HashMap(); + private Map attributes = new HashMap<>(); public Matrix(double[][] data) { this.data = data; } + @SuppressWarnings({"rawtypes"}) public Map getAttributes() { return this.attributes; } @@ -75,10 +77,12 @@ public int getColumnCount() { return data[0].length; } + @SuppressWarnings({"rawtypes"}) public Iterator iterator() { return new MatrixIterator(data); } + @SuppressWarnings({"rawtypes"}) private static class MatrixIterator implements Iterator { private double[][] d; @@ -88,6 +92,7 @@ public MatrixIterator(double[][] data) { d = data; } + @SuppressWarnings({"unchecked"}) public Object next() { double[] row = d[index++]; List list = new ArrayList(); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MatrixEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MatrixEvaluator.java index f17af331a808..ede88d5db2d3 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MatrixEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MatrixEvaluator.java @@ -39,6 +39,7 @@ public MatrixEvaluator(StreamExpression expression, StreamFactory factory) throw public Object doWork(Object... values) throws IOException { double[][] data = new double[values.length][]; for(int i=0; i vec = (List)values[i]; double[] array = new double[vec.size()]; for(int j=0; j vec = (List)o; double[][] data1 = new double[1][vec.size()]; for(int i=0; i c = (List) value; double[] data = new double[c.size()]; for(int i=0; i< c.size(); i++) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MemsetEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MemsetEvaluator.java index e8ad9407a86f..635df893a65e 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MemsetEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MemsetEvaluator.java @@ -119,14 +119,14 @@ public Object evaluate(Tuple tuple) throws IOException { try { in.setStreamContext(streamContext); in.open(); - Map> arrays = new HashMap(); + Map> arrays = new HashMap<>(); //Initialize the variables for(String var : vars) { if(size > -1) { - arrays.put(var, new ArrayList(size)); + arrays.put(var, new ArrayList<>(size)); } else { - arrays.put(var, new ArrayList()); + arrays.put(var, new ArrayList<>()); } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MinMaxScaleEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MinMaxScaleEvaluator.java index 399691022b8f..1bb0855f83d0 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MinMaxScaleEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MinMaxScaleEvaluator.java @@ -57,6 +57,7 @@ public Object doWork(Object... 
values) throws IOException { return new Matrix(scaled); } else if(values[0] instanceof List) { + @SuppressWarnings({"unchecked"}) List vec = (List)values[0]; double[] data = new double[vec.size()]; @@ -65,7 +66,7 @@ public Object doWork(Object... values) throws IOException { } data = scale(data, min, max); - List scaled = new ArrayList(data.length); + List scaled = new ArrayList<>(data.length); for(double d : data) { scaled.add(d); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ModeEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ModeEvaluator.java index b2529c640580..72156d6471af 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ModeEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ModeEvaluator.java @@ -43,6 +43,7 @@ public Object doWork(Object value) throws IOException{ throw new IOException(String.format(Locale.ROOT, "Unable to find %s(...) because the value is null", constructingFactory.getFunctionName(getClass()))); } else if(value instanceof List){ + @SuppressWarnings({"unchecked"}) List c = (List) value; double[] data = new double[c.size()]; for(int i=0; i< c.size(); i++) { @@ -50,7 +51,7 @@ else if(value instanceof List){ } double[] mode = StatUtils.mode(data); - List l = new ArrayList(); + List l = new ArrayList<>(); for(double d : mode) { l.add(d); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MonteCarloEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MonteCarloEvaluator.java index b0ec8c5d1e18..24df98b1ccb5 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MonteCarloEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MonteCarloEvaluator.java @@ -35,8 +35,10 @@ public class MonteCarloEvaluator extends RecursiveEvaluator { protected static final long serialVersionUID = 1L; + @SuppressWarnings({"rawtypes"}) private Map variables = new LinkedHashMap(); + @SuppressWarnings({"unchecked"}) public MonteCarloEvaluator(StreamExpression expression, StreamFactory factory) throws IOException{ super(expression, factory); @@ -44,11 +46,8 @@ public MonteCarloEvaluator(StreamExpression expression, StreamFactory factory) t //Get all the named params Set echo = null; boolean echoAll = false; - String currentName = null; for(StreamExpressionParameter np : namedParams) { String name = ((StreamExpressionNamedParameter)np).getName(); - currentName = name; - StreamExpressionParameter param = ((StreamExpressionNamedParameter)np).getParameter(); if(factory.isEvaluator((StreamExpression)param)) { @@ -83,7 +82,7 @@ public Object evaluate(Tuple tuple) throws IOException { StreamEvaluator iterationsEvaluator = containedEvaluators.get(1); Number itNum = (Number)iterationsEvaluator.evaluate(tuple); int it = itNum.intValue(); - List results = new ArrayList(); + List results = new ArrayList<>(); for(int i=0; i> entries = variables.entrySet(); for(Map.Entry entry : entries) { String name = entry.getKey(); Object o = entry.getValue(); if(o instanceof TupleStream) { - List tuples = new ArrayList(); + List tuples = new ArrayList<>(); TupleStream tStream = (TupleStream)o; tStream.setStreamContext(streamContext); try { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MultiKmeansEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MultiKmeansEvaluator.java index 86f1d85d6b7b..c41ce5bca62f 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MultiKmeansEvaluator.java 
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MultiKmeansEvaluator.java @@ -49,6 +49,7 @@ public MultiKmeansEvaluator(StreamExpression expression, StreamFactory factory) } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object... values) throws IOException { if(values.length != 3) { @@ -81,10 +82,12 @@ public Object doWork(Object... values) throws IOException { throw new IOException("The third parameter for multiKmeans should be trials."); } + @SuppressWarnings({"rawtypes"}) KMeansPlusPlusClusterer kmeans = new KMeansPlusPlusClusterer(k, maxIterations); + @SuppressWarnings({"rawtypes"}) MultiKMeansPlusPlusClusterer multiKmeans = new MultiKMeansPlusPlusClusterer(kmeans, trials); - List points = new ArrayList(); + List points = new ArrayList<>(); double[][] data = matrix.getData(); List ids = matrix.getRowLabels(); @@ -94,6 +97,7 @@ public Object doWork(Object... values) throws IOException { points.add(new KmeansEvaluator.ClusterPoint(ids.get(i), vec)); } + @SuppressWarnings({"rawtypes"}) Map fields = new HashMap(); fields.put("k", k); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MultiVariateNormalDistributionEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MultiVariateNormalDistributionEvaluator.java index bc2fbcb80a56..1a06701b8c81 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MultiVariateNormalDistributionEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/MultiVariateNormalDistributionEvaluator.java @@ -41,6 +41,7 @@ public Object doWork(Object first, Object second) throws IOException{ throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - null found for the second value",toExpression(constructingFactory))); } + @SuppressWarnings({"unchecked"}) List means = (List)first; Matrix covar = (Matrix)second; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NaturalEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NaturalEvaluator.java index f8469eea2a62..5ad00e23cfcd 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NaturalEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NaturalEvaluator.java @@ -38,7 +38,7 @@ public NaturalEvaluator(StreamExpression expression, StreamFactory factory) thro @Override public Object doWork(Object value){ int natural = ((Number)value).intValue(); - List naturals = new ArrayList(); + List naturals = new ArrayList<>(); for(int i=0; i c = (List) value; double[] data = new double[c.size()]; for(int i=0; i< c.size(); i++) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeSumEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeSumEvaluator.java index 6717909e9df3..77c90ea997b6 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeSumEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeSumEvaluator.java @@ -67,13 +67,14 @@ public Object doWork(Object... 
values) throws IOException{ m.setColumnLabels(matrix.getColumnLabels()); return m; } else if(value instanceof List) { + @SuppressWarnings({"unchecked"}) List vals = (List)value; double[] doubles = new double[vals.size()]; for(int i=0; i unitList = new ArrayList(doubles.length); + List unitList = new ArrayList<>(doubles.length); double[] unitArray = MathArrays.normalizeArray(doubles, sumTo); for(double d : unitArray) { unitList.add(d); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NotNullEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NotNullEvaluator.java index 264135e579b7..a7282d19ed6f 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NotNullEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NotNullEvaluator.java @@ -42,6 +42,7 @@ public Object doWork(Object ... values) throws IOException { if(values[0] instanceof String) { //Check to see if the this tuple had a null value for that string. + @SuppressWarnings({"rawtypes"}) Map tupleContext = getStreamContext().getTupleContext(); String nullField = (String)tupleContext.get("null"); if(nullField != null && nullField.equals(values[0])) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/OLSRegressionEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/OLSRegressionEvaluator.java index c708a1447726..f35299af95d2 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/OLSRegressionEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/OLSRegressionEvaluator.java @@ -37,6 +37,7 @@ public OLSRegressionEvaluator(StreamExpression expression, StreamFactory factory } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object ... values) throws IOException { Matrix observations = null; @@ -62,6 +63,7 @@ public Object doWork(Object ... values) throws IOException { OLSMultipleLinearRegression multipleLinearRegression = (OLSMultipleLinearRegression)regress(observationData, outcomeData); + @SuppressWarnings({"rawtypes"}) Map map = new HashMap(); map.put("regressandVariance", multipleLinearRegression.estimateRegressandVariance()); @@ -80,7 +82,9 @@ public Object doWork(Object ... 
values) throws IOException { return new MultipleRegressionTuple(multipleLinearRegression, map); } + @SuppressWarnings({"unchecked"}) private List list(double[] values) { + @SuppressWarnings({"rawtypes"}) List list = new ArrayList(); for(double d : values) { list.add(d); @@ -105,9 +109,10 @@ public MultipleRegressionTuple(MultipleLinearRegression multipleLinearRegression } public double predict(double[] values) { + @SuppressWarnings({"unchecked"}) List weights = (List)get("regressionParameters"); double prediction = 0.0; - List predictors = new ArrayList(); + List predictors = new ArrayList<>(); predictors.add(1.0D); for(double d : values) { predictors.add(d); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/OnesEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/OnesEvaluator.java index bef53fd2151b..97227a9ffc7f 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/OnesEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/OnesEvaluator.java @@ -38,7 +38,7 @@ public OnesEvaluator(StreamExpression expression, StreamFactory factory) throws @Override public Object doWork(Object value){ int size = ((Number)value).intValue(); - List ones = new ArrayList(); + List ones = new ArrayList<>(); for(int i=0; i(); for(int i=0; i outliers = new ArrayList(); + List outliers = new ArrayList<>(); if(dist instanceof IntegerDistribution) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PairSortEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PairSortEvaluator.java index 177475587922..5e2b75d77dc1 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PairSortEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PairSortEvaluator.java @@ -34,6 +34,7 @@ public PairSortEvaluator(StreamExpression expression, StreamFactory factory) thr } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object first, Object second) throws IOException{ if(null == first){ throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - null found for the first value",toExpression(constructingFactory))); @@ -55,7 +56,7 @@ public Object doWork(Object first, Object second) throws IOException{ throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - first list (%d) has a different size than the second list (%d)",toExpression(constructingFactory), l1.size(), l2.size())); } - List pairs = new ArrayList(); + List pairs = new ArrayList<>(); for(int idx = 0; idx < l1.size(); ++idx){ double[] pair = new double[2]; pair[0]= l1.get(idx).doubleValue(); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PairedTTestEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PairedTTestEvaluator.java index 56c2dc9104ad..fc865db43bd7 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PairedTTestEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PairedTTestEvaluator.java @@ -17,15 +17,14 @@ package org.apache.solr.client.solrj.io.eval; import java.io.IOException; -import java.util.HashMap; import java.util.List; import java.util.Locale; -import java.util.Map; import org.apache.commons.math3.stat.inference.TTest; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.stream.expr.StreamExpression; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; +import org.apache.solr.common.params.StreamParams; public class 
PairedTTestEvaluator extends RecursiveNumericListEvaluator implements TwoValueWorker { protected static final long serialVersionUID = 1L; @@ -42,9 +41,9 @@ public PairedTTestEvaluator(StreamExpression expression, StreamFactory factory) public Object doWork(Object value1, Object value2) throws IOException { TTest tTest = new TTest(); - Map map = new HashMap(); - Tuple tuple = new Tuple(map); + Tuple tuple = new Tuple(); if(value1 instanceof List) { + @SuppressWarnings({"unchecked"}) List values1 = (List)value1; double[] samples1 = new double[values1.size()]; @@ -53,6 +52,7 @@ public Object doWork(Object value1, Object value2) throws IOException { } if(value2 instanceof List) { + @SuppressWarnings({"unchecked"}) List values2 = (List) value2; double[] samples2 = new double[values2.size()]; @@ -63,7 +63,7 @@ public Object doWork(Object value1, Object value2) throws IOException { double tstat = tTest.pairedT(samples1, samples2); double pval = tTest.pairedTTest(samples1, samples2); tuple.put("t-statistic", tstat); - tuple.put("p-value", pval); + tuple.put(StreamParams.P_VALUE, pval); return tuple; } else { throw new IOException("Second parameter for pairedTtest must be a double array"); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PercentileEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PercentileEvaluator.java index 63fce524d2b4..506a7289686b 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PercentileEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PercentileEvaluator.java @@ -50,8 +50,9 @@ public Object doWork(Object first, Object second) throws IOException{ } else if(second instanceof List){ Percentile percentile = new Percentile(); percentile.setData(((List) first).stream().mapToDouble(value -> ((Number) value).doubleValue()).toArray()); + @SuppressWarnings({"unchecked"}) List values = (List) second; - List percentiles = new ArrayList(); + List percentiles = new ArrayList<>(); for(Number value : values) { percentiles.add(percentile.evaluate(value.doubleValue())); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PivotEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PivotEvaluator.java index 766665073b3c..c060d9896896 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PivotEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PivotEvaluator.java @@ -53,6 +53,7 @@ public Object doWork(Object... values) throws IOException { Object value4 = values[3]; if(value1 instanceof List) { + @SuppressWarnings({"unchecked"}) List tuples = (List)value1; String x = (String)value2; x = x.replace("\"", ""); @@ -62,8 +63,8 @@ public Object doWork(Object... values) throws IOException { String vlabel = (String)value4; vlabel = vlabel.replace("\"", ""); - Set xset = new TreeSet(); - Set yset = new TreeSet(); + Set xset = new TreeSet<>(); + Set yset = new TreeSet<>(); for(int i=0; i xlabels = new ArrayList(xset.size()); - Map xindexes = new HashMap(); + List xlabels = new ArrayList<>(xset.size()); + Map xindexes = new HashMap<>(); int xindex = 0; for (String xlabel :xset) { xlabels.add(xlabel); @@ -82,8 +83,8 @@ public Object doWork(Object... 
values) throws IOException { ++xindex; } - List ylabels = new ArrayList(yset.size()); - Map yindexes = new HashMap(); + List ylabels = new ArrayList<>(yset.size()); + Map yindexes = new HashMap<>(); int yindex = 0; for (String ylabel : yset) { ylabels.add(ylabel); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PolyFitDerivativeEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PolyFitDerivativeEvaluator.java index 6e1a5dca4afc..ec02316c7bd7 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PolyFitDerivativeEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PolyFitDerivativeEvaluator.java @@ -36,6 +36,7 @@ public PolyFitDerivativeEvaluator(StreamExpression expression, StreamFactory fac } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object... objects) throws IOException{ if(objects.length > 3) { @@ -92,6 +93,7 @@ public Object doWork(Object... objects) throws IOException{ PolynomialFunction pf = new PolynomialFunction(coef); UnivariateFunction univariateFunction = pf.derivative(); + @SuppressWarnings({"rawtypes"}) List list = new ArrayList(); for(double xvalue : x) { double yvalue= univariateFunction.value(xvalue); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PolyFitEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PolyFitEvaluator.java index 5292c9404698..2346a7b02658 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PolyFitEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PolyFitEvaluator.java @@ -35,6 +35,7 @@ public PolyFitEvaluator(StreamExpression expression, StreamFactory factory) thro } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public Object doWork(Object... 
objects) throws IOException{ if(objects.length > 3) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PowerEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PowerEvaluator.java index 4de0906fdb07..97c484572a54 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PowerEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PowerEvaluator.java @@ -49,8 +49,9 @@ public Object doWork(Object first, Object second) throws IOException { Number exponent = (Number) second; return Math.pow(value.doubleValue(), exponent.doubleValue()); } else if(second instanceof List) { + @SuppressWarnings({"unchecked"}) List exponents = (List) second; - List pows = new ArrayList(); + List pows = new ArrayList<>(); for(Number exponent : exponents) { pows.add(Math.pow(value.doubleValue(), exponent.doubleValue())); } @@ -59,11 +60,12 @@ public Object doWork(Object first, Object second) throws IOException { throw new IOException("The second parameter to the pow function must either be a scalar or list of scalars"); } } else if(first instanceof List) { + @SuppressWarnings({"unchecked"}) List values = (List) first; if(second instanceof Number) { Number exponent = (Number) second; - List out = new ArrayList(values.size()); + List out = new ArrayList<>(values.size()); for (Number value : values) { out.add(Math.pow(value.doubleValue(), exponent.doubleValue())); } @@ -71,7 +73,8 @@ public Object doWork(Object first, Object second) throws IOException { return out; } else if(second instanceof List) { - List out = new ArrayList(values.size()); + List out = new ArrayList<>(values.size()); + @SuppressWarnings({"unchecked"}) List exponents = (List)second; if(values.size() != exponents.size()) { throw new IOException("The pow function requires vectors of equal size if two vectors are provided."); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PredictEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PredictEvaluator.java index 3d876874bfe3..2be562d59330 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PredictEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PredictEvaluator.java @@ -68,6 +68,7 @@ public Object doWork(Object ... objects) throws IOException { OLSRegressionEvaluator.MultipleRegressionTuple regressedTuple = (OLSRegressionEvaluator.MultipleRegressionTuple) first; if (second instanceof List) { + @SuppressWarnings({"unchecked"}) List list = (List) second; double[] predictors = new double[list.size()]; @@ -80,7 +81,7 @@ public Object doWork(Object ... objects) throws IOException { Matrix m = (Matrix) second; double[][] data = m.getData(); - List predictions = new ArrayList(); + List predictions = new ArrayList<>(); for (double[] predictors : data) { predictions.add(regressedTuple.predict(predictors)); } @@ -97,8 +98,9 @@ public Object doWork(Object ... objects) throws IOException { predictors[0] = ((Number)second).doubleValue(); return regressedTuple.predict(predictors); } else if(second instanceof List) { + @SuppressWarnings({"unchecked"}) List vec = (List)second; - List predictions = new ArrayList(); + List predictions = new ArrayList<>(); for(Number num : vec) { double[] predictors = new double[1]; predictors[0] = num.doubleValue(); @@ -109,6 +111,7 @@ public Object doWork(Object ... 
objects) throws IOException { } else { //Handle multi-variate regression if (second instanceof List) { + @SuppressWarnings({"unchecked"}) List list = (List) second; double[] predictors = new double[list.size()]; @@ -128,7 +131,7 @@ public Object doWork(Object ... objects) throws IOException { m = regressedTuple.scale(m); } double[][] data = m.getData(); - List predictions = new ArrayList(); + List predictions = new ArrayList<>(); for (double[] predictors : data) { predictions.add(regressedTuple.predict(predictors)); } @@ -162,7 +165,7 @@ public Object doWork(Object ... objects) throws IOException { Matrix m = (Matrix) second; double[][] data = m.getData(); if (data[0].length == 2) { - List out = new ArrayList(); + List out = new ArrayList<>(); for (double[] row : data) { out.add(bivariateFunction.value(row[0], row[1])); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PrimesEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PrimesEvaluator.java index 94a362873753..f0d88a168989 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PrimesEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PrimesEvaluator.java @@ -44,7 +44,7 @@ public Object doWork(Object... values) throws IOException { int sizeNum = ((Number)values[0]).intValue(); int startNum = ((Number)values[1]).intValue(); - List primes = new ArrayList(); + List primes = new ArrayList<>(); for(int i=0; i< sizeNum; i++) { int prime = Primes.nextPrime(startNum); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PutCacheEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PutCacheEvaluator.java index dfd46f90c91b..0fb9379800a7 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PutCacheEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PutCacheEvaluator.java @@ -37,6 +37,7 @@ public PutCacheEvaluator(StreamExpression expression, StreamFactory factory) thr } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public Object doWork(Object... values) throws IOException { ConcurrentMap objectCache = this.streamContext.getObjectCache(); if(values.length == 3) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RecursiveEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RecursiveEvaluator.java index 97224dd6018f..04f987ae0a0c 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RecursiveEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RecursiveEvaluator.java @@ -22,8 +22,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.HashMap; -import java.util.Map; import java.util.List; import java.util.Locale; import java.util.Set; @@ -76,7 +74,7 @@ else if(value instanceof Collection){ //Let's first check to see if we have a List of Strings. //If we do let's try and convert to a list of doubles and see what happens try { - List vector = new ArrayList(); + List vector = new ArrayList<>(); boolean allDoubles = true; for(Object o : (Collection)value) { if(o instanceof String) { @@ -145,12 +143,12 @@ else if(value instanceof List){ //can be contained within a tuple. 
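Several hunks in this patch (FrequencyTableEvaluator and HistogramEvaluator earlier, RecursiveEvaluator just below) stop building an intermediate HashMap to hand to the Tuple constructor and instead populate the Tuple directly. A short sketch of that copy pattern, assuming the no-argument Tuple constructor and the getFields()/get()/put() accessors these hunks rely on:

import java.util.Map;
import org.apache.solr.client.solrj.io.Tuple;

public class TupleCopySketch {
  // Rebuild a Tuple field by field through put(), mirroring the
  // normalizeOutputType() hunk below; no intermediate HashMap needed.
  static Tuple copy(Tuple in) {
    Tuple out = new Tuple();
    Map<?, ?> fields = in.getFields(); // wildcard view of solrj's raw Map is warning-free
    for (Object key : fields.keySet()) {
      out.put(key, in.get(key));
    }
    return out;
  }
}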
Tuple tuple = (Tuple)value; - Map map = new HashMap(); - for(Object o : tuple.fields.keySet()) { - Object v = tuple.fields.get(o); - map.put(o, normalizeOutputType(v)); + Tuple newTuple = new Tuple(); + for(Object o : tuple.getFields().keySet()) { + Object v = tuple.get(o); + newTuple.put(o, normalizeOutputType(v)); } - return new Tuple(map); + return newTuple; } else{ // anything else can just be returned as is diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RegressionEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RegressionEvaluator.java index 35627497c824..a06a2c7a8fe7 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RegressionEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RegressionEvaluator.java @@ -49,7 +49,9 @@ public Object doWork(Object first, Object second) throws IOException{ throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - found type %s for the second value, expecting a list of numbers",toExpression(constructingFactory), first.getClass().getSimpleName())); } + @SuppressWarnings({"unchecked"}) List l1 = (List)first; + @SuppressWarnings({"unchecked"}) List l2 = (List)second; if(l2.size() < l1.size()){ diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RemoveCacheEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RemoveCacheEvaluator.java index 92fa3cd1035f..c3bd47b4c8c4 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RemoveCacheEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RemoveCacheEvaluator.java @@ -37,12 +37,14 @@ public RemoveCacheEvaluator(StreamExpression expression, StreamFactory factory) @Override public Object doWork(Object... values) throws IOException { + @SuppressWarnings({"rawtypes"}) ConcurrentMap objectCache = this.streamContext.getObjectCache(); if(values.length == 2) { String space = (String)values[0]; String key = (String)values[1]; space = space.replace("\"", ""); key = key.replace("\"", ""); + @SuppressWarnings({"rawtypes"}) ConcurrentMap spaceCache = (ConcurrentMap)objectCache.get(space); if(spaceCache != null) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RepeatEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RepeatEvaluator.java index c2f8aecc99c8..9e795191f5fe 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RepeatEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RepeatEvaluator.java @@ -39,7 +39,7 @@ public RepeatEvaluator(StreamExpression expression, StreamFactory factory) throw public Object doWork(Object value1, Object value2){ double d = ((Number)value1).doubleValue(); int size = ((Number)value2).intValue(); - List repeated = new ArrayList(); + List repeated = new ArrayList<>(); for(int i=0; i list = new ArrayList(); + List list = new ArrayList<>(); for(double d : row) { list.add(d); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SampleEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SampleEvaluator.java index 5ea29e6d41e1..fea307179b9e 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SampleEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SampleEvaluator.java @@ -83,7 +83,7 @@ public Object doWork(Object ... 
objects) throws IOException{ } else { MultivariateNormalDistribution multivariateNormalDistribution = (MultivariateNormalDistribution)first; double[] sample = multivariateNormalDistribution.sample(); - List sampleList = new ArrayList(sample.length); + List sampleList = new ArrayList<>(sample.length); for(int i=0; i nums = (List)value2; - List out = new ArrayList(); + List out = new ArrayList<>(); for(Number num : nums) { out.add(operate(num.doubleValue(), d)); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ScaleEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ScaleEvaluator.java index 5377fe20f088..01801baa80a5 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ScaleEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ScaleEvaluator.java @@ -51,7 +51,7 @@ public Object doWork(Object first, Object second) throws IOException{ double[] scaleOver; if(second instanceof Number){ - scaleOver = Arrays.asList((Number)second).stream().mapToDouble(value -> ((Number)value).doubleValue()).toArray(); + scaleOver = Arrays.asList((Number)second).stream().mapToDouble(value -> (value).doubleValue()).toArray(); } else{ scaleOver = ((List)second).stream().mapToDouble(value -> ((Number)value).doubleValue()).toArray(); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SetColumnLabelsEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SetColumnLabelsEvaluator.java index a178c2e9dabf..d4610c0ceab0 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SetColumnLabelsEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SetColumnLabelsEvaluator.java @@ -41,9 +41,10 @@ public Object doWork(Object value1, Object value2) throws IOException { } else { Matrix matrix = (Matrix)value1; + @SuppressWarnings({"rawtypes"}) List colLabels = (List)value2; //Convert numeric labels to strings. - List strLabels = new ArrayList(colLabels.size()); + List strLabels = new ArrayList<>(colLabels.size()); for(Object o : colLabels) { strLabels.add(o.toString()); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SetRowLabelsEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SetRowLabelsEvaluator.java index 390299e33384..cc6782f3f007 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SetRowLabelsEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SetRowLabelsEvaluator.java @@ -40,11 +40,12 @@ public Object doWork(Object value1, Object value2) throws IOException { throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - found type %s for value, expecting an array of labels.",toExpression(constructingFactory), value2.getClass().getSimpleName())); } else { Matrix matrix = (Matrix)value1; + @SuppressWarnings({"rawtypes"}) List rowlabels = (List)value2; //Convert numeric labels to strings. 
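
ScaleEvaluator, patched above, normalizes its second argument (either a bare Number or a List of Numbers) into a double[] before scaling. A minimal standalone sketch of that coercion; the helper name is invented, and the single-Number branch is simplified relative to the patch, which streams over Arrays.asList:

    import java.util.List;

    // Sketch: a bare Number becomes a one-element array, a List is unboxed
    // element by element via a double stream.
    public class ScaleArgs {

      public static double[] toDoubleArray(Object second) {
        if (second instanceof Number) {
          return new double[] { ((Number) second).doubleValue() };
        }
        return ((List<?>) second).stream()
            .mapToDouble(v -> ((Number) v).doubleValue())
            .toArray();
      }

      public static void main(String[] args) {
        System.out.println(toDoubleArray(3).length);                  // 1
        System.out.println(toDoubleArray(List.of(1, 2.5, 3)).length); // 3
      }
    }
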
- List strLabels = new ArrayList(rowlabels.size()); + List strLabels = new ArrayList<>(rowlabels.size()); for(Object o : rowlabels) { strLabels.add(o.toString()); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SetValueEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SetValueEvaluator.java index 8ded259e20f7..c56ecc2d87e2 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SetValueEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SetValueEvaluator.java @@ -19,8 +19,6 @@ import java.io.IOException; import java.util.Locale; -import java.util.Map; -import java.util.HashMap; import org.apache.solr.client.solrj.io.stream.expr.StreamExpression; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; @@ -47,9 +45,9 @@ public Object doWork(Object... values) throws IOException { value = ((String)value).replace("\"", ""); } key = key.replace("\"", ""); - Map map = new HashMap(tuple.fields); - map.put(key, value); - return new Tuple(map); + Tuple newTuple = tuple.clone(); + newTuple.put(key, value); + return newTuple; } else { throw new IOException("The setValue function expects a Tuple as the first parameter"); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SplineEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SplineEvaluator.java index 433594552a13..eb4261668fc6 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SplineEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SplineEvaluator.java @@ -33,6 +33,7 @@ public SplineEvaluator(StreamExpression expression, StreamFactory factory) throw } @Override + @SuppressWarnings({"unchecked"}) public Object doWork(Object... objects) throws IOException{ Object first = objects[0]; @@ -56,7 +57,7 @@ public Object doWork(Object... objects) throws IOException{ SplineInterpolator interpolator = new SplineInterpolator(); PolynomialSplineFunction spline = interpolator.interpolate(x, y); - List list = new ArrayList(); + List list = new ArrayList<>(); for(double xvalue : x) { list.add(spline.value(xvalue)); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SplitEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SplitEvaluator.java index 5fd7901494d4..4eff612644f9 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SplitEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SplitEvaluator.java @@ -43,7 +43,7 @@ public Object doWork(Object value1, Object value2){ String s = value1.toString(); String p = value2.toString(); String[] tokens = s.split(p, -1); - List strings = new ArrayList(tokens.length); + List strings = new ArrayList<>(tokens.length); for(String tok : tokens) { strings.add(tok); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/StandardDeviationEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/StandardDeviationEvaluator.java index 83132d099879..57e396dcd4b3 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/StandardDeviationEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/StandardDeviationEvaluator.java @@ -44,6 +44,7 @@ public Object doWork(Object value) throws IOException{ throw new IOException(String.format(Locale.ROOT, "Unable to find %s(...) 
because the value is null", constructingFactory.getFunctionName(getClass()))); } else if(value instanceof List){ + @SuppressWarnings({"unchecked"}) List c = (List) value; double[] data = new double[c.size()]; for(int i=0; i< c.size(); i++) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SumColumnsEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SumColumnsEvaluator.java index 510099db8672..e43c8edaba85 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SumColumnsEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SumColumnsEvaluator.java @@ -48,7 +48,7 @@ public Object doWork(Object value) throws IOException{ double[][] data = matrix.getData(); RealMatrix realMatrix = new Array2DRowRealMatrix(data, false); - List sums = new ArrayList(data[0].length); + List sums = new ArrayList<>(data[0].length); for(int i=0; i sums = new ArrayList(data.length); + List sums = new ArrayList<>(data.length); for(int i=0; i)){ throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - found type %s for value, expecting a List",toExpression(constructingFactory), value.getClass().getSimpleName())); } + @SuppressWarnings({"unchecked"}) List list = (List)value; if(0 == list.size()){ diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TTestEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TTestEvaluator.java index 6273376d7f11..acc5e80d2721 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TTestEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TTestEvaluator.java @@ -17,15 +17,14 @@ package org.apache.solr.client.solrj.io.eval; import java.io.IOException; -import java.util.HashMap; import java.util.List; import java.util.Locale; -import java.util.Map; import org.apache.commons.math3.stat.inference.TTest; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.stream.expr.StreamExpression; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; +import org.apache.solr.common.params.StreamParams; public class TTestEvaluator extends RecursiveNumericEvaluator implements TwoValueWorker { protected static final long serialVersionUID = 1L; @@ -42,12 +41,12 @@ public TTestEvaluator(StreamExpression expression, StreamFactory factory) throws public Object doWork(Object value1, Object value2) throws IOException { TTest tTest = new TTest(); - Map map = new HashMap(); - Tuple tuple = new Tuple(map); + Tuple tuple = new Tuple(); if(value1 instanceof Number) { double mean = ((Number) value1).doubleValue(); if(value2 instanceof List) { + @SuppressWarnings({"unchecked"}) List values = (List) value2; double[] samples = new double[values.size()]; for (int i = 0; i < samples.length; i++) { @@ -58,12 +57,13 @@ public Object doWork(Object value1, Object value2) throws IOException { double pval = tTest.tTest(mean, samples); tuple.put("t-statistic", tstat); - tuple.put("p-value", pval); + tuple.put(StreamParams.P_VALUE, pval); return tuple; } else { throw new IOException("Second parameter for ttest must be a double array"); } } else if(value1 instanceof List) { + @SuppressWarnings({"unchecked"}) List values1 = (List)value1; double[] samples1 = new double[values1.size()]; @@ -73,6 +73,7 @@ public Object doWork(Object value1, Object value2) throws IOException { } if(value2 instanceof List) { + @SuppressWarnings({"unchecked"}) List values2 = (List) value2; double[] samples2 = new double[values2.size()]; @@ 
-83,7 +84,7 @@ public Object doWork(Object value1, Object value2) throws IOException { double tstat = tTest.t(samples1, samples2); double pval = tTest.tTest(samples1, samples2); tuple.put("t-statistic", tstat); - tuple.put("p-value", pval); + tuple.put(StreamParams.P_VALUE, pval); return tuple; } else { throw new IOException("Second parameter for ttest must be a double array"); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TermVectorsEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TermVectorsEvaluator.java index 5d6dba96deff..6973a8dead94 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TermVectorsEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TermVectorsEvaluator.java @@ -74,6 +74,7 @@ public Object doWork(Object... objects) throws IOException { if(!(objects[0] instanceof List)) { throw new IOException("The termVectors function expects a list of Tuples as a parameter."); } else { + @SuppressWarnings({"rawtypes"}) List list = (List)objects[0]; if(list.size() > 0) { Object o = list.get(0); @@ -85,18 +86,20 @@ public Object doWork(Object... objects) throws IOException { } } + @SuppressWarnings({"unchecked"}) List tuples = (List) objects[0]; - TreeMap docFreqs = new TreeMap(); - List rowLabels = new ArrayList(); + TreeMap docFreqs = new TreeMap<>(); + List rowLabels = new ArrayList<>(); for (Tuple tuple : tuples) { - Set docTerms = new HashSet(); + Set docTerms = new HashSet<>(); if (tuple.get("terms") == null) { throw new IOException("The document tuples must contain a terms field"); } + @SuppressWarnings({"unchecked"}) List terms = (List) tuple.get("terms"); String id = tuple.getString("id"); @@ -147,12 +150,13 @@ public Object doWork(Object... objects) throws IOException { } int totalTerms = docFreqs.size(); Set keys = docFreqs.keySet(); - List features = new ArrayList(keys); + List features = new ArrayList<>(keys); double[][] docVec = new double[tuples.size()][]; for (int t = 0; t < tuples.size(); t++) { Tuple tuple = tuples.get(t); + @SuppressWarnings({"unchecked"}) List terms = (List) tuple.get("terms"); - Map termFreq = new HashMap(); + Map termFreq = new HashMap<>(); for (String term : terms) { if (docFreqs.containsKey(term)) { @@ -170,7 +174,7 @@ public Object doWork(Object... objects) throws IOException { String feature = features.get(i); int df = docFreqs.get(feature); int tf = termFreq.containsKey(feature) ? 
termFreq.get(feature) : 0; - termVec[i] = Math.sqrt(tf) * (double) (Math.log((tuples.size() + 1) / (double) (df + 1)) + 1.0); + termVec[i] = Math.sqrt(tf) * (Math.log((tuples.size() + 1) / (double) (df + 1)) + 1.0); } docVec[t] = termVec; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TimeDifferencingEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TimeDifferencingEvaluator.java index 6424d0a780ff..a47979c1877c 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TimeDifferencingEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TimeDifferencingEvaluator.java @@ -19,6 +19,7 @@ import java.io.IOException; import java.util.List; +import java.util.ArrayList; import java.util.Locale; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -26,7 +27,7 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpression; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; -public class TimeDifferencingEvaluator extends RecursiveNumericEvaluator implements ManyValueWorker{ +public class TimeDifferencingEvaluator extends RecursiveObjectEvaluator implements ManyValueWorker{ protected static final long serialVersionUID = 1L; @@ -38,32 +39,86 @@ public TimeDifferencingEvaluator(StreamExpression expression, StreamFactory fact } @Override public Object doWork(Object... values) throws IOException { - if (!(1 == values.length || values.length == 2)){ - throw new IOException(String.format(Locale.ROOT,"%s(...) only works with 1 or 2 values but %d were provided", constructingFactory.getFunctionName(getClass()), values.length)); + if (!(1 == values.length || values.length == 2)) { + throw new IOException(String.format(Locale.ROOT, "%s(...) only works with 1 or 2 values but %d were provided", constructingFactory.getFunctionName(getClass()), values.length)); } - List timeseriesValues = (List )values[0]; - Number lagValue = 1; + if (values[0] instanceof List) { + @SuppressWarnings({"unchecked"}) + List timeseriesValues = (List) values[0]; + Number lagValue = 1; - if(1 == values.length) { - if (!(timeseriesValues instanceof List)) { - throw new IOException(String.format(Locale.ROOT, "Invalid expression %s - found type %s for the first value, expecting a List", toExpression(constructingFactory), values[0].getClass().getSimpleName())); + if (1 == values.length) { + if (!(timeseriesValues instanceof List)) { + throw new IOException(String.format(Locale.ROOT, "Invalid expression %s - found type %s for the first value, expecting a List", toExpression(constructingFactory), values[0].getClass().getSimpleName())); + } + if (!(timeseriesValues.size() > 1)) { + throw new IOException(String.format(Locale.ROOT, "Invalid expression %s - found list size of %s for the first value, expecting a List of size > 0.", toExpression(constructingFactory), timeseriesValues.size())); + } } - if (!(timeseriesValues.size() > 1)) { - throw new IOException(String.format(Locale.ROOT, "Invalid expression %s - found list size of %s for the first value, expecting a List of size > 0.", toExpression(constructingFactory), timeseriesValues.size())); + if (2 == values.length) { + lagValue = (Number) values[1]; + if (!(lagValue instanceof Number)) { + throw new IOException(String.format(Locale.ROOT, "Invalid expression %s - found type %s for the second value, expecting a Number", toExpression(constructingFactory), values[1].getClass().getSimpleName())); + } + if (lagValue.intValue() > timeseriesValues.size()) { + throw new 
IOException(String.format(Locale.ROOT, "Invalid expression %s - found a lag size of %s for the second value, the first value has a List size of %s, expecting a lag value less than the List size", toExpression(constructingFactory), lagValue.intValue(), timeseriesValues.size())); + } } - } - if(2 == values.length) { - lagValue = (Number) values[1]; - if(!(lagValue instanceof Number)){ - throw new IOException(String.format(Locale.ROOT, "Invalid expression %s - found type %s for the second value, expecting a Number", toExpression(constructingFactory), values[1].getClass().getSimpleName())); + final int lag = lagValue.intValue(); + return IntStream.range(lag, timeseriesValues.size()) + .mapToObj(n -> (timeseriesValues.get(n).doubleValue() - timeseriesValues.get(n - lag).doubleValue())) + .collect(Collectors.toList()); + } else if(values[0] instanceof Matrix) { + + //Diff each row of the matrix + + Matrix matrix = (Matrix)values[0]; + double[][] data = matrix.getData(); + double[][] diffedData = new double[data.length][]; + Number lagValue = 1; + + if (2 == values.length) { + lagValue = (Number) values[1]; + if (!(lagValue instanceof Number)) { + throw new IOException(String.format(Locale.ROOT, "Invalid expression %s - found type %s for the second value, expecting a Number", toExpression(constructingFactory), values[1].getClass().getSimpleName())); + } + } + + int lag = lagValue.intValue(); + + for(int i=0; i<data.length; i++) { + double[] row = data[i]; + List<Number> timeseriesValues = new ArrayList<>(row.length); + for(double d : row) { + timeseriesValues.add(d); + } + + List<Double> diffedList = IntStream.range(lag, timeseriesValues.size()) + .mapToObj(n -> (timeseriesValues.get(n).doubleValue() - timeseriesValues.get(n - lag).doubleValue())) + .collect(Collectors.toList()); + double[] diffedRow = new double[diffedList.size()]; + for(int r=0; r<diffedRow.length; r++) { + diffedRow[r] = diffedList.get(r); + } + diffedData[i] = diffedRow; + } - if (lagValue.intValue() > timeseriesValues.size()) { - throw new IOException(String.format(Locale.ROOT, "Invalid expression %s - found a lag size of %s for the second value, the first value has a List size of %s, expecting a lag value less than the List size", toExpression(constructingFactory), lagValue.intValue(), timeseriesValues.size())); + + Matrix diffedMatrix = new Matrix(diffedData); + diffedMatrix.setRowLabels(matrix.getRowLabels()); + List<String> columns = matrix.getColumnLabels(); + if(columns != null) { + List<String> newColumns = new ArrayList<>(columns.size() - lag); + + for (int i = lag; i < columns.size(); i++) { + newColumns.add(columns.get(i)); + } + + diffedMatrix.setColumnLabels(newColumns); } + return diffedMatrix; + + } else { + throw new IOException(String.format(Locale.ROOT, "Invalid expression %s - first parameter must be a list or a matrix", toExpression(constructingFactory))); } - final int lag = lagValue.intValue(); - return IntStream.range(lag, timeseriesValues.size()) - .mapToObj(n -> (timeseriesValues.get(n).doubleValue()-timeseriesValues.get(n-lag).doubleValue())) - .collect(Collectors.toList()); } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TopFeaturesEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TopFeaturesEvaluator.java index e2dddfb8ff7f..d8d843f8d0e5 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TopFeaturesEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TopFeaturesEvaluator.java @@ -52,11 +52,11 @@ public Object doWork(Object value1, Object value2) throws IOException { } double[][] data = matrix.getData(); - List<List<String>> topFeatures = new ArrayList(); + List<List<String>> topFeatures = new ArrayList<>(); for(int i=0; i<data.length; i++) { double[] row = data[i]; - List<String> featuresRow = new
ArrayList(); + List featuresRow = new ArrayList<>(); List indexes = getMaxIndexes(row, k); for(int index : indexes) { featuresRow.add(features.get(index)); @@ -71,7 +71,7 @@ public Object doWork(Object value1, Object value2) throws IOException { } private List getMaxIndexes(double[] values, int k) { - TreeSet set = new TreeSet(); + TreeSet set = new TreeSet<>(); for(int i=0; i 0){ set.add(new Pair(i, values[i])); @@ -81,7 +81,7 @@ private List getMaxIndexes(double[] values, int k) { } } - List top = new ArrayList(k); + List top = new ArrayList<>(k); while(set.size() > 0) { top.add(set.pollLast().getIndex()); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/UnitEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/UnitEvaluator.java index f6463cd9a470..0deadee4f427 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/UnitEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/UnitEvaluator.java @@ -58,6 +58,7 @@ public Object doWork(Object value) throws IOException{ m.setColumnLabels(matrix.getColumnLabels()); return m; } else if(value instanceof List) { + @SuppressWarnings({"unchecked"}) List values = (List)value; double[] doubles = new double[values.size()]; for(int i=0; i unitList = new ArrayList(doubles.length); + List unitList = new ArrayList<>(doubles.length); double[] unitArray = unitVector.toArray(); for(double d : unitArray) { unitList.add(d); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ValueAtEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ValueAtEvaluator.java index 6df3709a040e..733f7873bb9b 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ValueAtEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ValueAtEvaluator.java @@ -33,6 +33,7 @@ public ValueAtEvaluator(StreamExpression expression, StreamFactory factory) thro public Object doWork(Object... values) throws IOException { if(values[0] instanceof List) { + @SuppressWarnings({"unchecked"}) List c = (List) values[0]; int index = -1; if(values.length == 2) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/VarianceEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/VarianceEvaluator.java index e7a3f11b7d6f..f5085bc74afc 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/VarianceEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/VarianceEvaluator.java @@ -44,6 +44,7 @@ public Object doWork(Object value) throws IOException{ throw new IOException(String.format(Locale.ROOT, "Unable to find %s(...) 
because the value is null", constructingFactory.getFunctionName(getClass()))); } else if(value instanceof List){ + @SuppressWarnings({"unchecked"}) List c = (List) value; double[] data = new double[c.size()]; for(int i=0; i< c.size(); i++) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/VectorFunction.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/VectorFunction.java index 8a5a66745a28..f25813b5012b 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/VectorFunction.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/VectorFunction.java @@ -21,6 +21,7 @@ import java.util.ArrayList; import java.util.Map; +@SuppressWarnings({"rawtypes"}) public class VectorFunction extends ArrayList { protected static final long serialVersionUID = 1L; @@ -28,6 +29,7 @@ public class VectorFunction extends ArrayList { private Object function; private Map context = new HashMap(); + @SuppressWarnings({"unchecked"}) public VectorFunction(Object function, double[] results) { this.function = function; for(double d : results) { @@ -35,6 +37,7 @@ public VectorFunction(Object function, double[] results) { } } + @SuppressWarnings({"unchecked"}) public VectorFunction(Object function, List values) { this.function = function; addAll(values); @@ -44,6 +47,7 @@ public Object getFunction() { return this.function; } + @SuppressWarnings({"unchecked"}) public void addToContext(Object key, Object value) { this.context.put(key, value); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ZerosEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ZerosEvaluator.java index c3791858a5f3..9fc4a1118cc9 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ZerosEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ZerosEvaluator.java @@ -38,7 +38,7 @@ public ZerosEvaluator(StreamExpression expression, StreamFactory factory) throws @Override public Object doWork(Object value){ int size = ((Number)value).intValue(); - List ones = new ArrayList(); + List ones = new ArrayList<>(); for(int i=0; i metrics, boolean trackTraversal, Set scatter, @@ -115,7 +115,7 @@ public GatherNodesStream(StreamExpression expression, StreamFactory factory) thr } - Set scatter = new HashSet(); + Set scatter = new HashSet<>(); StreamExpressionNamedParameter scatterExpression = factory.getNamedOperand(expression, "scatter"); @@ -169,7 +169,7 @@ public GatherNodesStream(StreamExpression expression, StreamFactory factory) thr } String[] rootNodes = fields[0].split(","); - List l = new ArrayList(); + List l = new ArrayList<>(); for(String n : rootNodes) { l.add(n.trim()); } @@ -181,7 +181,7 @@ public GatherNodesStream(StreamExpression expression, StreamFactory factory) thr } List metricExpressions = factory.getExpressionOperandsRepresentingTypes(expression, Expressible.class, Metric.class); - List metrics = new ArrayList(); + List metrics = new ArrayList<>(); for(int idx = 0; idx < metricExpressions.size(); ++idx){ metrics.add(factory.constructMetric(metricExpressions.get(idx))); } @@ -245,13 +245,14 @@ public GatherNodesStream(StreamExpression expression, StreamFactory factory) thr docFreq); } + @SuppressWarnings({"unchecked"}) private void init(String zkHost, String collection, TupleStream tupleStream, String traverseFrom, String traverseTo, String gather, - Map queryParams, + @SuppressWarnings({"rawtypes"})Map queryParams, List metrics, boolean trackTraversal, Set scatter, @@ -295,7 +296,7 @@ private 
StreamExpression toExpression(StreamFactory factory, boolean includeStre Set> entries = queryParams.entrySet(); // parameters - for(Map.Entry param : entries){ + for(@SuppressWarnings({"rawtypes"})Map.Entry param : entries){ String value = param.getValue().toString(); // SOLR-8409: This is a special case where the params contain a " character @@ -400,7 +401,7 @@ public void setStreamContext(StreamContext context) { } public List children() { - List l = new ArrayList(); + List l = new ArrayList<>(); l.add(tupleStream); return l; } @@ -409,10 +410,11 @@ public void open() throws IOException { tupleStream.open(); } + @SuppressWarnings({"unchecked"}) private class JoinRunner implements Callable> { private List nodes; - private List edges = new ArrayList(); + private List edges = new ArrayList<>(); public JoinRunner(List nodes) { this.nodes = nodes; @@ -421,7 +423,7 @@ public JoinRunner(List nodes) { public List call() { - Set flSet = new HashSet(); + Set flSet = new HashSet<>(); flSet.add(gather); flSet.add(traverseTo); @@ -508,24 +510,26 @@ public void close() throws IOException { tupleStream.close(); } + @SuppressWarnings({"unchecked"}) public Tuple read() throws IOException { if (out == null) { - List joinBatch = new ArrayList(); - List>> futures = new ArrayList(); - Map level = new HashMap(); + List joinBatch = new ArrayList<>(); + List>> futures = new ArrayList<>(); + Map level = new HashMap<>(); ExecutorService threadPool = null; try { threadPool = ExecutorUtil.newMDCAwareFixedThreadPool(4, new SolrNamedThreadFactory("GatherNodesStream")); - Map roots = new HashMap(); + Map roots = new HashMap<>(); while (true) { Tuple tuple = tupleStream.read(); if (tuple.EOF) { if (joinBatch.size() > 0) { JoinRunner joinRunner = new JoinRunner(joinBatch); + @SuppressWarnings({"rawtypes"}) Future future = threadPool.submit(joinRunner); futures.add(future); } @@ -541,7 +545,7 @@ public Tuple read() throws IOException { if(!roots.containsKey(key)) { Node node = new Node(value, trackTraversal); if (metrics != null) { - List _metrics = new ArrayList(); + List _metrics = new ArrayList<>(); for (Metric metric : metrics) { _metrics.add(metric.newInstance()); } @@ -557,9 +561,10 @@ public Tuple read() throws IOException { joinBatch.add(value); if (joinBatch.size() == 400) { JoinRunner joinRunner = new JoinRunner(joinBatch); + @SuppressWarnings({"rawtypes"}) Future future = threadPool.submit(joinRunner); futures.add(future); - joinBatch = new ArrayList(); + joinBatch = new ArrayList<>(); } } @@ -588,7 +593,7 @@ public Tuple read() throws IOException { } else { node = new Node(_gather, trackTraversal); if (metrics != null) { - List _metrics = new ArrayList(); + List _metrics = new ArrayList<>(); for (Metric metric : metrics) { _metrics.add(metric.newInstance()); } @@ -613,10 +618,7 @@ public Tuple read() throws IOException { if (out.hasNext()) { return out.next(); } else { - Map map = new HashMap(); - map.put("EOF", true); - Tuple tuple = new Tuple(map); - return tuple; + return Tuple.EOF(); } } @@ -641,18 +643,14 @@ public NodeStream(List ids) { public void open() {this.it = ids.iterator();} public void close() {} public StreamComparator getStreamSort() {return null;} - public List children() {return new ArrayList();} + public List children() {return new ArrayList<>();} public void setStreamContext(StreamContext context) {} public Tuple read() { - HashMap map = new HashMap(); if(it.hasNext()) { - map.put("node",it.next()); - return new Tuple(map); + return new Tuple("node",it.next()); } else { - - map.put("EOF", 
true); - return new Tuple(map); + return Tuple.EOF(); } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/Node.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/Node.java index befa5a7721c3..6b551d784d88 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/Node.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/Node.java @@ -30,7 +30,7 @@ public class Node { public Node(String id, boolean track) { this.id=id; if(track) { - ancestors = new HashSet(); + ancestors = new HashSet<>(); } } @@ -51,18 +51,18 @@ public void add(String ancestor, Tuple tuple) { } public Tuple toTuple(String collection, String field, int level, Traversal traversal) { - Map map = new HashMap(); + Tuple tuple = new Tuple(); - map.put("node", id); - map.put("collection", collection); - map.put("field", field); - map.put("level", level); + tuple.put("node", id); + tuple.put("collection", collection); + tuple.put("field", field); + tuple.put("level", level); boolean prependCollection = traversal.isMultiCollection(); List cols = traversal.getCollections(); if(ancestors != null) { - List l = new ArrayList(); + List l = new ArrayList<>(); for(String ancestor : ancestors) { String[] ancestorParts = ancestor.split("\\^"); @@ -76,15 +76,15 @@ public Tuple toTuple(String collection, String field, int level, Traversal trave } } - map.put("ancestors", l); + tuple.put("ancestors", l); } if(metrics != null) { for(Metric metric : metrics) { - map.put(metric.getIdentifier(), metric.getValue()); + tuple.put(metric.getIdentifier(), metric.getValue()); } } - return new Tuple(map); + return tuple; } } \ No newline at end of file diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java index 314ab92d5cf1..6f2b8bd40264 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java @@ -68,7 +68,7 @@ public class ShortestPathStream extends TupleStream implements Expressible { private int maxDepth; private String zkHost; private String collection; - private LinkedList shortestPaths = new LinkedList(); + private LinkedList shortestPaths = new LinkedList<>(); private boolean found; private StreamContext streamContext; private int threads; @@ -282,20 +282,21 @@ public void setStreamContext(StreamContext context) { } public List children() { - List l = new ArrayList(); + List l = new ArrayList<>(); return l; } + @SuppressWarnings({"unchecked", "rawtypes"}) public void open() throws IOException { - List>> allVisited = new ArrayList(); + List>> allVisited = new ArrayList<>(); Map visited = new HashMap(); visited.put(this.fromNode, null); allVisited.add(visited); int depth = 0; Map> nextVisited = null; - List targets = new ArrayList(); + List targets = new ArrayList<>(); ExecutorService threadPool = null; try { @@ -309,8 +310,8 @@ public void open() throws IOException { Iterator it = nodes.iterator(); nextVisited = new HashMap(); int batchCount = 0; - List queryNodes = new ArrayList(); - List futures = new ArrayList(); + List queryNodes = new ArrayList<>(); + List futures = new ArrayList<>(); JOIN: //Queue up all the batches while (it.hasNext()) { @@ -343,7 +344,7 @@ public void open() throws IOException { List parents = nextVisited.get(edge.to); parents.add(edge.from); } else { - List parents = new ArrayList(); + List parents = new 
ArrayList<>(); parents.add(edge.from); nextVisited.put(edge.to, parents); } @@ -353,7 +354,7 @@ public void open() throws IOException { List parents = nextVisited.get(edge.to); parents.add(edge.from); } else { - List parents = new ArrayList(); + List parents = new ArrayList<>(); parents.add(edge.from); nextVisited.put(edge.to, parents); } @@ -376,7 +377,7 @@ public void open() throws IOException { Set finalPaths = new HashSet(); if(targets.size() > 0) { for(Edge edge : targets) { - List paths = new ArrayList(); + List paths = new ArrayList<>(); LinkedList path = new LinkedList(); path.addFirst(edge.to); paths.add(path); @@ -403,8 +404,7 @@ public void open() throws IOException { for(LinkedList p : paths) { String s = p.toString(); if (!finalPaths.contains(s)){ - Tuple shortestPath = new Tuple(new HashMap()); - shortestPath.put("path", p); + Tuple shortestPath = new Tuple("path", p); shortestPaths.add(shortestPath); finalPaths.add(s); } @@ -416,7 +416,7 @@ public void open() throws IOException { private class JoinRunner implements Callable> { private List nodes; - private List edges = new ArrayList(); + private List edges = new ArrayList<>(); public JoinRunner(List nodes) { this.nodes = nodes; @@ -501,12 +501,11 @@ public Tuple read() throws IOException { Tuple t = shortestPaths.removeFirst(); return t; } else { - Map m = new HashMap(); - m.put("EOF", true); + Tuple tuple = Tuple.EOF(); if(!found) { - m.put("sorry", "No path found"); + tuple.put("sorry", "No path found"); } - return new Tuple(m); + return tuple; } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/Traversal.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/Traversal.java index 43d23b33b196..e5afd4d8a626 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/Traversal.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/Traversal.java @@ -22,11 +22,11 @@ public class Traversal { - private List> graph = new ArrayList(); - private List fields = new ArrayList(); - private List collections = new ArrayList(); - private Set scatter = new HashSet(); - private Set collectionSet = new HashSet(); + private List> graph = new ArrayList<>(); + private List fields = new ArrayList<>(); + private List collections = new ArrayList<>(); + private Set scatter = new HashSet<>(); + private Set collectionSet = new HashSet<>(); private boolean trackTraversal; private int depth; @@ -90,6 +90,7 @@ public enum Scatter { LEAVES; } + @SuppressWarnings({"unchecked"}) public Iterator iterator() { return new TraversalIterator(this, scatter); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/TraversalIterator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/TraversalIterator.java index 7cfe3756fb72..e2df8d51b6fe 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/TraversalIterator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/TraversalIterator.java @@ -26,6 +26,7 @@ import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.graph.Traversal.Scatter; +@SuppressWarnings({"rawtypes"}) class TraversalIterator implements Iterator { private List> graph; @@ -49,10 +50,10 @@ public TraversalIterator(Traversal traversal, Set scatter) { collections = traversal.getCollections(); fields = traversal.getFields(); - List outCollections = new ArrayList(); - List outFields = new ArrayList(); - List levelNums = new ArrayList(); - List> levelIterators = new ArrayList(); + List outCollections = new 
ArrayList<>(); + List outFields = new ArrayList<>(); + List levelNums = new ArrayList<>(); + List> levelIterators = new ArrayList<>(); if(scatter.contains(Scatter.BRANCHES)) { if(graph.size() > 1) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/ops/GroupOperation.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/ops/GroupOperation.java index 3db76ec2047b..70f24675c337 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/ops/GroupOperation.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/ops/GroupOperation.java @@ -20,7 +20,6 @@ import java.io.Serializable; import java.util.ArrayList; import java.util.Comparator; -import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Locale; @@ -47,6 +46,7 @@ public class GroupOperation implements ReduceOperation { private UUID operationNodeId = UUID.randomUUID(); private PriorityQueue priorityQueue; + @SuppressWarnings({"rawtypes"}) private Comparator comp; private StreamComparator streamComparator; private int size; @@ -76,6 +76,7 @@ public GroupOperation(StreamComparator streamComparator, int size) { init(streamComparator, size); } + @SuppressWarnings({"unchecked", "rawtypes"}) private void init(StreamComparator streamComparator, int size) { this.size = size; this.streamComparator = streamComparator; @@ -105,18 +106,22 @@ public Explanation toExplanation(StreamFactory factory) throws IOException { }); } + @SuppressWarnings({"unchecked"}) public Tuple reduce() { + @SuppressWarnings({"rawtypes"}) LinkedList ll = new LinkedList(); while(priorityQueue.size() > 0) { - ll.addFirst(priorityQueue.poll().getMap()); + ll.addFirst(priorityQueue.poll().getFields()); //This will clear priority queue and so it will be ready for the next group. 
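
GroupOperation.reduce(), shown above and completing just below, drains its bounded priority queue and reverses it with addFirst so the group comes out in sort order, then copies the group head and attaches the full list under a "group" key. A standalone sketch of the same reduce shape, using plain Maps in place of Solr Tuples (all names illustrative):

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.HashMap;
    import java.util.LinkedList;
    import java.util.List;
    import java.util.Map;
    import java.util.PriorityQueue;

    public class GroupReduce {

      public static Map<String, Object> reduce(PriorityQueue<Map<String, Object>> queue) {
        LinkedList<Map<String, Object>> ll = new LinkedList<>();
        while (!queue.isEmpty()) {
          ll.addFirst(queue.poll()); // poll() drains ascending; addFirst reverses
        }
        List<Map<String, Object>> group = new ArrayList<>(ll);
        Map<String, Object> head = new HashMap<>(group.get(0)); // group head
        head.put("group", group);                               // whole group attached
        return head;
      }

      public static void main(String[] args) {
        PriorityQueue<Map<String, Object>> q =
            new PriorityQueue<>(Comparator.comparingInt(m -> (int) m.get("count")));
        q.add(Map.of("id", "a", "count", 3));
        q.add(Map.of("id", "b", "count", 7));
        System.out.println(reduce(q)); // head is the highest-count record, plus "group"
      }
    }

Draining the queue as a side effect is what leaves it empty and ready for the next group, exactly as the comment in the hunk notes.
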
} - List list = new ArrayList(ll); + @SuppressWarnings({"rawtypes"}) + List list = new ArrayList<>(ll); + @SuppressWarnings({"rawtypes"}) Map groupHead = list.get(0); - Map map = new HashMap(groupHead); - map.put("group", list); - return new Tuple(map); + Tuple tuple = new Tuple(groupHead); + tuple.put("group", list); + return tuple; } public void operate(Tuple tuple) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ResultSetMetaDataImpl.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ResultSetMetaDataImpl.java index c32ed43e727d..63ff2262b530 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ResultSetMetaDataImpl.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ResultSetMetaDataImpl.java @@ -35,6 +35,7 @@ class ResultSetMetaDataImpl implements ResultSetMetaData { this.firstTuple = this.resultSet.getFirstTuple(); } + @SuppressWarnings({"rawtypes"}) private Class getColumnClass(int column) throws SQLException { Object o = this.firstTuple.get(this.getColumnLabel(column)); if(o == null) { @@ -90,6 +91,7 @@ public int getColumnDisplaySize(int column) throws SQLException { @Override public String getColumnLabel(int column) throws SQLException { + @SuppressWarnings({"unchecked"}) Map aliases = (Map) metadataTuple.get("aliases"); return aliases.get(this.getColumnName(column)); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CalculatorStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CalculatorStream.java index 49b5953b2120..d2efbb248dc6 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CalculatorStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CalculatorStream.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import org.apache.solr.client.solrj.io.Tuple; @@ -86,16 +85,11 @@ public void close() throws IOException { public Tuple read() throws IOException { - if(finished) { - HashMap m = new HashMap(); - m.put("EOF", true); - Tuple tuple = new Tuple(m); - return tuple; + if (finished) { + return Tuple.EOF(); } else { - HashMap m = new HashMap(); - Tuple tuple = new Tuple(m); finished = true; - return tuple; + return new Tuple(); } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CartesianProductStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CartesianProductStream.java index 24bf31b4cc83..4e4563e55df8 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CartesianProductStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CartesianProductStream.java @@ -199,6 +199,7 @@ public Tuple read() throws IOException { return generatedTuples.pop(); } + @SuppressWarnings({"unchecked"}) private LinkedList generateTupleList(Tuple original) throws IOException{ Map evaluatedValues = new HashMap<>(); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CellStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CellStream.java index a87c9ee6fa45..6eccac02675b 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CellStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CellStream.java @@ -18,10 +18,8 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Locale; -import java.util.Map; import org.apache.solr.client.solrj.io.Tuple; 
import org.apache.solr.client.solrj.io.comp.StreamComparator; @@ -123,7 +121,7 @@ public void close() throws IOException { public void open() throws IOException { try { stream.open(); - List list = new ArrayList(); + List list = new ArrayList<>(); while(true) { Tuple tuple = stream.read(); if(tuple.EOF) { @@ -134,9 +132,8 @@ public void open() throws IOException { } } - Map map = new HashMap(); - map.put(name, list); - tuple = new Tuple(map); + tuple = new Tuple(); + tuple.put(name, list); } finally { stream.close(); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java index 95cf2399528b..c2a87000bd42 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java @@ -272,15 +272,15 @@ public void setStreamContext(StreamContext context) { * ***/ public void open() throws IOException { - this.tuples = new TreeSet(); - this.solrStreams = new ArrayList(); - this.eofTuples = Collections.synchronizedMap(new HashMap()); + this.tuples = new TreeSet<>(); + this.solrStreams = new ArrayList<>(); + this.eofTuples = Collections.synchronizedMap(new HashMap<>()); constructStreams(); openStreams(); } - public Map getEofTuples() { + public Map getEofTuples() { return this.eofTuples; } @@ -288,9 +288,11 @@ public List children() { return solrStreams; } + @SuppressWarnings({"unchecked"}) private StreamComparator parseComp(String sort, String fl) throws IOException { String[] fls = fl.split(","); + @SuppressWarnings({"rawtypes"}) HashSet fieldSet = new HashSet(); for(String f : fls) { fieldSet.add(f.trim()); //Handle spaces in the field list. 
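
CloudSolrStream, patched in the hunks that follow, keeps one TupleWrapper per shard stream in an ordered set and always reads the smallest head, which amounts to a k-way merge of already-sorted shard results. A sketch of that merge over plain sorted lists; Cursor here plays the TupleWrapper role, and all names are illustrative rather than SolrJ API:

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.Iterator;
    import java.util.List;
    import java.util.PriorityQueue;

    public class SortedMerge {

      static class Cursor {
        final Iterator<Integer> it;
        Integer head;
        Cursor(Iterator<Integer> it) { this.it = it; advance(); }
        void advance() { head = it.hasNext() ? it.next() : null; }
      }

      public static List<Integer> merge(List<List<Integer>> shards) {
        PriorityQueue<Cursor> pq = new PriorityQueue<>(Comparator.comparingInt(c -> c.head));
        for (List<Integer> shard : shards) {
          Cursor c = new Cursor(shard.iterator());
          if (c.head != null) pq.add(c);   // empty shards contribute nothing
        }
        List<Integer> out = new ArrayList<>();
        while (!pq.isEmpty()) {
          Cursor c = pq.poll();            // smallest current head across shards
          out.add(c.head);
          c.advance();
          if (c.head != null) pq.add(c);   // re-insert so ordering is re-evaluated
        }
        return out;
      }

      public static void main(String[] args) {
        System.out.println(merge(List.of(List.of(1, 4, 9), List.of(2, 3), List.of(5))));
        // [1, 2, 3, 4, 5, 9]
      }
    }
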
@@ -339,7 +341,7 @@ public static Slice[] getSlices(String collectionName, ZkStateReader zkStateRead // check for alias or collection - List allCollections = new ArrayList(); + List allCollections = new ArrayList<>(); String[] collectionNames = collectionName.split(","); for(String col : collectionNames) { List collections = checkAlias @@ -397,7 +399,7 @@ protected void constructStreams() throws IOException { private void openStreams() throws IOException { ExecutorService service = ExecutorUtil.newMDCAwareCachedThreadPool(new SolrNamedThreadFactory("CloudSolrStream")); try { - List> futures = new ArrayList(); + List> futures = new ArrayList<>(); for (TupleStream solrStream : solrStreams) { StreamOpener so = new StreamOpener((SolrStream) solrStream, comp); Future future = service.submit(so); @@ -453,17 +455,15 @@ protected Tuple _read() throws IOException { } return t; } else { - Map m = new HashMap(); + Tuple tuple = Tuple.EOF(); if(trace) { - m.put("_COLLECTION_", this.collection); + tuple.put("_COLLECTION_", this.collection); } - - m.put("EOF", true); - - return new Tuple(m); + return tuple; } } + @SuppressWarnings({"overrides"}) protected class TupleWrapper implements Comparable { private Tuple tuple; private SolrStream stream; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CommitStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CommitStream.java index b29ea09f6a03..5885862d601d 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CommitStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CommitStream.java @@ -123,7 +123,7 @@ public Tuple read() throws IOException { // if the read document contains field 'batchIndexed' then it's a summary // document and we can update our count based on it's value. 
If not then // just increment by 1 - if(tuple.fields.containsKey(UpdateStream.BATCH_INDEXED_FIELD_NAME) && isInteger(tuple.getString(UpdateStream.BATCH_INDEXED_FIELD_NAME))){ + if(tuple.getFields().containsKey(UpdateStream.BATCH_INDEXED_FIELD_NAME) && isInteger(tuple.getString(UpdateStream.BATCH_INDEXED_FIELD_NAME))){ docsSinceCommit += Integer.parseInt(tuple.getString(UpdateStream.BATCH_INDEXED_FIELD_NAME)); } else{ diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CsvStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CsvStream.java index 561204fa3025..386cb5d5aaa2 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CsvStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CsvStream.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Locale; @@ -130,7 +129,7 @@ public Tuple read() throws IOException { if(fields.length != headers.length) { throw new IOException("Headers and lines must have the same number of fields [file:"+file+" line number:"+lineNumber+"]"); } - Tuple out = new Tuple(new HashMap()); + Tuple out = new Tuple(); out.put("id", file+"_"+lineNumber); for(int i=0; i 0) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java index 0257be9e3d35..2a2bc4ac96ba 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java @@ -21,7 +21,6 @@ import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Date; -import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; @@ -169,6 +168,7 @@ public void init(TupleStream tupleStream, String id, long runInterval, int queue init(tupleStream, id, runInterval, queueSize, false); } + @SuppressWarnings({"unchecked", "rawtypes"}) public void init(TupleStream tupleStream, String id, long runInterval, int queueSize, boolean terminate) { this.tupleStream = tupleStream; this.id = id; @@ -246,19 +246,19 @@ public void close() { } public List children() { - List children = new ArrayList(); + List children = new ArrayList<>(); children.add(tupleStream); return children; } public synchronized Tuple getInfo() { - Tuple tuple = new Tuple(new HashMap()); + Tuple tuple = new Tuple(); tuple.put(ID, id); tuple.put("startTime", startTime); tuple.put("stopTime", stopTime); tuple.put("iterations", iterations.get()); tuple.put("state", streamRunner.getState().toString()); - if(exception != null) { + if (exception != null) { tuple.put("exception", exception.getMessage()); } @@ -338,7 +338,7 @@ private void stream() { Tuple tuple = tupleStream.read(); if (tuple.EOF) { errors = 0; // Reset errors on successful run. 
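
CsvStream, in the hunk above, refuses rows whose field count disagrees with the header line and keys each emitted tuple by file plus line number. A sketch of that row-building rule, assuming a naive comma split with no quote handling (the real stream's splitting is more involved):

    import java.io.IOException;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class CsvRows {

      public static Map<String, String> toRow(String[] headers, String line,
                                              String file, long lineNumber) throws IOException {
        String[] fields = line.split(",", -1); // -1 keeps trailing empty fields
        if (fields.length != headers.length) {
          throw new IOException("Headers and lines must have the same number of fields [file:"
              + file + " line number:" + lineNumber + "]");
        }
        Map<String, String> row = new LinkedHashMap<>();
        row.put("id", file + "_" + lineNumber);     // synthetic per-row id
        for (int i = 0; i < headers.length; i++) {
          if (fields[i].length() > 0) {             // skip empty cells, as the stream does
            row.put(headers[i], fields[i]);
          }
        }
        return row;
      }

      public static void main(String[] args) throws IOException {
        System.out.println(toRow(new String[] {"a", "b"}, "1,2", "data.csv", 2));
        // {id=data.csv_2, a=1, b=2}
      }
    }
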
- if (tuple.fields.containsKey("sleepMillis")) { + if (tuple.getFields().containsKey("sleepMillis")) { this.sleepMillis = tuple.getLong("sleepMillis"); if(terminate && sleepMillis > 0) { @@ -400,11 +400,8 @@ private void stream() { } if(!eatTuples) { - Map m = new HashMap(); - m.put("EOF", true); - Tuple tuple = new Tuple(m); try { - queue.put(tuple); + queue.put(Tuple.EOF()); } catch (InterruptedException e) { log.error("Error in DaemonStream:{}", id, e); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DeepRandomStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DeepRandomStream.java index 9c9c2015cdd4..00b3d3c74f51 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DeepRandomStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DeepRandomStream.java @@ -257,9 +257,9 @@ public void setStreamContext(StreamContext context) { } public void open() throws IOException { - this.tuples = new LinkedList(); - this.solrStreams = new ArrayList(); - this.eofTuples = Collections.synchronizedMap(new HashMap()); + this.tuples = new LinkedList<>(); + this.solrStreams = new ArrayList<>(); + this.eofTuples = Collections.synchronizedMap(new HashMap<>()); constructStreams(); openStreams(); } @@ -278,7 +278,7 @@ public static Slice[] getSlices(String collectionName, ZkStateReader zkStateRead // check for alias or collection - List allCollections = new ArrayList(); + List allCollections = new ArrayList<>(); String[] collectionNames = collectionName.split(","); for(String col : collectionNames) { List collections = checkAlias @@ -349,7 +349,7 @@ protected void constructStreams() throws IOException { private void openStreams() throws IOException { ExecutorService service = ExecutorUtil.newMDCAwareCachedThreadPool(new SolrNamedThreadFactory("DeepRandomStream")); try { - List> futures = new ArrayList(); + List> futures = new ArrayList<>(); for (TupleStream solrStream : solrStreams) { StreamOpener so = new StreamOpener((SolrStream) solrStream, comp); Future future = service.submit(so); @@ -403,17 +403,15 @@ protected Tuple _read() throws IOException { } return t; } else { - Map m = new HashMap(); + Tuple tuple = Tuple.EOF(); if(trace) { - m.put("_COLLECTION_", this.collection); + tuple.put("_COLLECTION_", this.collection); } - - m.put("EOF", true); - - return new Tuple(m); + return tuple; } } + @SuppressWarnings({"overrides"}) protected class TupleWrapper implements Comparable { private Tuple tuple; private SolrStream stream; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/EchoStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/EchoStream.java index 38e1cca7186c..7749a0f3bb84 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/EchoStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/EchoStream.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import org.apache.solr.client.solrj.io.Tuple; @@ -96,16 +95,10 @@ public void close() throws IOException { public Tuple read() throws IOException { if(finished) { - HashMap m = new HashMap(); - m.put("EOF", true); - Tuple tuple = new Tuple(m); - return tuple; + return Tuple.EOF(); } else { - HashMap m = new HashMap(); - m.put("echo", echo); - Tuple tuple = new Tuple(m); finished = true; - return tuple; + return new Tuple("echo", echo); } } diff --git 
a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/EvalStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/EvalStream.java index 957f5b4fdb42..cbab9b0e21ed 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/EvalStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/EvalStream.java @@ -100,7 +100,7 @@ public void setStreamContext(StreamContext streamContext) { } public List children() { - List l = new ArrayList(); + List l = new ArrayList<>(); l.add(stream); return l; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExceptionStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExceptionStream.java index 9d1f45081c31..d6cabf14f623 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExceptionStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExceptionStream.java @@ -18,9 +18,7 @@ import java.io.IOException; import java.lang.invoke.MethodHandles; -import java.util.HashMap; import java.util.List; -import java.util.Map; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.comp.StreamComparator; @@ -60,21 +58,15 @@ public void open() { public Tuple read() { if(openException != null) { //There was an exception during the open. - Map fields = new HashMap(); - fields.put("EXCEPTION", openException.getMessage()); - fields.put("EOF", true); SolrException.log(log, openException); - return new Tuple(fields); + return Tuple.EXCEPTION(openException.getMessage(), true); } try { return stream.read(); } catch (Exception e) { - Map fields = new HashMap(); - fields.put("EXCEPTION", e.getMessage()); - fields.put("EOF", true); SolrException.log(log, e); - return new Tuple(fields); + return Tuple.EXCEPTION(e.getMessage(), true); } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExecutorStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExecutorStream.java index 10b6873856f7..85be58fad480 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExecutorStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExecutorStream.java @@ -132,7 +132,7 @@ public void setStreamContext(StreamContext streamContext) { } public List children() { - List l = new ArrayList(); + List l = new ArrayList<>(); l.add(stream); return l; } @@ -153,6 +153,7 @@ public void close() throws IOException { } public Tuple read() throws IOException { + @SuppressWarnings({"unchecked", "rawtypes"}) ArrayBlockingQueue queue = new ArrayBlockingQueue(10000); while(true) { Tuple tuple = stream.read(); @@ -183,10 +184,12 @@ public static class StreamTask implements Runnable { private StreamFactory streamFactory; private StreamContext streamContext; - public StreamTask(ArrayBlockingQueue queue, StreamFactory streamFactory, StreamContext streamContext) { + @SuppressWarnings({"unchecked"}) + public StreamTask(@SuppressWarnings({"rawtypes"})ArrayBlockingQueue queue, StreamFactory streamFactory, StreamContext streamContext) { this.queue = queue; this.streamFactory = streamFactory; this.streamContext = new StreamContext(); + this.streamContext.setObjectCache(streamContext.getObjectCache()); this.streamContext.setSolrClientCache(streamContext.getSolrClientCache()); this.streamContext.setModelCache(streamContext.getModelCache()); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/Facet2DStream.java 
b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/Facet2DStream.java index 2ccb147d4724..cb19b90a7041 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/Facet2DStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/Facet2DStream.java @@ -20,11 +20,9 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; -import java.util.Map; import java.util.Map.Entry; import java.util.Optional; import java.util.stream.Collectors; @@ -273,7 +271,7 @@ public void setStreamContext(StreamContext context) { } public List children() { - return new ArrayList(); + return new ArrayList<>(); } public void open() throws IOException { @@ -295,6 +293,7 @@ public void open() throws IOException { QueryRequest request = new QueryRequest(paramsLoc, SolrRequest.METHOD.POST); try { + @SuppressWarnings({"rawtypes"}) NamedList response = cloudSolrClient.request(request, collection); getTuples(response, x, y, metric); this.out = tuples.iterator(); @@ -308,11 +307,8 @@ public Tuple read() throws IOException { if (out.hasNext()) { return out.next(); } else { - Map fields = new HashMap(); - fields.put("rows", tuples.size()); - - fields.put("EOF", true); - Tuple tuple = new Tuple(fields); + Tuple tuple = Tuple.EOF(); + tuple.put("rows", tuples.size()); return tuple; } @@ -394,21 +390,26 @@ private String getFacetSort(String id, Metric metric) { return null; } - private void getTuples(NamedList response, Bucket x, Bucket y, Metric metric) { - Tuple tuple = new Tuple(new HashMap()); + private void getTuples(@SuppressWarnings({"rawtypes"})NamedList response, Bucket x, Bucket y, Metric metric) { + Tuple tuple = new Tuple(); + @SuppressWarnings({"rawtypes"}) NamedList facets = (NamedList) response.get("facets"); fillTuples(0, tuples, tuple, facets, x, y, metric); } - private void fillTuples(int level, List tuples, Tuple currentTuple, NamedList facets, Bucket x, Bucket y, Metric metric) { + private void fillTuples(int level, List tuples, Tuple currentTuple, + @SuppressWarnings({"rawtypes"})NamedList facets, Bucket x, Bucket y, Metric metric) { String bucketXName = x.toString(); String bucketYName = y.toString(); + @SuppressWarnings({"rawtypes"}) NamedList allXBuckets = (NamedList) facets.get("x"); for (int b = 0; b < allXBuckets.size(); b++) { + @SuppressWarnings({"rawtypes"}) List buckets = (List) allXBuckets.get("buckets"); for(int s=0; s tuples, Tuple currentTuple, Named Tuple tx = currentTuple.clone(); tx.put(bucketXName, val); + @SuppressWarnings({"rawtypes"}) NamedList allYBuckets = (NamedList) bucket.get("y"); + @SuppressWarnings({"rawtypes"}) List ybuckets = (List)allYBuckets.get("buckets"); for (int d = 0; d < ybuckets.size(); d++) { + @SuppressWarnings({"rawtypes"}) NamedList bucketY = (NamedList) ybuckets.get(d); Object valY = bucketY.get("val"); if (valY instanceof Integer) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java index b2b28095b226..638550f79ded 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java @@ -20,10 +20,8 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Locale; -import java.util.Map; import 
java.util.Map.Entry; import java.util.Optional; import java.util.stream.Collectors; @@ -349,18 +347,24 @@ public String getCollection() { private FieldComparator[] parseBucketSorts(String bucketSortString, Bucket[] buckets) throws IOException { - String[] sorts = bucketSortString.split(","); + String[] sorts = parseSorts(bucketSortString); + FieldComparator[] comps = new FieldComparator[sorts.length]; for(int i=0; i sorts = new ArrayList<>(); + boolean inParam = false; + StringBuilder buff = new StringBuilder(); + for(int i=0; i 0) { + sorts.add(buff.toString()); + } + + return sorts.toArray(new String[sorts.size()]); + } + + private void init(String collection, SolrParams params, Bucket[] buckets, FieldComparator[] bucketSorts, Metric[] metrics, int rows, int offset, int bucketSizeLimit, boolean refine, String method, boolean serializeBucketSizeLimit, int overfetch, String zkHost) throws IOException { this.zkHost = zkHost; this.params = new ModifiableSolrParams(params); @@ -501,7 +533,7 @@ public void setStreamContext(StreamContext context) { } public List children() { - return new ArrayList(); + return new ArrayList<>(); } public void open() throws IOException { @@ -524,6 +556,7 @@ public void open() throws IOException { QueryRequest request = new QueryRequest(paramsLoc, SolrRequest.METHOD.POST); try { + @SuppressWarnings({"rawtypes"}) NamedList response = cloudSolrClient.request(request, collection); getTuples(response, buckets, metrics); @@ -568,7 +601,7 @@ private boolean expectedJson(String json) { for(Metric metric: metrics) { String func = metric.getFunctionName(); - if(!func.equals("count")) { + if(!func.equals("count") && !func.equals("per") && !func.equals("std")) { if (!json.contains(metric.getIdentifier())) { return false; } @@ -592,15 +625,11 @@ public Tuple read() throws IOException { ++index; return tuple; } else { - Map fields = new HashMap(); + Tuple tuple = Tuple.EOF(); if(bucketSizeLimit == Integer.MAX_VALUE) { - fields.put("totalRows", tuples.size()); + tuple.put("totalRows", tuples.size()); } - - fields.put("EOF", true); - - Tuple tuple = new Tuple(fields); return tuple; } } @@ -680,18 +709,27 @@ private void appendJson(StringBuilder buf, ++level; + boolean comma = false; for(Metric metric : _metrics) { //Only compute the metric if it's a leaf node or if the branch level sort equals the metric String facetKey = "facet_"+metricCount; - if(level == _buckets.length || fsort.equals(facetKey) ) { - String identifier = metric.getIdentifier(); - if (!identifier.startsWith("count(")) { - if (metricCount > 0) { - buf.append(","); + String identifier = metric.getIdentifier(); + if (!identifier.startsWith("count(")) { + if (comma) { + buf.append(","); + } + + if(level == _buckets.length || fsort.equals(facetKey) ) { + comma = true; + if (identifier.startsWith("per(")) { + buf.append("\"facet_").append(metricCount).append("\":\"").append(identifier.replaceFirst("per", "percentile")).append('"'); + } else if (identifier.startsWith("std(")) { + buf.append("\"facet_").append(metricCount).append("\":\"").append(identifier.replaceFirst("std", "stddev")).append('"'); + } else { + buf.append('"').append(facetKey).append("\":\"").append(identifier).append('"'); } - buf.append('"').append(facetKey).append("\":\"").append(identifier).append('"'); - ++metricCount; } + ++metricCount; } } @@ -724,11 +762,12 @@ private String getFacetSort(String id, Metric[] _metrics) { return "index"; } - private void getTuples(NamedList response, + private void
getTuples(@SuppressWarnings({"rawtypes"})NamedList response, Bucket[] buckets, Metric[] metrics) { - Tuple tuple = new Tuple(new HashMap()); + Tuple tuple = new Tuple(); + @SuppressWarnings({"rawtypes"}) NamedList facets = (NamedList)response.get("facets"); fillTuples(0, tuples, @@ -742,17 +781,20 @@ private void getTuples(NamedList response, private void fillTuples(int level, List tuples, Tuple currentTuple, - NamedList facets, + @SuppressWarnings({"rawtypes"}) NamedList facets, Bucket[] _buckets, Metric[] _metrics) { String bucketName = _buckets[level].toString(); + @SuppressWarnings({"rawtypes"}) NamedList nl = (NamedList)facets.get(bucketName); if(nl == null) { return; } + @SuppressWarnings({"rawtypes"}) List allBuckets = (List)nl.get("buckets"); for(int b=0; b getShardUrls() throws IOException { } } + @SuppressWarnings({"rawtypes"}) private List> callShards(List baseUrls) throws IOException { List> futures = new ArrayList<>(); @@ -336,10 +338,13 @@ public Tuple read() throws IOException { long numDocs = 0; - for (Future getTopTermsCall : callShards(getShardUrls())) { + for (@SuppressWarnings({"rawtypes"})Future getTopTermsCall : callShards(getShardUrls())) { + @SuppressWarnings({"rawtypes"}) NamedList resp = getTopTermsCall.get(); + @SuppressWarnings({"unchecked"}) NamedList shardTopTerms = (NamedList)resp.get("featuredTerms"); + @SuppressWarnings({"unchecked"}) NamedList shardDocFreqs = (NamedList)resp.get("docFreq"); numDocs += (Integer)resp.get("numDocs"); @@ -362,21 +367,19 @@ public Tuple read() throws IOException { for (Map.Entry termScore : termScores.entrySet()) { if (tuples.size() == numTerms) break; index++; - Map map = new HashMap(); - map.put(ID, featureSet + "_" + index); - map.put("index_i", index); - map.put("term_s", termScore.getKey()); - map.put("score_f", termScore.getValue()); - map.put("featureSet_s", featureSet); + Tuple tuple = new Tuple(); + tuple.put(ID, featureSet + "_" + index); + tuple.put("index_i", index); + tuple.put("term_s", termScore.getKey()); + tuple.put("score_f", termScore.getValue()); + tuple.put("featureSet_s", featureSet); long docFreq = docFreqs.get(termScore.getKey()); double d = Math.log(((double)numDocs / (double)(docFreq + 1))); - map.put("idf_d", d); - tuples.add(new Tuple(map)); + tuple.put("idf_d", d); + tuples.add(tuple); } - Map map = new HashMap(); - map.put("EOF", true); - tuples.add(new Tuple(map)); + tuples.add(Tuple.EOF()); tupleIterator = tuples.iterator(); } @@ -399,6 +402,7 @@ private > Map sortByValue( Map m return result; } + @SuppressWarnings({"rawtypes"}) protected class FeaturesSelectionCall implements Callable { private String baseUrl; @@ -417,6 +421,7 @@ public FeaturesSelectionCall(String baseUrl, this.paramsMap = paramsMap; } + @SuppressWarnings({"unchecked"}) public NamedList call() throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(); HttpSolrClient solrClient = cache.getHttpSolrClient(baseUrl); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java index fbdba168fa9c..2a0e49c6921d 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java @@ -199,11 +199,12 @@ public void setStreamContext(StreamContext streamContext) { } public List children() { - List l = new ArrayList(); + List l = new ArrayList<>(); l.add(stream); return l; } + @SuppressWarnings({"unchecked", 
"rawtypes"}) public void open() throws IOException { tuples = new ArrayList().iterator(); stream.open(); @@ -239,6 +240,7 @@ private void fetchBatch() throws IOException { CloudSolrStream cloudSolrStream = new CloudSolrStream(zkHost, collection, params); StreamContext newContext = new StreamContext(); newContext.setSolrClientCache(streamContext.getSolrClientCache()); + newContext.setObjectCache(streamContext.getObjectCache()); cloudSolrStream.setStreamContext(newContext); Map fetched = new HashMap<>(); try { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/GetStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/GetStream.java index 1655bfba0fbc..2088b35fdcb2 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/GetStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/GetStream.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -84,28 +83,27 @@ public void setStreamContext(StreamContext context) { } public List children() { - List l = new ArrayList(); + List l = new ArrayList<>(); return l; } public Tuple read() throws IOException { - Map map = new HashMap(); - if(tupleIterator.hasNext()) { + if (tupleIterator.hasNext()) { Tuple t = tupleIterator.next(); - map.putAll(t.fields); - return new Tuple(map); + return t.clone(); } else { - map.put("EOF", true); - return new Tuple(map); + return Tuple.EOF(); } } public void close() throws IOException { } + @SuppressWarnings({"unchecked"}) public void open() throws IOException { Map lets = streamContext.getLets(); Object o = lets.get(name); + @SuppressWarnings({"rawtypes"}) List l = null; if(o instanceof List) { l = (List)o; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/HashRollupStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/HashRollupStream.java index 8bf82c63151c..30bc59e5345b 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/HashRollupStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/HashRollupStream.java @@ -190,27 +190,27 @@ public void close() throws IOException { tupleIterator = null; } + @SuppressWarnings({"unchecked"}) public Tuple read() throws IOException { //On the first call to read build the tupleIterator. 
if(tupleIterator == null) { - Map metricMap = new HashMap(); + Map metricMap = new HashMap<>(); while (true) { Tuple tuple = tupleStream.read(); if (tuple.EOF) { + @SuppressWarnings({"rawtypes"}) List tuples = new ArrayList(); for(Map.Entry entry : metricMap.entrySet()) { - Map map = new HashMap(); + Tuple t = new Tuple(); Metric[] finishedMetrics = entry.getValue(); for (Metric metric : finishedMetrics) { - map.put(metric.getIdentifier(), metric.getValue()); + t.put(metric.getIdentifier(), metric.getValue()); } HashKey hashKey = entry.getKey(); for (int i = 0; i < buckets.length; i++) { - map.put(buckets[i].toString(), hashKey.getParts()[i]); + t.put(buckets[i].toString(), hashKey.getParts()[i]); } - - Tuple t = new Tuple(map); tuples.add(t); } tuples.add(tuple); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JDBCStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JDBCStream.java index ea4071501f40..778f60cfc1ad 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JDBCStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JDBCStream.java @@ -31,11 +31,9 @@ import java.sql.Types; import java.time.format.DateTimeFormatter; import java.util.ArrayList; -import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Locale; -import java.util.Map; import java.util.Properties; import org.apache.solr.client.solrj.io.Tuple; @@ -50,6 +48,7 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter; import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; +import org.apache.solr.common.params.StreamParams; import static org.apache.solr.common.params.CommonParams.SORT; @@ -515,22 +514,20 @@ public void close() throws IOException { public Tuple read() throws IOException { - try{ - Map fields = new HashMap<>(); - if(resultSet.next()){ + try { + Tuple tuple = new Tuple(); + if (resultSet.next()) { // we have a record - for(ResultSetValueSelector selector : valueSelectors){ - fields.put(selector.getColumnName(), selector.selectValue(resultSet)); + for (ResultSetValueSelector selector : valueSelectors) { + tuple.put(selector.getColumnName(), selector.selectValue(resultSet)); } - } - else{ + } else { // we do not have a record - fields.put("EOF", true); + tuple.put(StreamParams.EOF, true); } - return new Tuple(fields); - } - catch(SQLException e){ + return tuple; + } catch (SQLException e) { throw new IOException(String.format(Locale.ROOT, "Failed to read next record with error '%s'", e.getMessage()), e); } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JSONTupleStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JSONTupleStream.java index 335b1740b525..b2321e62b2d3 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JSONTupleStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JSONTupleStream.java @@ -73,6 +73,7 @@ public static JSONTupleStream create(SolrClient server, SolrParams requestParams /** returns the next Tuple or null */ @Override + @SuppressWarnings({"unchecked"}) public Map next() throws IOException { if (!atDocs) { boolean found = advanceToDocs(); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JavabinTupleStreamParser.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JavabinTupleStreamParser.java index dfe8cc7e1868..f9f1790530f2 100644 --- 
a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JavabinTupleStreamParser.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JavabinTupleStreamParser.java @@ -96,6 +96,7 @@ private boolean isObjectType(DataInputInputStream dis) throws IOException { return tagByte == SOLRDOCLST; } + @SuppressWarnings({"unchecked", "rawtypes"}) private Map readAsMap(DataInputInputStream dis) throws IOException { int sz = readSize(dis); Map m = new LinkedHashMap<>(); @@ -107,6 +108,7 @@ private Map readAsMap(DataInputInputStream dis) throws IOException { return m; } + @SuppressWarnings({"unchecked", "rawtypes"}) private Map readSolrDocumentAsMap(DataInputInputStream dis) throws IOException { tagByte = dis.readByte(); int size = readSize(dis); @@ -174,6 +176,7 @@ protected Object readObject(DataInputInputStream dis) throws IOException { @Override + @SuppressWarnings({"unchecked"}) public Map next() throws IOException { if (arraySize == 0) return null; Object o = readVal(fis); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/KnnStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/KnnStream.java index c03db3820d37..78d247044482 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/KnnStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/KnnStream.java @@ -183,7 +183,7 @@ public void setStreamContext(StreamContext context) { } public List children() { - List l = new ArrayList(); + List l = new ArrayList<>(); return l; } @@ -225,17 +225,14 @@ public void close() throws IOException { public Tuple read() throws IOException { if(documentIterator.hasNext()) { - Map map = new HashMap(); + Tuple tuple = new Tuple(); SolrDocument doc = documentIterator.next(); for(Entry entry : doc.entrySet()) { - map.put(entry.getKey(), entry.getValue()); + tuple.put(entry.getKey(), entry.getValue()); } - return new Tuple(map); - } else { - Map fields = new HashMap(); - fields.put("EOF", true); - Tuple tuple = new Tuple(fields); return tuple; + } else { + return Tuple.EOF(); } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/LetStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/LetStream.java index 23881c3a446f..79cdcbfe8f5c 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/LetStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/LetStream.java @@ -47,8 +47,10 @@ public class LetStream extends TupleStream implements Expressible { private static final long serialVersionUID = 1; private TupleStream stream; private StreamContext streamContext; + @SuppressWarnings({"rawtypes"}) private Map letParams = new LinkedHashMap(); + @SuppressWarnings({"unchecked", "rawtypes"}) public LetStream(StreamExpression expression, StreamFactory factory) throws IOException { List streamExpressions = factory.getExpressionOperandsRepresentingTypes(expression, Expressible.class, TupleStream.class); @@ -160,6 +162,7 @@ public void close() throws IOException { stream.close(); } + @SuppressWarnings({"unchecked"}) public void open() throws IOException { Map lets = streamContext.getLets(); Set> entries = letParams.entrySet(); @@ -169,7 +172,7 @@ public void open() throws IOException { String name = entry.getKey(); Object o = entry.getValue(); if(o instanceof TupleStream) { - List tuples = new ArrayList(); + List tuples = new ArrayList<>(); TupleStream tStream = (TupleStream)o; tStream.setStreamContext(streamContext); try { @@ -196,6 +199,7 @@ public void open() 
throws IOException { evaluator.setStreamContext(streamContext); Object eo = evaluator.evaluate(eTuple); if(evaluator instanceof MemsetEvaluator) { + @SuppressWarnings({"rawtypes"}) Map mem = (Map)eo; lets.putAll(mem); } else { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ListStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ListStream.java index 33f8fd59bbee..3858df5521cd 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ListStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ListStream.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import org.apache.solr.client.solrj.io.Tuple; @@ -114,9 +113,7 @@ public Tuple read() throws IOException { streams[streamIndex] = null; currentStream.open(); } else { - HashMap map = new HashMap(); - map.put("EOF", true); - return new Tuple(map); + return Tuple.EOF(); } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ModelStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ModelStream.java index ffaf313185ae..77c50909be3c 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ModelStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ModelStream.java @@ -162,7 +162,7 @@ public void open() throws IOException { } public List children() { - List l = new ArrayList(); + List l = new ArrayList<>(); return l; } @@ -193,9 +193,7 @@ public Tuple read() throws IOException { tuple = model; model = null; } else { - Map map = new HashMap(); - map.put("EOF", true); - tuple = new Tuple(map); + tuple = Tuple.EOF(); } return tuple; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/NoOpStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/NoOpStream.java index 8d55c313c052..85a0f5544279 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/NoOpStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/NoOpStream.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import org.apache.solr.client.solrj.io.Tuple; @@ -88,10 +87,7 @@ public void close() throws IOException { } public Tuple read() throws IOException { - HashMap m = new HashMap(); - m.put("EOF", true); - Tuple tuple = new Tuple(m); - return tuple; + return Tuple.EOF(); } /** Return the stream sort - ie, the order in which records are returned */ diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/NullStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/NullStream.java index 4f0181b23838..067acb561b9a 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/NullStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/NullStream.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Date; @@ -133,7 +132,7 @@ public Tuple read() throws IOException { if(tuple.EOF) { eof = tuple; long end = new Date().getTime(); - Tuple t = new Tuple(new HashMap()); + Tuple t = new Tuple(); t.put("nullCount", count); t.put("timer", end-start); return t; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelListStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelListStream.java index aeadd90321f7..591a4ef4258a 
100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelListStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelListStream.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; @@ -107,14 +106,12 @@ public List children() { } public Tuple read() throws IOException { - while(true) { + while (true) { if (currentStream == null) { if (streamIndex < streams.length) { currentStream = streams[streamIndex]; } else { - HashMap map = new HashMap(); - map.put("EOF", true); - return new Tuple(map); + return Tuple.EOF(); } } @@ -139,7 +136,7 @@ public void open() throws IOException { private void openStreams() throws IOException { ExecutorService service = ExecutorUtil.newMDCAwareCachedThreadPool(new SolrNamedThreadFactory("ParallelListStream")); try { - List> futures = new ArrayList(); + List> futures = new ArrayList<>(); int i=0; for (TupleStream tupleStream : streams) { StreamOpener so = new StreamOpener(new StreamIndex(tupleStream, i++)); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java index d81aa5414f90..0d48b0bae3bc 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java @@ -18,10 +18,8 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Locale; -import java.util.Map; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.comp.FieldComparator; @@ -208,7 +206,7 @@ public Explanation toExplanation(StreamFactory factory) throws IOException { } public List children() { - List l = new ArrayList(); + List l = new ArrayList<>(); l.add(tupleStream); return l; } @@ -217,10 +215,6 @@ public Tuple read() throws IOException { Tuple tuple = _read(); if(tuple.EOF) { - Map m = new HashMap(); - m.put("EOF", true); - Tuple t = new Tuple(m); - /* Map metrics = new HashMap(); Iterator> it = this.eofTuples.entrySet().iterator(); @@ -235,7 +229,7 @@ public Tuple read() throws IOException { t.setMetrics(metrics); } */ - return t; + return Tuple.EOF(); } return tuple; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/PlotStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/PlotStream.java index a83349ae5fbc..f281536633b2 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/PlotStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/PlotStream.java @@ -49,8 +49,8 @@ public class PlotStream extends TupleStream implements Expressible { private Map stringParams = new HashMap<>(); private Map evaluatorParams = new HashMap<>(); private Map streamParams = new HashMap<>(); - private List fieldNames = new ArrayList(); - private Map fieldLabels = new HashMap(); + private List fieldNames = new ArrayList<>(); + private Map fieldLabels = new HashMap<>(); private boolean finished; @@ -148,12 +148,11 @@ public List children() { return l; } + @SuppressWarnings({"unchecked"}) public Tuple read() throws IOException { - if(finished) { - Map m = new HashMap<>(); - m.put("EOF", true); - return new Tuple(m); + if (finished) { + return Tuple.EOF(); } else { finished = true; Map values = new HashMap<>(); @@ 
-179,15 +178,15 @@ public Tuple read() throws IOException { if(x == null) { //x is null so add a sequence - x = new ArrayList(); + x = new ArrayList<>(); for(int i=0; i> xy = new ArrayList(); + List> xy = new ArrayList<>(); for(int i=0; i pair = new ArrayList(); + List pair = new ArrayList<>(); pair.add(x.get(i)); pair.add(y.get(i)); xy.add(pair); @@ -197,8 +196,8 @@ public Tuple read() throws IOException { values.put("data", xy); Tuple tup = new Tuple(values); - tup.fieldLabels = fieldLabels; - tup.fieldNames = fieldNames; + tup.setFieldLabels(fieldLabels); + tup.setFieldNames(fieldNames); return tup; } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/PriorityStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/PriorityStream.java index edb345526165..5a10d659e82b 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/PriorityStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/PriorityStream.java @@ -120,7 +120,7 @@ public void setStreamContext(StreamContext streamContext) { } public List children() { - List l = new ArrayList(); + List l = new ArrayList<>(); l.add(highPriorityTasks); l.add(tasks); return l; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RandomStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RandomStream.java index aca0e3d4af81..ba2a79936596 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RandomStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RandomStream.java @@ -193,7 +193,7 @@ public void setStreamContext(StreamContext context) { } public List children() { - List l = new ArrayList(); + List l = new ArrayList<>(); return l; } @@ -234,25 +234,22 @@ public void close() throws IOException { public Tuple read() throws IOException { if(documentIterator.hasNext()) { - Map map = new HashMap(); + Tuple tuple = new Tuple(); SolrDocument doc = documentIterator.next(); // Put the generated x-axis first. If there really is an x field it will overwrite it. 
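// x increments once per returned tuple, so each random document gets a sequential x value.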
if(outputX) { - map.put("x", x++); + tuple.put("x", x++); } for(Entry entry : doc.entrySet()) { - map.put(entry.getKey(), entry.getValue()); + tuple.put(entry.getKey(), entry.getValue()); } - return new Tuple(map); - } else { - Map fields = new HashMap(); - fields.put("EOF", true); - Tuple tuple = new Tuple(fields); return tuple; + } else { + return Tuple.EOF(); } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RollupStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RollupStream.java index c1b6894f3f3e..cdd864178e49 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RollupStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RollupStream.java @@ -18,10 +18,8 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Locale; -import java.util.Map; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.comp.HashKey; @@ -206,15 +204,14 @@ public Tuple read() throws IOException { return tuple; } - Map map = new HashMap(); + Tuple t = new Tuple(); for(Metric metric : currentMetrics) { - map.put(metric.getIdentifier(), metric.getValue()); + t.put(metric.getIdentifier(), metric.getValue()); } for(int i=0; i map = new HashMap(); + t = new Tuple(); for(Metric metric : currentMetrics) { - map.put(metric.getIdentifier(), metric.getValue()); + t.put(metric.getIdentifier(), metric.getValue()); } for(int i=0; i nodes = new HashMap(); + private Map nodes = new HashMap<>(); private Iterator tuples; private String termFreq; private boolean facet; @@ -165,7 +165,7 @@ public void setStreamContext(StreamContext context) { } public List children() { - List l = new ArrayList(); + List l = new ArrayList<>(); l.add(stream); return l; } @@ -191,7 +191,7 @@ public void open() throws IOException { node.put("field", bucket); } - if(!node.fields.containsKey("node")) { + if(!node.getFields().containsKey("node")) { throw new IOException("node field not present in the Tuple"); } @@ -222,9 +222,12 @@ public void open() throws IOException { try { //Get the response from the terms component + @SuppressWarnings({"rawtypes"}) NamedList response = client.request(request, collection); + @SuppressWarnings({"unchecked"}) NamedList stats = (NamedList)response.get("indexstats"); long numDocs = stats.get("numDocs").longValue(); + @SuppressWarnings({"unchecked"}) NamedList> fields = (NamedList>)response.get("terms"); int size = fields.size(); @@ -236,7 +239,7 @@ public void open() throws IOException { String term = terms.getName(t); Number docFreq = terms.get(term); Tuple tuple = nodes.get(term); - if(!tuple.fields.containsKey(termFreq)) { + if(!tuple.getFields().containsKey(termFreq)) { throw new Exception("termFreq field not present in the Tuple"); } Number termFreqValue = (Number)tuple.get(termFreq); @@ -265,9 +268,7 @@ public Tuple read() throws IOException { if(tuples.hasNext()) { return tuples.next(); } else { - Map map = new HashMap(); - map.put("EOF", true); - return new Tuple(map); + return Tuple.EOF(); } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SearchStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SearchStream.java index 24368a0a9821..6067783ac98e 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SearchStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SearchStream.java @@ -19,12 +19,10 @@ import java.io.IOException; import 
java.util.ArrayList; -import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Locale; -import java.util.Map; import java.util.Map.Entry; import java.util.Optional; @@ -176,7 +174,7 @@ public void setStreamContext(StreamContext context) { } public List children() { - List l = new ArrayList(); + List l = new ArrayList<>(); return l; } @@ -208,17 +206,14 @@ public void close() throws IOException { public Tuple read() throws IOException { if(documentIterator.hasNext()) { - Map map = new HashMap(); + Tuple tuple = new Tuple(); SolrDocument doc = documentIterator.next(); for(Entry entry : doc.entrySet()) { - map.put(entry.getKey(), entry.getValue()); + tuple.put(entry.getKey(), entry.getValue()); } - return new Tuple(map); - } else { - Map fields = new HashMap(); - fields.put("EOF", true); - Tuple tuple = new Tuple(fields); return tuple; + } else { + return Tuple.EOF(); } } @@ -232,6 +227,7 @@ public StreamComparator getStreamSort() { return comp; } + @SuppressWarnings({"unchecked", "rawtypes"}) private StreamComparator parseComp(String sort, String fl) throws IOException { HashSet fieldSet = null; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SelectStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SelectStream.java index c538560da421..62e490aeba1b 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SelectStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SelectStream.java @@ -64,16 +64,17 @@ public SelectStream(TupleStream stream, List selectedFields) throws IOEx this.selectedFields.put(selectedField, selectedField); } operations = new ArrayList<>(); - selectedEvaluators = new LinkedHashMap(); + selectedEvaluators = new LinkedHashMap<>(); } public SelectStream(TupleStream stream, Map selectedFields) throws IOException { this.stream = stream; this.selectedFields = selectedFields; operations = new ArrayList<>(); - selectedEvaluators = new LinkedHashMap(); + selectedEvaluators = new LinkedHashMap<>(); } + @SuppressWarnings({"unchecked"}) public SelectStream(StreamExpression expression,StreamFactory factory) throws IOException { // grab all parameters out List streamExpressions = factory.getExpressionOperandsRepresentingTypes(expression, Expressible.class, TupleStream.class); @@ -101,7 +102,7 @@ public SelectStream(StreamExpression expression,StreamFactory factory) throws IO stream = factory.constructStream(streamExpressions.get(0)); selectedFields = new HashMap(); - selectedEvaluators = new LinkedHashMap(); + selectedEvaluators = new LinkedHashMap<>(); for(StreamExpressionParameter parameter : selectAsFieldsExpressions){ StreamExpressionValue selectField = (StreamExpressionValue)parameter; String value = selectField.getValue().trim(); @@ -236,7 +237,7 @@ public void setStreamContext(StreamContext context) { } public List children() { - List l = new ArrayList(); + List l = new ArrayList<>(); l.add(stream); return l; } @@ -257,8 +258,8 @@ public Tuple read() throws IOException { } // create a copy with the limited set of fields - Tuple workingToReturn = new Tuple(new HashMap<>()); - Tuple workingForEvaluators = new Tuple(new HashMap<>()); + Tuple workingToReturn = new Tuple(); + Tuple workingForEvaluators = new Tuple(); //Clear the TupleContext before running the evaluators. //The TupleContext allows evaluators to cache values within the scope of a single tuple. 
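Throughout these hunks, hand-built EOF markers (a HashMap plus put("EOF", true)) are replaced by a Tuple.EOF() factory. The factory's body is not part of this patch; a minimal sketch of what it plausibly does, assuming the no-arg Tuple() constructor used above and the StreamParams.EOF key adopted in the JDBCStream hunk:

    public static Tuple EOF() {
      Tuple tuple = new Tuple();          // starts with an empty field map
      tuple.put(StreamParams.EOF, true);  // readers stop when tuple.EOF is true
      return tuple;
    }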
@@ -267,7 +268,7 @@ public Tuple read() throws IOException { streamContext.getTupleContext().clear(); - for(Object fieldName : original.fields.keySet()){ + for(Object fieldName : original.getFields().keySet()){ workingForEvaluators.put(fieldName, original.get(fieldName)); if(selectedFields.containsKey(fieldName)){ workingToReturn.put(selectedFields.get(fieldName), original.get(fieldName)); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java index c3a120f7c740..e050d381f5a9 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java @@ -77,7 +77,7 @@ public class SignificantTermsStream extends TupleStream implements Expressible{ public SignificantTermsStream(String zkHost, String collectionName, - Map params, + @SuppressWarnings({"rawtypes"})Map params, String field, float minDocFreq, float maxDocFreq, @@ -202,9 +202,10 @@ public StreamExpressionParameter toExpression(StreamFactory factory) throws IOEx return expression; } + @SuppressWarnings({"unchecked"}) private void init(String collectionName, String zkHost, - Map params, + @SuppressWarnings({"rawtypes"})Map params, String field, float minDocFreq, float maxDocFreq, @@ -240,6 +241,7 @@ public List children() { return null; } + @SuppressWarnings({"rawtypes"}) private List> callShards(List baseUrls) throws IOException { List> futures = new ArrayList<>(); @@ -252,6 +254,7 @@ private List> callShards(List baseUrls) throws IOExcep this.minTermLength, this.numTerms); + @SuppressWarnings({"rawtypes"}) Future future = executorService.submit(lc); futures.add(future); } @@ -281,14 +284,17 @@ public Explanation toExplanation(StreamFactory factory) throws IOException { .withExpression(toExpression(factory).toString()); } + @SuppressWarnings({"unchecked"}) public Tuple read() throws IOException { try { if (tupleIterator == null) { Map mergeFreqs = new HashMap<>(); long numDocs = 0; long resultCount = 0; - for (Future getTopTermsCall : callShards(getShards(zkHost, collection, streamContext))) { + for (@SuppressWarnings({"rawtypes"})Future getTopTermsCall : callShards(getShards(zkHost, collection, streamContext))) { + @SuppressWarnings({"rawtypes"}) NamedList fullResp = getTopTermsCall.get(); + @SuppressWarnings({"rawtypes"}) Map stResp = (Map)fullResp.get("significantTerms"); List terms = (List)stResp.get("sterms"); @@ -313,10 +319,12 @@ public Tuple read() throws IOException { } } - List maps = new ArrayList(); + @SuppressWarnings({"rawtypes"}) + List maps = new ArrayList<>(); for(Map.Entry entry : mergeFreqs.entrySet()) { int[] freqs = entry.getValue(); + @SuppressWarnings({"rawtypes"}) Map map = new HashMap(); map.put("term", entry.getKey()); map.put("background", freqs[0]); @@ -329,15 +337,13 @@ public Tuple read() throws IOException { } Collections.sort(maps, new ScoreComp()); - List tuples = new ArrayList(); - for (Map map : maps) { + List tuples = new ArrayList<>(); + for (@SuppressWarnings({"rawtypes"})Map map : maps) { if (tuples.size() == numTerms) break; tuples.add(new Tuple(map)); } - Map map = new HashMap(); - map.put("EOF", true); - tuples.add(new Tuple(map)); + tuples.add(Tuple.EOF()); tupleIterator = tuples.iterator(); } @@ -347,6 +353,7 @@ public Tuple read() throws IOException { } } + @SuppressWarnings({"rawtypes"}) private static class ScoreComp implements Comparator 
{ public int compare(Map a, Map b) { Float scorea = (Float)a.get("score"); @@ -355,6 +362,7 @@ public int compare(Map a, Map b) { } } + @SuppressWarnings({"unchecked", "rawtypes"}) protected class SignificantTermsCall implements Callable { private String baseUrl; @@ -382,6 +390,7 @@ public SignificantTermsCall(String baseUrl, this.minTermLength = minTermLength; } + @SuppressWarnings({"unchecked", "rawtypes"}) public NamedList call() throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(); HttpSolrClient solrClient = cache.getHttpSolrClient(baseUrl); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SolrStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SolrStream.java index fad08d2a40bb..107d9e83f8d3 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SolrStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SolrStream.java @@ -21,7 +21,6 @@ import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.ArrayList; -import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -43,6 +42,7 @@ import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.params.StreamParams; import org.apache.solr.common.util.NamedList; /** @@ -85,7 +85,7 @@ public void setFieldMappings(Map fieldMappings) { } public List children() { - return new ArrayList(); + return new ArrayList<>(); } public String getBaseUrl() { @@ -196,18 +196,18 @@ public void close() throws IOException { * Reads a Tuple from the stream. The Stream is completed when Tuple.EOF == true. **/ + @SuppressWarnings({"unchecked"}) public Tuple read() throws IOException { try { + @SuppressWarnings({"rawtypes"}) Map fields = tupleStreamParser.next(); if (fields == null) { //Return the EOF tuple. 
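//A non-null EXCEPTION field below is rethrown as a HandledException rather than returned as a data tuple.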
- Map m = new HashMap(); - m.put("EOF", true); - return new Tuple(m); + return Tuple.EOF(); } else { - String msg = (String) fields.get("EXCEPTION"); + String msg = (String) fields.get(StreamParams.EXCEPTION); if (msg != null) { HandledException ioException = new HandledException(msg); throw ioException; @@ -252,6 +252,7 @@ public StreamComparator getStreamSort(){ return null; } + @SuppressWarnings({"unchecked", "rawtypes"}) private Map mapFields(Map fields, Map mappings) { Iterator> it = mappings.entrySet().iterator(); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java index c497290d5858..ea07ef0f2ace 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java @@ -17,8 +17,9 @@ package org.apache.solr.client.solrj.io.stream; import java.io.IOException; + import java.util.ArrayList; -import java.util.HashMap; +import java.util.Arrays; import java.util.List; import java.util.Locale; import java.util.Map; @@ -40,67 +41,75 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter; import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; +import org.apache.solr.client.solrj.io.stream.metrics.CountMetric; import org.apache.solr.client.solrj.io.stream.metrics.Metric; import org.apache.solr.client.solrj.request.QueryRequest; -import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; /** -* @since 6.0.0 -*/ + * @since 6.6.0 + */ public class StatsStream extends TupleStream implements Expressible { private static final long serialVersionUID = 1; + + private Metric[] metrics; - private String zkHost; private Tuple tuple; + private int index; + private String zkHost; private SolrParams params; private String collection; - private boolean done; - private boolean doCount; - private Map metricMap; protected transient SolrClientCache cache; protected transient CloudSolrClient cloudSolrClient; - protected StreamContext streamContext; + private StreamContext context; public StatsStream(String zkHost, - String collection, - SolrParams params, - Metric[] metrics) { - init(zkHost, collection, params, metrics); - } - - private void init(String zkHost, String collection, SolrParams params, Metric[] metrics) { - this.zkHost = zkHost; - this.params = params; - this.metrics = metrics; - this.collection = collection; - metricMap = new HashMap(); - for(Metric metric : metrics) { - metricMap.put(metric.getIdentifier(), metric); - } + String collection, + SolrParams params, + Metric[] metrics + ) throws IOException { + init(collection, params, metrics, zkHost); } public StatsStream(StreamExpression expression, StreamFactory factory) throws IOException{ // grab all parameters out String collectionName = factory.getValueOperand(expression, 0); + + if(collectionName.indexOf('"') > -1) { + collectionName = collectionName.replaceAll("\"", "").replaceAll(" ", ""); + } + List namedParams = factory.getNamedOperands(expression); - List metricExpressions = factory.getExpressionOperandsRepresentingTypes(expression, Metric.class); - StreamExpressionNamedParameter zkHostExpression = factory.getNamedOperand(expression, "zkHost"); + StreamExpressionNamedParameter 
zkHostExpression = factory.getNamedOperand(expression, "zkHost"); + List metricExpressions = factory.getExpressionOperandsRepresentingTypes(expression, Expressible.class, Metric.class); // Collection Name if(null == collectionName){ throw new IOException(String.format(Locale.ROOT,"invalid expression %s - collectionName expected as first operand",expression)); } + // Construct the metrics + Metric[] metrics = null; + if(metricExpressions.size() > 0) { + metrics = new Metric[metricExpressions.size()]; + for(int idx = 0; idx < metricExpressions.size(); ++idx){ + metrics[idx] = factory.constructMetric(metricExpressions.get(idx)); + } + } else { + metrics = new Metric[1]; + metrics[0] = new CountMetric(); + } + + // pull out known named params ModifiableSolrParams params = new ModifiableSolrParams(); for(StreamExpressionNamedParameter namedParam : namedParams){ if(!namedParam.getName().equals("zkHost")){ - params.set(namedParam.getName(), namedParam.getParameter().toString().trim()); + params.add(namedParam.getName(), namedParam.getParameter().toString().trim()); } } @@ -115,51 +124,55 @@ public StatsStream(StreamExpression expression, StreamFactory factory) throws IO if(zkHost == null) { zkHost = factory.getDefaultZkHost(); } - } - else if(zkHostExpression.getParameter() instanceof StreamExpressionValue){ + } else if(zkHostExpression.getParameter() instanceof StreamExpressionValue){ zkHost = ((StreamExpressionValue)zkHostExpression.getParameter()).getValue(); } - /* - if(null == zkHost){ - throw new IOException(String.format(Locale.ROOT,"invalid expression %s - zkHost not found for collection '%s'",expression,collectionName)); - } - */ + // We've got all the required items + init(collectionName, params, metrics, zkHost); + } - // metrics, optional - if not provided then why are you using this? 
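// With the fallback above, an expression that names no metrics now gets a single CountMetric, i.e. an implicit count(*).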
- Metric[] metrics = new Metric[metricExpressions.size()]; - for(int idx = 0; idx < metricExpressions.size(); ++idx){ - metrics[idx] = factory.constructMetric(metricExpressions.get(idx)); - } + public String getCollection() { + return this.collection; + } - // We've got all the required items - init(zkHost, collectionName, params, metrics); + private void init(String collection, + SolrParams params, + Metric[] metrics, + String zkHost) throws IOException { + this.zkHost = zkHost; + this.collection = collection; + this.metrics = metrics; + this.params = params; } @Override public StreamExpressionParameter toExpression(StreamFactory factory) throws IOException { - // functionName(collectionName, param1, param2, ..., paramN, sort="comp", sum(fieldA), avg(fieldB)) - // function name StreamExpression expression = new StreamExpression(factory.getFunctionName(this.getClass())); - // collection - expression.addParameter(collection); + if(collection.indexOf(',') > -1) { + expression.addParameter("\""+collection+"\""); + } else { + expression.addParameter(collection); + } // parameters - ModifiableSolrParams mParams = new ModifiableSolrParams(params); - for (Entry param : mParams.getMap().entrySet()) { - expression.addParameter(new StreamExpressionNamedParameter(param.getKey(), String.join(",", param.getValue()))); - } + ModifiableSolrParams tmpParams = new ModifiableSolrParams(params); - // zkHost - expression.addParameter(new StreamExpressionNamedParameter("zkHost", zkHost)); + for (Entry param : tmpParams.getMap().entrySet()) { + expression.addParameter(new StreamExpressionNamedParameter(param.getKey(), + String.join(",", param.getValue()))); + } // metrics for(Metric metric : metrics){ expression.addParameter(metric.toExpression(factory)); } + // zkHost + expression.addParameter(new StreamExpressionNamedParameter("zkHost", zkHost)); + return expression; } @@ -173,22 +186,24 @@ public Explanation toExplanation(StreamFactory factory) throws IOException { explanation.setExpressionType(ExpressionType.STREAM_SOURCE); explanation.setExpression(toExpression(factory).toString()); + // child is a datastore so add it at this point StreamExplanation child = new StreamExplanation(getStreamNodeId() + "-datastore"); - child.setFunctionName(String.format(Locale.ROOT, "solr (worker ? of ?)")); - // TODO: fix this so we know the # of workers - check with Joel about a Stat's ability to be in a - // parallel stream. + child.setFunctionName(String.format(Locale.ROOT, "solr (%s)", collection)); + // TODO: fix this so we know the # of workers - check with Joel about a Topic's ability to be in a + // parallel stream. 
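// For reference (field name illustrative): the json.facet serialization below turns sum(price), per(price,50), std(price) into {"facet_0":"sum(price)","facet_1":"percentile(price,50)","facet_2":"stddev(price)"}; count( metrics are skipped when the facet string is built.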
child.setImplementingClass("Solr/Lucene"); child.setExpressionType(ExpressionType.DATASTORE); - ModifiableSolrParams mParams = new ModifiableSolrParams(params); - child.setExpression(mParams.getMap().entrySet().stream().map(e -> String.format(Locale.ROOT, "%s=%s", e.getKey(), e.getValue())).collect(Collectors.joining(","))); + + child.setExpression(params.stream().map(e -> String.format(Locale.ROOT, "%s=%s", e.getKey(), Arrays.toString(e.getValue()))).collect(Collectors.joining(","))); + explanation.addChild(child); return explanation; } public void setStreamContext(StreamContext context) { - streamContext = context; + this.context = context; cache = context.getSolrClientCache(); } @@ -197,21 +212,22 @@ public List children() { } public void open() throws IOException { - ModifiableSolrParams paramsLoc = new ModifiableSolrParams(this.params); - addStats(paramsLoc, metrics); - paramsLoc.set("stats", "true"); + + String json = getJsonFacetString(metrics); + + ModifiableSolrParams paramsLoc = new ModifiableSolrParams(params); + paramsLoc.set("json.facet", json); paramsLoc.set("rows", "0"); - if (streamContext.isLocal()) { - paramsLoc.set("distrib", "false"); - } - Map> shardsMap = (Map>)streamContext.get("shards"); + @SuppressWarnings({"unchecked"}) + Map> shardsMap = (Map>)context.get("shards"); if(shardsMap == null) { QueryRequest request = new QueryRequest(paramsLoc, SolrRequest.METHOD.POST); - CloudSolrClient cloudSolrClient = cache.getCloudSolrClient(zkHost); + cloudSolrClient = cache.getCloudSolrClient(zkHost); try { + @SuppressWarnings({"rawtypes"}) NamedList response = cloudSolrClient.request(request, collection); - this.tuple = getTuple(response); + getTuples(response, metrics); } catch (Exception e) { throw new IOException(e); } @@ -227,8 +243,9 @@ public void open() throws IOException { QueryRequest request = new QueryRequest(paramsLoc, SolrRequest.METHOD.POST); try { + @SuppressWarnings({"rawtypes"}) NamedList response = client.request(request); - this.tuple = getTuple(response); + getTuples(response, metrics); } catch (Exception e) { throw new IOException(e); } @@ -246,117 +263,94 @@ private String getShardString(List shards) { return builder.toString(); } - - public void close() throws IOException { } public Tuple read() throws IOException { - if(!done) { - done = true; + if(index == 0) { + ++index; return tuple; } else { - Map fields = new HashMap<>(); - fields.put("EOF", true); - return new Tuple(fields); + return Tuple.EOF(); } } - public StreamComparator getStreamSort() { - return null; + private String getJsonFacetString(Metric[] _metrics) { + StringBuilder buf = new StringBuilder(); + appendJson(buf, _metrics); + return "{"+buf.toString()+"}"; } - private void addStats(ModifiableSolrParams params, Metric[] _metrics) { - Map> m = new HashMap<>(); + private void appendJson(StringBuilder buf, + Metric[] _metrics) { + + int metricCount = 0; for(Metric metric : _metrics) { - String metricId = metric.getIdentifier(); - if(metricId.contains("(")) { - metricId = metricId.substring(0, metricId.length()-1); - String[] parts = metricId.split("\\("); - String function = parts[0]; - String column = parts[1]; - List stats = m.get(column); - - if(stats == null) { - stats = new ArrayList<>(); - } - - if(!column.equals("*")) { - m.put(column, stats); + String identifier = metric.getIdentifier(); + if(!identifier.startsWith("count(")) { + if(metricCount>0) { + buf.append(","); } - - if(function.equals("min")) { - stats.add("min"); - } else if(function.equals("max")) { - stats.add("max"); - } 
else if(function.equals("sum")) { - stats.add("sum"); - } else if(function.equals("avg")) { - stats.add("mean"); - } else if(function.equals("count")) { - this.doCount = true; + if(identifier.startsWith("per(")) { + buf.append("\"facet_").append(metricCount).append("\":\"").append(identifier.replaceFirst("per", "percentile")).append('"'); + } else if(identifier.startsWith("std(")) { + buf.append("\"facet_").append(metricCount).append("\":\"").append(identifier.replaceFirst("std", "stddev")).append('"'); + } else { + buf.append("\"facet_").append(metricCount).append("\":\"").append(identifier).append('"'); } + ++metricCount; } } - - for(Entry> entry : m.entrySet()) { - StringBuilder buf = new StringBuilder(); - List stats = entry.getValue(); - buf.append("{!"); - - for(String stat : stats) { - buf.append(stat).append("=").append("true "); - } - - buf.append("}").append(entry.getKey()); - params.add("stats.field", buf.toString()); - } } - private Tuple getTuple(NamedList response) { + private void getTuples(@SuppressWarnings({"rawtypes"})NamedList response, + Metric[] metrics) { - Map map = new HashMap<>(); - SolrDocumentList solrDocumentList = (SolrDocumentList) response.get("response"); + this.tuple = new Tuple(); + @SuppressWarnings({"rawtypes"}) + NamedList facets = (NamedList)response.get("facets"); + fillTuple(tuple, facets, metrics); + } - long count = solrDocumentList.getNumFound(); + private void fillTuple(Tuple t, + @SuppressWarnings({"rawtypes"})NamedList nl, + Metric[] _metrics) { - if(doCount) { - map.put("count(*)", count); + if(nl == null) { + return; } - if(count != 0) { - NamedList stats = (NamedList)response.get("stats"); - NamedList statsFields = (NamedList)stats.get("stats_fields"); - - for(int i=0; i map, String field, String stat, Object val) { - if(stat.equals("mean")) { - String name = "avg("+field+")"; - Metric m = metricMap.get(name); - if(m.outputLong) { - Number num = (Number) val; - map.put(name, Math.round(num.doubleValue())); - } else { - map.put(name, val); - } - } else { - map.put(stat+"("+field+")", val); - } + @Override + public StreamComparator getStreamSort() { + return null; } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StreamContext.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StreamContext.java index 7f7e5e660a1e..921370a3c499 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StreamContext.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StreamContext.java @@ -38,9 +38,12 @@ public class StreamContext implements Serializable { + @SuppressWarnings({"rawtypes"}) private Map entries = new HashMap(); + @SuppressWarnings({"rawtypes"}) private Map tupleContext = new HashMap(); - private Map lets = new HashMap(); + private Map lets = new HashMap<>(); + @SuppressWarnings({"rawtypes"}) private ConcurrentMap objectCache; public int workerID; public int numWorkers; @@ -51,11 +54,12 @@ public class StreamContext implements Serializable { private SolrParams requestParams; private RequestReplicaListTransformerGenerator requestReplicaListTransformerGenerator; + @SuppressWarnings({"rawtypes"}) public ConcurrentMap getObjectCache() { return this.objectCache; } - public void setObjectCache(ConcurrentMap objectCache) { + public void setObjectCache(@SuppressWarnings({"rawtypes"})ConcurrentMap objectCache) { this.objectCache = objectCache; } @@ -67,6 +71,7 @@ public Object get(Object key) { return entries.get(key); } + @SuppressWarnings({"unchecked"}) public void put(Object key, 
Object value) { this.entries.put(key, value); } @@ -75,6 +80,7 @@ public boolean containsKey(Object key) { return entries.containsKey(key); } + @SuppressWarnings({"rawtypes"}) public Map getEntries() { return this.entries; } @@ -99,6 +105,7 @@ public void setStreamFactory(StreamFactory streamFactory) { this.streamFactory = streamFactory; } + @SuppressWarnings({"rawtypes"}) public Map getTupleContext() { return tupleContext; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java index 819d3ae447fd..6949d809e78f 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java @@ -99,7 +99,7 @@ public class TextLogitStream extends TupleStream implements Expressible { public TextLogitStream(String zkHost, String collectionName, - Map params, + @SuppressWarnings({"rawtypes"})Map params, String name, String field, TupleStream termsStream, @@ -283,9 +283,10 @@ private StreamExpression toExpression(StreamFactory factory, boolean includeStre return expression; } + @SuppressWarnings({"unchecked"}) private void init(String collectionName, String zkHost, - Map params, + @SuppressWarnings({"rawtypes"})Map params, String name, String feature, TupleStream termsStream, @@ -332,7 +333,7 @@ public void open() throws IOException { } public List children() { - List l = new ArrayList(); + List l = new ArrayList<>(); l.add(termsStream); return l; } @@ -371,7 +372,7 @@ protected List getShardUrls() throws IOException { private List> callShards(List baseUrls) throws IOException { - List> futures = new ArrayList(); + List> futures = new ArrayList<>(); for (String baseUrl : baseUrls) { LogitCall lc = new LogitCall(baseUrl, this.params, @@ -423,7 +424,7 @@ public void loadTerms() throws IOException { if (this.terms == null) { termsStream.open(); this.terms = new ArrayList<>(); - this.idfs = new ArrayList(); + this.idfs = new ArrayList<>(); while (true) { Tuple termTuple = termsStream.read(); @@ -438,13 +439,12 @@ public void loadTerms() throws IOException { } } + @SuppressWarnings({"unchecked"}) public Tuple read() throws IOException { try { if(++iteration > maxIterations) { - Map map = new HashMap(); - map.put("EOF", true); - return new Tuple(map); + return Tuple.EOF(); } else { if (this.idfs == null) { @@ -455,7 +455,7 @@ public Tuple read() throws IOException { } } - List> allWeights = new ArrayList(); + List> allWeights = new ArrayList<>(); this.evaluation = new ClassificationEvaluation(); this.error = 0; @@ -465,11 +465,13 @@ public Tuple read() throws IOException { List shardWeights = (List) tuple.get("weights"); allWeights.add(shardWeights); this.error += tuple.getDouble("error"); + @SuppressWarnings({"rawtypes"}) Map shardEvaluation = (Map) tuple.get("evaluation"); this.evaluation.addEvaluation(shardEvaluation); } this.weights = averageWeights(allWeights); + @SuppressWarnings({"rawtypes"}) Map map = new HashMap(); map.put(ID, name+"_"+iteration); map.put("name_s", name); @@ -516,7 +518,7 @@ private List averageWeights(List> allWeights) { working[i] = working[i] / allWeights.size(); } - List ave = new ArrayList(); + List ave = new ArrayList<>(); for(double d : working) { ave.add(d); } @@ -524,7 +526,7 @@ private List averageWeights(List> allWeights) { return ave; } - static String toString(List items) { + static String toString(@SuppressWarnings({"rawtypes"})List items) { 
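// Comma-joins the items into a single string, with no leading or trailing separator.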
StringBuilder buf = new StringBuilder(); for(Object item : items) { if(buf.length() > 0) { @@ -560,14 +562,13 @@ public void close() throws IOException {} @Override public Tuple read() throws IOException { - HashMap map = new HashMap(); - if(it.hasNext()) { - map.put("term_s",it.next()); - map.put("score_f",1.0); - return new Tuple(map); + if (it.hasNext()) { + Tuple tuple = new Tuple(); + tuple.put("term_s", it.next()); + tuple.put("score_f", 1.0); + return tuple; } else { - map.put("EOF", true); - return new Tuple(map); + return Tuple.EOF(); } } @@ -643,20 +644,23 @@ public Tuple call() throws Exception { QueryRequest request= new QueryRequest(params, SolrRequest.METHOD.POST); QueryResponse response = request.process(solrClient); + @SuppressWarnings({"rawtypes"}) NamedList res = response.getResponse(); + @SuppressWarnings({"rawtypes"}) NamedList logit = (NamedList)res.get("logit"); + @SuppressWarnings({"unchecked"}) List shardWeights = (List)logit.get("weights"); double shardError = (double)logit.get("error"); - Map map = new HashMap(); + Tuple tuple = new Tuple(); - map.put("error", shardError); - map.put("weights", shardWeights); - map.put("evaluation", logit.get("evaluation")); + tuple.put("error", shardError); + tuple.put("weights", shardWeights); + tuple.put("evaluation", logit.get("evaluation")); - return new Tuple(map); + return tuple; } } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TimeSeriesStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TimeSeriesStream.java index bccc4388b6dd..a7cbee727762 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TimeSeriesStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TimeSeriesStream.java @@ -22,10 +22,8 @@ import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; -import java.util.HashMap; import java.util.List; import java.util.Locale; -import java.util.Map; import java.util.Optional; import java.util.Map.Entry; import java.util.stream.Collectors; @@ -67,7 +65,7 @@ public class TimeSeriesStream extends TupleStream implements Expressible { private DateTimeFormatter formatter; private Metric[] metrics; - private List tuples = new ArrayList(); + private List tuples = new ArrayList<>(); private int index; private String zkHost; private SolrParams params; @@ -289,7 +287,7 @@ public void setStreamContext(StreamContext context) { } public List children() { - return new ArrayList(); + return new ArrayList<>(); } public void open() throws IOException { @@ -309,6 +307,7 @@ public void open() throws IOException { QueryRequest request = new QueryRequest(paramsLoc, SolrRequest.METHOD.POST); try { + @SuppressWarnings({"rawtypes"}) NamedList response = cloudSolrClient.request(request, collection); getTuples(response, field, metrics); } catch (Exception e) { @@ -328,10 +327,7 @@ public Tuple read() throws IOException { ++index; return tuple; } else { - Map fields = new HashMap(); - fields.put("EOF", true); - Tuple tuple = new Tuple(fields); - return tuple; + return Tuple.EOF(); } } @@ -366,35 +362,45 @@ private void appendJson(StringBuilder buf, if(metricCount>0) { buf.append(","); } - buf.append("\"facet_").append(metricCount).append("\":\"").append(identifier).append('"'); + if(identifier.startsWith("per(")) { + buf.append("\"facet_").append(metricCount).append("\":\"").append(identifier.replaceFirst("per", "percentile")).append('"'); + } else if(identifier.startsWith("std(")) { + 
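// Why this rewrite exists: Streaming Expressions name these metrics
// per(...) and std(...), but the JSON Facet API that executes the pushdown
// expects percentile(...) and stddev(...). For a hypothetical per(price_f,90)
// the generated facet entry would come out roughly as (field name and facet
// index are made up):
//
//   "facet_0":"percentile(price_f,90)"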
buf.append("\"facet_").append(metricCount).append("\":\"").append(identifier.replaceFirst("std", "stddev")).append('"'); + } else { + buf.append("\"facet_").append(metricCount).append("\":\"").append(identifier).append('"'); + } ++metricCount; } } buf.append("}}"); } - private void getTuples(NamedList response, + private void getTuples(@SuppressWarnings({"rawtypes"})NamedList response, String field, Metric[] metrics) { - Tuple tuple = new Tuple(new HashMap()); + Tuple tuple = new Tuple(); + @SuppressWarnings({"rawtypes"}) NamedList facets = (NamedList)response.get("facets"); fillTuples(tuples, tuple, facets, field, metrics); } private void fillTuples(List tuples, Tuple currentTuple, - NamedList facets, + @SuppressWarnings({"rawtypes"})NamedList facets, String field, Metric[] _metrics) { + @SuppressWarnings({"rawtypes"}) NamedList nl = (NamedList)facets.get("timeseries"); if(nl == null) { return; } + @SuppressWarnings({"rawtypes"}) List allBuckets = (List)nl.get("buckets"); for(int b=0; b children() { - List l = new ArrayList(); + List l = new ArrayList<>(); return l; } public void open() throws IOException { - this.tuples = new TreeSet(); - this.solrStreams = new ArrayList(); - this.eofTuples = Collections.synchronizedMap(new HashMap()); + this.tuples = new TreeSet<>(); + this.solrStreams = new ArrayList<>(); + this.eofTuples = Collections.synchronizedMap(new HashMap<>()); if(checkpoints.size() == 0 && streamContext.numWorkers > 1) { //Each worker must maintain its own checkpoints @@ -313,7 +313,7 @@ private void openStreams() throws IOException { ExecutorService service = ExecutorUtil.newMDCAwareCachedThreadPool(new SolrNamedThreadFactory("TopicStream")); try { - List> futures = new ArrayList(); + List> futures = new ArrayList<>(); for (TupleStream solrStream : solrStreams) { StreamOpener so = new StreamOpener((SolrStream) solrStream, comp); Future future = service.submit(so); @@ -430,6 +430,7 @@ private long getCheckpoint(Slice slice, Set liveNodes) throws IOExceptio if(streamContext != null) { StreamContext localContext = new StreamContext(); localContext.setSolrClientCache(streamContext.getSolrClientCache()); + localContext.setObjectCache(streamContext.getObjectCache()); solrStream.setStreamContext(localContext); } @@ -488,6 +489,7 @@ private void getPersistedCheckpoints() throws IOException { try { SolrDocument doc = httpClient.getById(id); if(doc != null) { + @SuppressWarnings({"unchecked"}) List checkpoints = (List)doc.getFieldValue("checkpoint_ss"); for (String checkpoint : checkpoints) { String[] pair = checkpoint.split("~"); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TupStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TupStream.java index a7bca77e86cb..769b6966aafa 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TupStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TupStream.java @@ -50,8 +50,8 @@ public class TupStream extends TupleStream implements Expressible { private Map stringParams = new HashMap<>(); private Map evaluatorParams = new HashMap<>(); private Map streamParams = new HashMap<>(); - private List fieldNames = new ArrayList(); - private Map fieldLabels = new HashMap(); + private List fieldNames = new ArrayList<>(); + private Map fieldLabels = new HashMap<>(); private Tuple tup = null; private Tuple unnestedTuple = null; private Iterator unnestedTuples = null; @@ -152,9 +152,7 @@ public Tuple read() throws IOException { if(unnestedTuples == null) { if (finished) { - 
Map m = new HashMap<>(); - m.put("EOF", true); - return new Tuple(m); + return Tuple.EOF(); } else { finished = true; if(unnestedTuple != null) { @@ -167,9 +165,7 @@ public Tuple read() throws IOException { if(unnestedTuples.hasNext()) { return unnestedTuples.next(); } else { - Map m = new HashMap<>(); - m.put("EOF", true); - return new Tuple(m); + return Tuple.EOF(); } } } @@ -178,6 +174,7 @@ public void close() throws IOException { // Nothing to do here } + @SuppressWarnings({"unchecked"}) public void open() throws IOException { Map values = new HashMap<>(); @@ -201,7 +198,7 @@ public void open() throws IOException { for(Entry param : streamParams.entrySet()){ try{ - List streamTuples = new ArrayList(); + List streamTuples = new ArrayList<>(); // open the stream, closed in finally block param.getValue().open(); @@ -225,6 +222,7 @@ public void open() throws IOException { if(o instanceof Tuple) { unnestedTuple = (Tuple)o; } else if(o instanceof List) { + @SuppressWarnings({"rawtypes"}) List l = (List)o; if(l.size() > 0 && l.get(0) instanceof Tuple) { List tl = (List)l; @@ -234,8 +232,8 @@ public void open() throws IOException { } } this.tup = new Tuple(values); - tup.fieldNames = fieldNames; - tup.fieldLabels = fieldLabels; + tup.setFieldNames(fieldNames); + tup.setFieldLabels(fieldLabels); // nothing to do here } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TupleStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TupleStream.java index 12eeac19f28c..90bfb0eb2f5d 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TupleStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TupleStream.java @@ -123,13 +123,14 @@ public static List getShards(String zkHost, return getShards(zkHost, collection, streamContext, new ModifiableSolrParams()); } + @SuppressWarnings({"unchecked"}) public static List getShards(String zkHost, String collection, StreamContext streamContext, SolrParams requestParams) throws IOException { Map> shardsMap = null; - List shards = new ArrayList(); + List shards = new ArrayList<>(); if(streamContext != null) { shardsMap = (Map>)streamContext.get("shards"); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/UpdateStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/UpdateStream.java index 5313f1477030..f309f194b622 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/UpdateStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/UpdateStream.java @@ -19,10 +19,8 @@ import java.io.IOException; import java.lang.invoke.MethodHandles; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Locale; -import java.util.Map; import java.util.Optional; import org.apache.solr.client.solrj.SolrServerException; @@ -68,7 +66,7 @@ public class UpdateStream extends TupleStream implements Expressible { private PushBackStream tupleSource; private transient SolrClientCache cache; private transient CloudSolrClient cloudSolrClient; - private List documentBatch = new ArrayList(); + private List documentBatch = new ArrayList<>(); private String coreName; public UpdateStream(StreamExpression expression, StreamFactory factory) throws IOException { @@ -296,9 +294,10 @@ private void setCloudSolrClient() { } } + @SuppressWarnings({"unchecked"}) private SolrInputDocument convertTupleToSolrDocument(Tuple tuple) { SolrInputDocument doc = new SolrInputDocument(); - for (Object field : 
tuple.fields.keySet()) { + for (Object field : tuple.getFields().keySet()) { if (! (field.equals(CommonParams.VERSION_FIELD) && pruneVersionField)) { Object value = tuple.get(field); @@ -347,16 +346,16 @@ protected void uploadBatchToCollection(List documentBatch) th private Tuple createBatchSummaryTuple(int batchSize) { assert batchSize > 0; - Map m = new HashMap(); + Tuple tuple = new Tuple(); this.totalDocsIndex += batchSize; ++batchNumber; - m.put(BATCH_INDEXED_FIELD_NAME, batchSize); - m.put("totalIndexed", this.totalDocsIndex); - m.put("batchNumber", batchNumber); - if(coreName != null) { - m.put("worker", coreName); + tuple.put(BATCH_INDEXED_FIELD_NAME, batchSize); + tuple.put("totalIndexed", this.totalDocsIndex); + tuple.put("batchNumber", batchNumber); + if (coreName != null) { + tuple.put("worker", coreName); } - return new Tuple(m); + return tuple; } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java index a85c33efea39..95fb6268a993 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java @@ -53,9 +53,11 @@ public class ZplotStream extends TupleStream implements Expressible { private static final long serialVersionUID = 1; private StreamContext streamContext; + @SuppressWarnings({"rawtypes"}) private Map letParams = new LinkedHashMap(); private Iterator out; + @SuppressWarnings({"unchecked"}) public ZplotStream(StreamExpression expression, StreamFactory factory) throws IOException { List namedParams = factory.getNamedOperands(expression); @@ -111,20 +113,18 @@ public Tuple read() throws IOException { if(out.hasNext()) { return out.next(); } else { - Map m = new HashMap(); - m.put("EOF", true); - Tuple t = new Tuple(m); - return t; + return Tuple.EOF(); } } public void close() throws IOException { } + @SuppressWarnings({"unchecked", "rawtypes"}) public void open() throws IOException { Map lets = streamContext.getLets(); Set> entries = letParams.entrySet(); - Map evaluated = new HashMap(); + Map evaluated = new HashMap<>(); //Load up the StreamContext with the data created by the letParams. 
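// A minimal sketch of the no-arg Tuple constructor plus put() that this
// patch uses in place of new Tuple(new HashMap()) throughout; the field
// names are illustrative:
//
//   Tuple tuple = new Tuple();
//   tuple.put("x", 1.0d);
//   tuple.put("y", 2.0d);
//   outTuples.add(tuple);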
int numTuples = -1; @@ -194,11 +194,11 @@ public void open() throws IOException { } //Load the values into tuples - List outTuples = new ArrayList(); + List outTuples = new ArrayList<>(); if(!table && !distribution && !clusters && !heat) { //Handle the vectors for (int i = 0; i < numTuples; i++) { - Tuple tuple = new Tuple(new HashMap()); + Tuple tuple = new Tuple(); for (Map.Entry entry : evaluated.entrySet()) { List l = (List) entry.getValue(); tuple.put(entry.getKey(), l.get(i)); @@ -208,7 +208,7 @@ public void open() throws IOException { } //Generate the x axis if the tuples contain y and not x - if (outTuples.get(0).fields.containsKey("y") && !outTuples.get(0).fields.containsKey("x")) { + if (outTuples.get(0).getFields().containsKey("y") && !outTuples.get(0).getFields().containsKey("x")) { int x = 0; for (Tuple tuple : outTuples) { tuple.put("x", x++); @@ -224,7 +224,7 @@ public void open() throws IOException { clusterNum++; List points = c.getPoints(); for (KmeansEvaluator.ClusterPoint p : points) { - Tuple tuple = new Tuple(new HashMap()); + Tuple tuple = new Tuple(); tuple.put("x", p.getPoint()[0]); tuple.put("y", p.getPoint()[1]); tuple.put("cluster", "cluster" + clusterNum); @@ -239,7 +239,7 @@ public void open() throws IOException { clusterNum++; List points = c.getPoints(); for (DbscanEvaluator.ClusterPoint p : points) { - Tuple tuple = new Tuple(new HashMap()); + Tuple tuple = new Tuple(); tuple.put("x", p.getPoint()[0]); tuple.put("y", p.getPoint()[1]); tuple.put("cluster", "cluster" + clusterNum); @@ -269,7 +269,7 @@ public void open() throws IOException { } for (int i = 0; i < x.length; i++) { - Tuple tuple = new Tuple(new HashMap()); + Tuple tuple = new Tuple(); if(!Double.isNaN(x[i])) { tuple.put("x", Precision.round(x[i], 2)); if(y[i] == Double.NEGATIVE_INFINITY || y[i] == Double.POSITIVE_INFINITY) { @@ -290,7 +290,7 @@ public void open() throws IOException { } Iterator it = frequency.valuesIterator(); - List values = new ArrayList(); + List values = new ArrayList<>(); while(it.hasNext()) { values.add((Long)it.next()); } @@ -302,7 +302,7 @@ public void open() throws IOException { } for (int i = 0; i < x.length; i++) { - Tuple tuple = new Tuple(new HashMap()); + Tuple tuple = new Tuple(); tuple.put("x", x[i]); tuple.put("y", y[i]); outTuples.add(tuple); @@ -312,16 +312,16 @@ public void open() throws IOException { if(list.get(0) instanceof Tuple) { List tlist = (List)o; Tuple tuple = tlist.get(0); - if(tuple.fields.containsKey("N")) { + if(tuple.getFields().containsKey("N")) { for(Tuple t : tlist) { - Tuple outtuple = new Tuple(new HashMap()); + Tuple outtuple = new Tuple(); outtuple.put("x", Precision.round(((double)t.get("mean")), 2)); outtuple.put("y", t.get("prob")); outTuples.add(outtuple); } - } else if(tuple.fields.containsKey("count")) { + } else if(tuple.getFields().containsKey("count")) { for(Tuple t : tlist) { - Tuple outtuple = new Tuple(new HashMap()); + Tuple outtuple = new Tuple(); outtuple.put("x", t.get("value")); outtuple.put("y", t.get("pct")); outTuples.add(outtuple); @@ -344,7 +344,7 @@ public void open() throws IOException { } else { rowLabel = Integer.toString(i); } - Tuple tuple = new Tuple(new HashMap()); + Tuple tuple = new Tuple(); tuple.put("rowLabel", rowLabel); double[] row = data[i]; for (int j = 0; j < row.length; j++) { @@ -378,7 +378,7 @@ public void open() throws IOException { double[] row = data[i]; for (int j = 0; j < row.length; j++) { - Tuple tuple = new Tuple(new HashMap()); + Tuple tuple = new Tuple(); tuple.put("y", rowLabel); 
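// Related accessor change: direct reads of Tuple's public state give way to
// getFields() here, and TupStream above now calls setFieldNames(...) and
// setFieldLabels(...) rather than assigning the fields directly, e.g.:
//
//   if (outTuples.get(0).getFields().containsKey("y")) { ... }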
String colLabel = null; if (colLabels != null) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/Explanation.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/Explanation.java index acaefbf4f060..b62777fade25 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/Explanation.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/Explanation.java @@ -128,6 +128,7 @@ public void addHelper(Explanation helper){ } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public Map toMap(Map map) { if(null != expressionNodeId){ map.put("expressionNodeId",expressionNodeId); } if(null != expressionType){ map.put("expressionType",expressionType); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExplanation.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExplanation.java index bfe6651f9866..d6e61c45f333 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExplanation.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExplanation.java @@ -55,6 +55,7 @@ public void addChild(Explanation child){ children.add(child); } + @SuppressWarnings({"unchecked"}) public Map toMap(Map map){ map = super.toMap(map); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExpression.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExpression.java index 35d9f52ed4d8..4d80a4dbdb20 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExpression.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExpression.java @@ -22,6 +22,7 @@ /** * Expression containing a function and set of parameters */ +@SuppressWarnings({"overrides"}) public class StreamExpression implements StreamExpressionParameter { private String functionName; private List parameters; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExpressionNamedParameter.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExpressionNamedParameter.java index 5c9f53c334f3..ccfc243918f9 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExpressionNamedParameter.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExpressionNamedParameter.java @@ -20,6 +20,7 @@ /** * Provides a named parameter */ +@SuppressWarnings({"overrides"}) public class StreamExpressionNamedParameter implements StreamExpressionParameter { private String name; private StreamExpressionParameter parameter; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExpressionValue.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExpressionValue.java index 20a3f9591429..433559ba58db 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExpressionValue.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExpressionValue.java @@ -19,6 +19,7 @@ /** * Basic string stream expression */ +@SuppressWarnings({"overrides"}) public class StreamExpressionValue implements StreamExpressionParameter { private String value; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamFactory.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamFactory.java index 4e176dd212c3..f3443e9c60ba 100644 --- 
a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamFactory.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamFactory.java @@ -45,17 +45,23 @@ */ public class StreamFactory implements Serializable { - private transient HashMap collectionZkHosts; - private transient HashMap>> functionNames; + private transient HashMap collectionZkHosts; + private transient HashMap>> functionNames; private transient String defaultZkHost; private transient String defaultCollection; + private transient String defaultSort; public StreamFactory(){ collectionZkHosts = new HashMap<>(); functionNames = new HashMap<>(); } + + public StreamFactory(HashMap>> functionNames) { + this.functionNames = functionNames; + collectionZkHosts = new HashMap<>(); + } - public StreamFactory withCollectionZkHost(String collectionName, String zkHost){ + public StreamFactory withCollectionZkHost(String collectionName, String zkHost) { this.collectionZkHosts.put(collectionName, zkHost); this.defaultCollection = collectionName; return this; @@ -70,12 +76,27 @@ public StreamFactory withDefaultZkHost(String zkHost) { return this; } + public Object clone() { + //Shallow copy + StreamFactory clone = new StreamFactory(functionNames); + return clone.withCollectionZkHost(defaultCollection, defaultZkHost).withDefaultSort(defaultSort); + } + + public StreamFactory withDefaultSort(String sort) { + this.defaultSort = sort; + return this; + } + + public String getDefaultSort() { + return this.defaultSort; + } + public String getDefaultZkHost() { return this.defaultZkHost; } - public String getCollectionZkHost(String collectionName){ - if(this.collectionZkHosts.containsKey(collectionName)){ + public String getCollectionZkHost(String collectionName) { + if (this.collectionZkHosts.containsKey(collectionName)) { return this.collectionZkHosts.get(collectionName); } return null; @@ -84,285 +105,284 @@ public String getCollectionZkHost(String collectionName){ public Map>> getFunctionNames() { return Collections.unmodifiableMap(functionNames); } - public StreamFactory withFunctionName(String functionName, Class clazz){ + + public StreamFactory withFunctionName(String functionName, Class clazz) { this.functionNames.put(functionName, () -> clazz); return this; } - public StreamFactory withFunctionName(String functionName, Supplier< Class> clazz){ + public StreamFactory withFunctionName(String functionName, Supplier< Class> clazz) { this.functionNames.put(functionName, clazz); return this; } + public StreamFactory withoutFunctionName(String functionName) { + this.functionNames.remove(functionName); + return this; + } - public StreamExpressionParameter getOperand(StreamExpression expression, int parameterIndex){ - if(null == expression.getParameters() || parameterIndex >= expression.getParameters().size()){ + public StreamExpressionParameter getOperand(StreamExpression expression, int parameterIndex) { + if (null == expression.getParameters() || parameterIndex >= expression.getParameters().size()) { return null; } - return expression.getParameters().get(parameterIndex); } - public List getValueOperands(StreamExpression expression){ + public List getValueOperands(StreamExpression expression) { return getOperandsOfType(expression, StreamExpressionValue.class).stream().map(item -> ((StreamExpressionValue) item).getValue()).collect(Collectors.toList()); } /** Given an expression, will return the value parameter at the given index, or null if doesn't exist */ - public String getValueOperand(StreamExpression 
expression, int parameterIndex){ + public String getValueOperand(StreamExpression expression, int parameterIndex) { StreamExpressionParameter parameter = getOperand(expression, parameterIndex); - if(null != parameter){ - if(parameter instanceof StreamExpressionValue){ + if (null != parameter) { + if (parameter instanceof StreamExpressionValue) { return ((StreamExpressionValue)parameter).getValue(); - } else if(parameter instanceof StreamExpression) { + } else if (parameter instanceof StreamExpression) { return parameter.toString(); } } - return null; } - public List getNamedOperands(StreamExpression expression){ + public List getNamedOperands(StreamExpression expression) { List namedParameters = new ArrayList<>(); - for(StreamExpressionParameter parameter : getOperandsOfType(expression, StreamExpressionNamedParameter.class)){ - namedParameters.add((StreamExpressionNamedParameter)parameter); + for (StreamExpressionParameter parameter : getOperandsOfType(expression, StreamExpressionNamedParameter.class)) { + namedParameters.add((StreamExpressionNamedParameter) parameter); } - return namedParameters; } - public StreamExpressionNamedParameter getNamedOperand(StreamExpression expression, String name){ + + public StreamExpressionNamedParameter getNamedOperand(StreamExpression expression, String name) { List namedParameters = getNamedOperands(expression); - for(StreamExpressionNamedParameter param : namedParameters){ - if(param.getName().equals(name)){ + for (StreamExpressionNamedParameter param : namedParameters) { + if (param.getName().equals(name)) { return param; } } - return null; } - public List getExpressionOperands(StreamExpression expression){ + public List getExpressionOperands(StreamExpression expression) { List namedParameters = new ArrayList<>(); - for(StreamExpressionParameter parameter : getOperandsOfType(expression, StreamExpression.class)){ - namedParameters.add((StreamExpression)parameter); + for (StreamExpressionParameter parameter : getOperandsOfType(expression, StreamExpression.class)) { + namedParameters.add((StreamExpression) parameter); } - return namedParameters; } - public List getExpressionOperands(StreamExpression expression, String functionName){ + + public List getExpressionOperands(StreamExpression expression, String functionName) { List namedParameters = new ArrayList<>(); - for(StreamExpressionParameter parameter : getOperandsOfType(expression, StreamExpression.class)){ - StreamExpression expressionOperand = (StreamExpression)parameter; - if(expressionOperand.getFunctionName().equals(functionName)){ + for (StreamExpressionParameter parameter : getOperandsOfType(expression, StreamExpression.class)) { + StreamExpression expressionOperand = (StreamExpression) parameter; + if (expressionOperand.getFunctionName().equals(functionName)) { namedParameters.add(expressionOperand); } } - return namedParameters; } - public List getOperandsOfType(StreamExpression expression, Class ... clazzes){ + + @SuppressWarnings({"unchecked"}) + public List getOperandsOfType(StreamExpression expression, + @SuppressWarnings({"rawtypes"})Class ... 
clazzes) { List parameters = new ArrayList<>(); parameterLoop: - for(StreamExpressionParameter parameter : expression.getParameters()){ - for(Class clazz : clazzes){ - if(!clazz.isAssignableFrom(parameter.getClass())){ + for (StreamExpressionParameter parameter : expression.getParameters()) { + for (@SuppressWarnings({"rawtypes"})Class clazz : clazzes) { + if (!clazz.isAssignableFrom(parameter.getClass())) { continue parameterLoop; // go to the next parameter since this parameter cannot be assigned to at least one of the classes } } - parameters.add(parameter); } - return parameters; } - public List getExpressionOperandsRepresentingTypes(StreamExpression expression, Class ... clazzes){ + @SuppressWarnings({"unchecked"}) + public List getExpressionOperandsRepresentingTypes(StreamExpression expression, + @SuppressWarnings({"rawtypes"})Class ... clazzes) { List matchingStreamExpressions = new ArrayList<>(); List allStreamExpressions = getExpressionOperands(expression); parameterLoop: - for(StreamExpression streamExpression : allStreamExpressions) { + for (StreamExpression streamExpression : allStreamExpressions) { Supplier> classSupplier = functionNames.get(streamExpression.getFunctionName()); if (classSupplier != null) { - for (Class clazz : clazzes) { + for (@SuppressWarnings({"rawtypes"})Class clazz : clazzes) { if (!clazz.isAssignableFrom(classSupplier.get())) { continue parameterLoop; } } - matchingStreamExpressions.add(streamExpression); } } - return matchingStreamExpressions; } - public boolean doesRepresentTypes(StreamExpression expression, Class ... clazzes){ + @SuppressWarnings({"unchecked"}) + public boolean doesRepresentTypes(StreamExpression expression, @SuppressWarnings({"rawtypes"})Class ... clazzes) { Supplier> classSupplier = functionNames.get(expression.getFunctionName()); - if(classSupplier != null){ - for(Class clazz : clazzes){ - if(!clazz.isAssignableFrom(classSupplier.get())){ + if (classSupplier != null) { + for (@SuppressWarnings({"rawtypes"})Class clazz : clazzes) { + if (!clazz.isAssignableFrom(classSupplier.get())) { return false; } } return true; } - return false; } - public int getIntOperand(StreamExpression expression, String paramName, Integer defaultValue) throws IOException{ + public int getIntOperand(StreamExpression expression, String paramName, Integer defaultValue) throws IOException { StreamExpressionNamedParameter param = getNamedOperand(expression, paramName); - if(null == param || null == param.getParameter() || !(param.getParameter() instanceof StreamExpressionValue)){ - if(null != defaultValue){ + if (null == param || null == param.getParameter() || !(param.getParameter() instanceof StreamExpressionValue)) { + if (null != defaultValue) { return defaultValue; } throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting a single '%s' parameter of type integer but didn't find one",expression, paramName)); } - String nStr = ((StreamExpressionValue)param.getParameter()).getValue(); - try{ + String nStr = ((StreamExpressionValue) param.getParameter()).getValue(); + try { return Integer.parseInt(nStr); - } - catch(NumberFormatException e){ - if(null != defaultValue){ + } catch (NumberFormatException e) { + if (null != defaultValue) { return defaultValue; } - throw new IOException(String.format(Locale.ROOT,"invalid expression %s - %s '%s' is not a valid integer.",expression, paramName, nStr)); + throw new IOException(String.format(Locale.ROOT,"invalid expression %s - %s '%s' is not a valid integer.", expression, paramName, nStr)); } 
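// A usage sketch for these operand helpers; the expression text, parameter
// names, and default values are made up for illustration:
//
//   StreamExpression expr =
//       StreamExpressionParser.parse("topic(checkpoints, q=\"*:*\", rows=500)");
//   int rows = factory.getIntOperand(expr, "rows", 100);           // -> 500
//   boolean strict = factory.getBooleanOperand(expr, "strict", false);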
} - public boolean getBooleanOperand(StreamExpression expression, String paramName, Boolean defaultValue) throws IOException{ + public boolean getBooleanOperand(StreamExpression expression, String paramName, Boolean defaultValue) throws IOException { StreamExpressionNamedParameter param = getNamedOperand(expression, paramName); - if(null == param || null == param.getParameter() || !(param.getParameter() instanceof StreamExpressionValue)){ - if(null != defaultValue){ + if (null == param || null == param.getParameter() || !(param.getParameter() instanceof StreamExpressionValue)) { + if (null != defaultValue) { return defaultValue; } - throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting a single '%s' parameter of type boolean but didn't find one",expression, paramName)); + throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting a single '%s' parameter of type boolean but didn't find one", expression, paramName)); } - String nStr = ((StreamExpressionValue)param.getParameter()).getValue(); + String nStr = ((StreamExpressionValue) param.getParameter()).getValue(); return Boolean.parseBoolean(nStr); } - public TupleStream constructStream(String expressionClause) throws IOException { return constructStream(StreamExpressionParser.parse(expressionClause)); } - public TupleStream constructStream(StreamExpression expression) throws IOException{ + @SuppressWarnings({"rawtypes"}) + public TupleStream constructStream(StreamExpression expression) throws IOException { String function = expression.getFunctionName(); Supplier> classSupplier = functionNames.get(function); - if(classSupplier != null){ + if (classSupplier != null) { Class clazz = classSupplier.get(); - if(Expressible.class.isAssignableFrom(clazz) && TupleStream.class.isAssignableFrom(clazz)){ + if (Expressible.class.isAssignableFrom(clazz) && TupleStream.class.isAssignableFrom(clazz)) { return (TupleStream)createInstance(clazz, new Class[]{ StreamExpression.class, StreamFactory.class }, new Object[]{ expression, this}); } } - throw new IOException(String.format(Locale.ROOT,"Invalid stream expression %s - function '%s' is unknown (not mapped to a valid TupleStream)", expression, expression.getFunctionName())); + throw new IOException(String.format(Locale.ROOT, "Invalid stream expression %s - function '%s' is unknown (not mapped to a valid TupleStream)", expression, expression.getFunctionName())); } public Metric constructMetric(String expressionClause) throws IOException { return constructMetric(StreamExpressionParser.parse(expressionClause)); } - public Metric constructMetric(StreamExpression expression) throws IOException{ + + @SuppressWarnings({"rawtypes"}) + public Metric constructMetric(StreamExpression expression) throws IOException { String function = expression.getFunctionName(); Supplier> classSupplier = functionNames.get(function); - if(classSupplier != null){ + if (classSupplier != null) { Class clazz = classSupplier.get(); - if(Expressible.class.isAssignableFrom(clazz) && Metric.class.isAssignableFrom(clazz)){ + if (Expressible.class.isAssignableFrom(clazz) && Metric.class.isAssignableFrom(clazz)) { return (Metric)createInstance(clazz, new Class[]{ StreamExpression.class, StreamFactory.class }, new Object[]{ expression, this}); } } - throw new IOException(String.format(Locale.ROOT,"Invalid metric expression %s - function '%s' is unknown (not mapped to a valid Metric)", expression, expression.getFunctionName())); + throw new IOException(String.format(Locale.ROOT, "Invalid 
metric expression %s - function '%s' is unknown (not mapped to a valid Metric)", expression, expression.getFunctionName())); } - public StreamComparator constructComparator(String comparatorString, Class comparatorType) throws IOException { - if(comparatorString.contains(",")){ + @SuppressWarnings({"unchecked", "rawtypes"}) + public StreamComparator constructComparator(String comparatorString, @SuppressWarnings({"rawtypes"})Class comparatorType) throws IOException { + if (comparatorString.contains(",")) { String[] parts = comparatorString.split(","); StreamComparator[] comps = new StreamComparator[parts.length]; - for(int idx = 0; idx < parts.length; ++idx){ + for (int idx = 0; idx < parts.length; ++idx) { comps[idx] = constructComparator(parts[idx].trim(), comparatorType); } return new MultipleFieldComparator(comps); - } - else if(comparatorString.contains("=")){ + } else if (comparatorString.contains("=")) { // expected format is "left=right order" String[] parts = comparatorString.split("[ =]"); - if(parts.length < 3){ - throw new IOException(String.format(Locale.ROOT,"Invalid comparator expression %s - expecting 'left=right order'",comparatorString)); + if (parts.length < 3) { + throw new IOException(String.format(Locale.ROOT, "Invalid comparator expression %s - expecting 'left=right order'",comparatorString)); } String leftFieldName = null; String rightFieldName = null; String order = null; - for(String part : parts){ + for (String part : parts) { // skip empty - if(null == part || 0 == part.trim().length()){ continue; } + if (null == part || 0 == part.trim().length()) { continue; } // assign each in order - if(null == leftFieldName){ + if (null == leftFieldName) { leftFieldName = part.trim(); - } - else if(null == rightFieldName){ + } else if (null == rightFieldName) { rightFieldName = part.trim(); - } - else { + } else { order = part.trim(); break; // we're done, stop looping } } - if(null == leftFieldName || null == rightFieldName || null == order){ - throw new IOException(String.format(Locale.ROOT,"Invalid comparator expression %s - expecting 'left=right order'",comparatorString)); + if (null == leftFieldName || null == rightFieldName || null == order) { + throw new IOException(String.format(Locale.ROOT, "Invalid comparator expression %s - expecting 'left=right order'",comparatorString)); } - return (StreamComparator)createInstance(comparatorType, new Class[]{ String.class, String.class, ComparatorOrder.class }, new Object[]{ leftFieldName, rightFieldName, ComparatorOrder.fromString(order) }); - } - else{ + return (StreamComparator) createInstance(comparatorType, new Class[]{ String.class, String.class, ComparatorOrder.class }, new Object[]{ leftFieldName, rightFieldName, ComparatorOrder.fromString(order) }); + } else { // expected format is "field order" String[] parts = comparatorString.split(" "); - if(2 != parts.length){ - throw new IOException(String.format(Locale.ROOT,"Invalid comparator expression %s - expecting 'field order'",comparatorString)); + if (2 != parts.length) { + throw new IOException(String.format(Locale.ROOT, "Invalid comparator expression %s - expecting 'field order'",comparatorString)); } String fieldName = parts[0].trim(); String order = parts[1].trim(); - return (StreamComparator)createInstance(comparatorType, new Class[]{ String.class, ComparatorOrder.class }, new Object[]{ fieldName, ComparatorOrder.fromString(order) }); + return (StreamComparator) createInstance(comparatorType, new Class[]{ String.class, ComparatorOrder.class }, new Object[]{ fieldName, 
ComparatorOrder.fromString(order) }); } } + @SuppressWarnings({"unchecked", "rawtypes"}) public StreamEqualitor constructEqualitor(String equalitorString, Class equalitorType) throws IOException { - if(equalitorString.contains(",")){ + if (equalitorString.contains(",")) { String[] parts = equalitorString.split(","); StreamEqualitor[] eqs = new StreamEqualitor[parts.length]; - for(int idx = 0; idx < parts.length; ++idx){ + for (int idx = 0; idx < parts.length; ++idx) { eqs[idx] = constructEqualitor(parts[idx].trim(), equalitorType); } return new MultipleFieldEqualitor(eqs); - } - else{ + } else { String leftFieldName; String rightFieldName; - if(equalitorString.contains("=")){ + if (equalitorString.contains("=")) { String[] parts = equalitorString.split("="); - if(2 != parts.length){ - throw new IOException(String.format(Locale.ROOT,"Invalid equalitor expression %s - expecting fieldName=fieldName",equalitorString)); + if (2 != parts.length) { + throw new IOException(String.format(Locale.ROOT, "Invalid equalitor expression %s - expecting fieldName=fieldName",equalitorString)); } leftFieldName = parts[0].trim(); rightFieldName = parts[1].trim(); - } - else{ + } else { leftFieldName = rightFieldName = equalitorString.trim(); } - return (StreamEqualitor)createInstance(equalitorType, new Class[]{ String.class, String.class }, new Object[]{ leftFieldName, rightFieldName }); + return (StreamEqualitor) createInstance(equalitorType, new Class[]{ String.class, String.class }, new Object[]{ leftFieldName, rightFieldName }); } } @@ -370,6 +390,7 @@ public Metric constructOperation(String expressionClause) throws IOException { return constructMetric(StreamExpressionParser.parse(expressionClause)); } + @SuppressWarnings({"rawtypes"}) public StreamOperation constructOperation(StreamExpression expression) throws IOException { String function = expression.getFunctionName(); Supplier> classSupplier = functionNames.get(function); @@ -386,18 +407,20 @@ public StreamOperation constructOperation(StreamExpression expression) throws IO public org.apache.solr.client.solrj.io.eval.StreamEvaluator constructEvaluator(String expressionClause) throws IOException { return constructEvaluator(StreamExpressionParser.parse(expressionClause)); } - public org.apache.solr.client.solrj.io.eval.StreamEvaluator constructEvaluator(StreamExpression expression) throws IOException{ + + @SuppressWarnings({"rawtypes"}) + public org.apache.solr.client.solrj.io.eval.StreamEvaluator constructEvaluator(StreamExpression expression) throws IOException { String function = expression.getFunctionName(); Supplier> classSupplier = functionNames.get(function); - if(classSupplier != null){ + if (classSupplier != null) { Class clazz = classSupplier.get(); - if(Expressible.class.isAssignableFrom(clazz) && StreamEvaluator.class.isAssignableFrom(clazz)){ + if (Expressible.class.isAssignableFrom(clazz) && StreamEvaluator.class.isAssignableFrom(clazz)) { return (org.apache.solr.client.solrj.io.eval.StreamEvaluator)createInstance(clazz, new Class[]{ StreamExpression.class, StreamFactory.class }, new Object[]{ expression, this}); } } - throw new IOException(String.format(Locale.ROOT,"Invalid evaluator expression %s - function '%s' is unknown (not mapped to a valid StreamEvaluator)", expression, expression.getFunctionName())); + throw new IOException(String.format(Locale.ROOT, "Invalid evaluator expression %s - function '%s' is unknown (not mapped to a valid StreamEvaluator)", expression, expression.getFunctionName())); } public boolean 
isStream(StreamExpression expression) throws IOException { @@ -452,13 +475,13 @@ public String getFunctionName(Class clazz) throws IOExcep throw new IOException(String.format(Locale.ROOT, "Unable to find function name for class '%s'", clazz.getName())); } - public Object constructPrimitiveObject(String original){ + public Object constructPrimitiveObject(String original) { String lower = original.trim().toLowerCase(Locale.ROOT); - if("null".equals(lower)){ return null; } - if("true".equals(lower) || "false".equals(lower)){ return Boolean.parseBoolean(lower); } - try{ return Long.valueOf(original); } catch(Exception ignored){}; - try{ return Double.valueOf(original); } catch(Exception ignored){}; + if ("null".equals(lower)) { return null; } + if ("true".equals(lower) || "false".equals(lower)){ return Boolean.parseBoolean(lower); } + try { return Long.valueOf(original); } catch(Exception ignored) { }; + try { return Double.valueOf(original); } catch(Exception ignored) { }; // is a string return original; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/metrics/CountMetric.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/metrics/CountMetric.java index 61b83398e308..093b95e9dc89 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/metrics/CountMetric.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/metrics/CountMetric.java @@ -26,6 +26,7 @@ public class CountMetric extends Metric { private String columnName; private long count; + private boolean isAllColumns; public CountMetric() { this("*"); @@ -56,12 +57,13 @@ public String[] getColumns() { private void init(String functionName, String columnName){ this.columnName = columnName; + this.isAllColumns = "*".equals(this.columnName); setFunctionName(functionName); setIdentifier(functionName, "(", columnName, ")"); } private boolean isAllColumns() { - return "*".equals(this.columnName); + return isAllColumns; } public void update(Tuple tuple) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/metrics/PercentileMetric.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/metrics/PercentileMetric.java new file mode 100644 index 000000000000..fe25de4f5d7c --- /dev/null +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/metrics/PercentileMetric.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.apache.solr.client.solrj.io.stream.metrics;
+
+import java.io.IOException;
+import java.util.Locale;
+
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+public class PercentileMetric extends Metric {
+  private long longMax = Long.MIN_VALUE;
+  private double doubleMax = -Double.MAX_VALUE;
+  private String columnName;
+  private int percentile;
+
+  public PercentileMetric(String columnName, int percentile){
+    init("per", columnName, percentile);
+  }
+
+  public PercentileMetric(StreamExpression expression, StreamFactory factory) throws IOException{
+    // grab all parameters out
+    String functionName = expression.getFunctionName();
+    String columnName = factory.getValueOperand(expression, 0);
+    int percentile = Integer.parseInt(factory.getValueOperand(expression, 1));
+
+    // validate expression contains only what we want.
+    if(null == columnName){
+      throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expected %s(columnName, percentile)", expression, functionName));
+    }
+    if(2 != expression.getParameters().size()){
+      throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - unknown operands found", expression));
+    }
+
+    init(functionName, columnName, percentile);
+  }
+
+  private void init(String functionName, String columnName, int percentile){
+    this.columnName = columnName;
+    this.percentile = percentile;
+    setFunctionName(functionName);
+    setIdentifier(functionName, "(", columnName, ","+percentile, ")");
+  }
+
+  public Number getValue() {
+    // Pushdown metric: the facet response supplies the value, so the
+    // accumulators above only ever hold their sentinels.
+    if(longMax == Long.MIN_VALUE) {
+      return doubleMax;
+    } else {
+      return longMax;
+    }
+  }
+
+  public String[] getColumns() {
+    return new String[]{columnName};
+  }
+
+  public void update(Tuple tuple) {
+    // Nothing to accumulate client-side; the percentile is computed by the
+    // JSON Facet API.
+  }
+
+  public Metric newInstance() {
+    return new PercentileMetric(columnName, percentile);
+  }
+
+  @Override
+  public StreamExpressionParameter toExpression(StreamFactory factory) throws IOException {
+    return new StreamExpression(getFunctionName()).withParameter(columnName).withParameter(Integer.toString(percentile));
+  }
+}
\ No newline at end of file
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/metrics/StdMetric.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/metrics/StdMetric.java
new file mode 100644
index 000000000000..b4c55b98ee0f
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/metrics/StdMetric.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.solrj.io.stream.metrics;
+
+import java.io.IOException;
+import java.util.Locale;
+
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+public class StdMetric extends Metric {
+  private String columnName;
+
+  public StdMetric(String columnName){
+    init("std", columnName, false);
+  }
+
+  public StdMetric(String columnName, boolean outputLong){
+    init("std", columnName, outputLong);
+  }
+
+  public StdMetric(StreamExpression expression, StreamFactory factory) throws IOException{
+    // grab all parameters out
+    String functionName = expression.getFunctionName();
+    String columnName = factory.getValueOperand(expression, 0);
+    String outputLong = factory.getValueOperand(expression, 1);
+
+    // validate expression contains only what we want.
+    if(null == columnName){
+      throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expected %s(columnName)", expression, functionName));
+    }
+
+    boolean ol = false;
+    if(outputLong != null) {
+      ol = Boolean.parseBoolean(outputLong);
+    }
+
+    init(functionName, columnName, ol);
+  }
+
+  private void init(String functionName, String columnName, boolean outputLong){
+    this.columnName = columnName;
+    this.outputLong = outputLong;
+    setFunctionName(functionName);
+    setIdentifier(functionName, "(", columnName, ")");
+  }
+
+  public void update(Tuple tuple) {
+    // Nothing to accumulate client-side; the standard deviation is computed
+    // by the JSON Facet API.
+  }
+
+  public Metric newInstance() {
+    return new StdMetric(columnName, outputLong);
+  }
+
+  public String[] getColumns() {
+    return new String[]{columnName};
+  }
+
+  public Number getValue() {
+    // Pushdown metric: the value is read from the facet response rather than
+    // from this object.
+    return null;
+  }
+
+  @Override
+  public StreamExpressionParameter toExpression(StreamFactory factory) throws IOException {
+    return new StreamExpression(getFunctionName()).withParameter(columnName).withParameter(Boolean.toString(outputLong));
+  }
+}
\ No newline at end of file
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
index 7cea4cf87e2b..2ec73a73fbab 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
@@ -110,6 +110,7 @@ public CollectionAdminRequest(String path, CollectionAction action) {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public SolrRequest getV2Request() {
     return usev2 ?
V1toV2ApiMapper.convert(this).useBinary(useBinaryV2).build() : @@ -294,6 +295,7 @@ public SolrParams getParams() { } } + @SuppressWarnings({"rawtypes"}) protected abstract static class ShardSpecificAdminRequest extends CollectionAdminRequest { protected String collection; @@ -445,7 +447,6 @@ public static class Create extends AsyncCollectionSpecificAdminRequest { protected Properties properties; protected Boolean autoAddReplicas; protected String alias; - protected Integer stateFormat; protected String[] rule , snitch; protected String withCollection; @@ -484,7 +485,6 @@ private Create(String collection, String config, String routerName, Integer numS public Create setPullReplicas(Integer pullReplicas) { this.pullReplicas = pullReplicas; return this;} public Create setReplicationFactor(Integer repl) { this.nrtReplicas = repl; return this; } - public Create setStateFormat(Integer stateFormat) { this.stateFormat = stateFormat; return this; } public Create setRule(String... s){ this.rule = s; return this; } public Create setSnitch(String... s){ this.snitch = s; return this; } @@ -506,8 +506,6 @@ public Create setAlias(String alias) { public Integer getNumTlogReplicas() {return tlogReplicas;} public Integer getNumPullReplicas() {return pullReplicas;} - public Integer getStateFormat() { return stateFormat; } - /** * Provide the name of the shards to be created, separated by commas * @@ -577,9 +575,6 @@ public SolrParams getParams() { if (properties != null) { addProperties(params, properties); } - if (stateFormat != null) { - params.set(DocCollection.STATE_FORMAT, stateFormat); - } if (pullReplicas != null) { params.set(ZkStateReader.PULL_REPLICAS, pullReplicas); } @@ -1535,6 +1530,7 @@ private ForceLeader(String collection, String shard) { public static class RequestStatusResponse extends CollectionAdminResponse { public RequestStatusState getRequestStatus() { + @SuppressWarnings({"rawtypes"}) NamedList innerResponse = (NamedList) getResponse().get("status"); return RequestStatusState.fromKey((String) innerResponse.get("state")); } @@ -2682,6 +2678,7 @@ protected CollectionAdminResponse createResponse(SolrClient client) { /** * Returns a SolrRequest to get a list of collections in the cluster */ + @SuppressWarnings({"unchecked"}) public static java.util.List listCollections(SolrClient client) throws IOException, SolrServerException { CollectionAdminResponse resp = new List().process(client); return (java.util.List) resp.getResponse().get("collections"); @@ -2794,35 +2791,6 @@ public SolrParams getParams() { params.set("property", propertyName); return params; } - - - } - - /** - * Returns a SolrRequest to migrate a collection state format - * - * This is an expert-level request, and should not generally be necessary. 
- */ - public static MigrateClusterState migrateCollectionFormat(String collection) { - return new MigrateClusterState(collection); - } - - // MIGRATECLUSTERSTATE request - public static class MigrateClusterState extends AsyncCollectionAdminRequest { - - protected String collection; - - private MigrateClusterState(String collection) { - super(CollectionAction.MIGRATESTATEFORMAT); - this.collection = checkNotNull(CoreAdminParams.COLLECTION, collection); - } - - @Override - public SolrParams getParams() { - ModifiableSolrParams params = new ModifiableSolrParams(super.getParams()); - params.set(CoreAdminParams.COLLECTION, collection); - return params; - } } /** diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionApiMapping.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionApiMapping.java index 74d0bbc175c7..5df18e02c476 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionApiMapping.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionApiMapping.java @@ -249,14 +249,17 @@ public SolrRequest.METHOD getMethod() { } Meta(EndPoint endPoint, SolrRequest.METHOD method, CollectionAction action, - String commandName, Map paramsToAttrs) { + String commandName, + @SuppressWarnings({"rawtypes"})Map paramsToAttrs) { this(endPoint, method, action, commandName, paramsToAttrs, Collections.emptyMap()); } // lame... the Maps aren't typed simply because callers want to use Utils.makeMap which yields object vals @SuppressWarnings("unchecked") Meta(EndPoint endPoint, SolrRequest.METHOD method, CollectionAction action, - String commandName, Map paramsToAttrs, Map prefixParamsToAttrs) { + String commandName, + @SuppressWarnings({"rawtypes"})Map paramsToAttrs, + @SuppressWarnings({"rawtypes"})Map prefixParamsToAttrs) { this.action = action; this.commandName = commandName; this.endPoint = endPoint; @@ -431,6 +434,7 @@ public String getSpecName() { + @SuppressWarnings({"unchecked", "rawtypes"}) private static Collection getParamNames_(CommandOperation op, CommandMeta command) { Object o = op.getCommandData(); if (o instanceof Map) { @@ -443,6 +447,7 @@ private static Collection getParamNames_(CommandOperation op, CommandMet } } + @SuppressWarnings({"unchecked"}) public static void collectKeyNames(Map map, List result, String prefix) { for (Map.Entry e : map.entrySet()) { if (e.getValue() instanceof Map) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/ConfigSetAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/ConfigSetAdminRequest.java index 3db85890c500..ab06a9f5501f 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/ConfigSetAdminRequest.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/ConfigSetAdminRequest.java @@ -40,6 +40,7 @@ public abstract class ConfigSetAdminRequest protected ConfigSetAction action = null; + @SuppressWarnings({"rawtypes"}) protected ConfigSetAdminRequest setAction(ConfigSetAction action) { this.action = action; return this; @@ -139,7 +140,7 @@ public SolrParams getParams() { params.set("baseConfigSet", baseConfigSetName); } if (properties != null) { - for (Map.Entry entry : properties.entrySet()) { + for (@SuppressWarnings({"rawtypes"})Map.Entry entry : properties.entrySet()) { params.set(PROPERTY_PREFIX + "." 
+ entry.getKey().toString(), entry.getValue().toString()); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreApiMapping.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreApiMapping.java index 9d8aa6eadd6d..164b9adb6436 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreApiMapping.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreApiMapping.java @@ -64,8 +64,9 @@ public enum Meta implements CommandMeta { public final CoreAdminAction action; public final Map paramstoAttr; + @SuppressWarnings({"unchecked"}) Meta(EndPoint endPoint, SolrRequest.METHOD method, CoreAdminAction action, String commandName, - Map paramstoAttr) { + @SuppressWarnings({"rawtypes"})Map paramstoAttr) { this.commandName = commandName; this.endPoint = endPoint; this.method = method; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java index 7d4569cb158d..3ff6a49c1ad1 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java @@ -71,8 +71,11 @@ public JavaBinUpdateRequestCodec setReadStringAsCharSeq(boolean flag) { * * @throws IOException in case of an exception during marshalling or writing to the stream */ + @SuppressWarnings({"unchecked"}) public void marshal(UpdateRequest updateRequest, OutputStream os) throws IOException { + @SuppressWarnings({"rawtypes"}) NamedList nl = new NamedList(); + @SuppressWarnings({"rawtypes"}) NamedList params = solrParamsToNamedList(updateRequest.getParams()); if (updateRequest.getCommitWithin() != -1) { params.add("commitWithin", updateRequest.getCommitWithin()); @@ -115,6 +118,7 @@ public void marshal(UpdateRequest updateRequest, OutputStream os) throws IOExcep * * @throws IOException in case of an exception while reading from the input stream or unmarshalling */ + @SuppressWarnings({"unchecked", "rawtypes"}) public UpdateRequest unmarshal(InputStream is, final StreamingUpdateHandler handler) throws IOException { final UpdateRequest updateRequest = new UpdateRequest(); List> doclist; @@ -181,6 +185,7 @@ public UpdateRequest unmarshal(InputStream is, final StreamingUpdateHandler hand } + @SuppressWarnings({"rawtypes"}) private NamedList solrParamsToNamedList(SolrParams params) { if (params == null) return new NamedList(); return params.toNamedList(); @@ -204,6 +209,7 @@ public Object getFieldValue(String name) { class StreamingCodec extends JavaBinCodec { + @SuppressWarnings({"rawtypes"}) private final NamedList[] namedList; private final UpdateRequest updateRequest; private final StreamingUpdateHandler handler; @@ -212,7 +218,7 @@ class StreamingCodec extends JavaBinCodec { // is ever refactored, this will not work. 
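// What the flag below guards: readIterator() special-cases only the
// outermost iterator in the javabin stream. That first Iterator is the
// document list, which readOuterMostDocIterator() hands to the
// StreamingUpdateHandler one document at a time instead of buffering the
// whole batch; any nested iterator falls through to the default
// JavaBinCodec.readIterator() behavior.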
private boolean seenOuterMostDocIterator; - public StreamingCodec(NamedList[] namedList, UpdateRequest updateRequest, StreamingUpdateHandler handler) { + public StreamingCodec(@SuppressWarnings({"rawtypes"})NamedList[] namedList, UpdateRequest updateRequest, StreamingUpdateHandler handler) { this.namedList = namedList; this.updateRequest = updateRequest; this.handler = handler; @@ -220,11 +226,13 @@ public StreamingCodec(NamedList[] namedList, UpdateRequest updateRequest, Stream } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) protected SolrInputDocument createSolrInputDocument(int sz) { return new MaskCharSequenceSolrInputDoc(new LinkedHashMap(sz)); } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public NamedList readNamedList(DataInputInputStream dis) throws IOException { int sz = readSize(dis); NamedList nl = new NamedList(); @@ -239,6 +247,7 @@ public NamedList readNamedList(DataInputInputStream dis) throws IOException { return nl; } + @SuppressWarnings({"rawtypes"}) private SolrInputDocument listToSolrInputDocument(List namedList) { SolrInputDocument doc = new SolrInputDocument(); for (int i = 0; i < namedList.size(); i++) { @@ -271,6 +280,7 @@ private SolrInputDocument listToSolrInputDocument(List namedList) { } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public List readIterator(DataInputInputStream fis) throws IOException { // default behavior for reading any regular Iterator in the stream if (seenOuterMostDocIterator) return super.readIterator(fis); @@ -282,6 +292,7 @@ public List readIterator(DataInputInputStream fis) throws IOException { } + @SuppressWarnings({"unchecked", "rawtypes"}) private List readOuterMostDocIterator(DataInputInputStream fis) throws IOException { if(namedList[0] == null) namedList[0] = new NamedList(); NamedList params = (NamedList) namedList[0].get("params"); @@ -338,11 +349,13 @@ private List readOuterMostDocIterator(DataInputInputStream fis) throws IOExcepti } } - private SolrInputDocument convertMapToSolrInputDoc(Map m) { + @SuppressWarnings({"unchecked"}) + private SolrInputDocument convertMapToSolrInputDoc(@SuppressWarnings({"rawtypes"})Map m) { SolrInputDocument result = createSolrInputDocument(m.size()); m.forEach((k, v) -> { if (CHILDDOC.equals(k.toString())) { if (v instanceof List) { + @SuppressWarnings({"rawtypes"}) List list = (List) v; for (Object o : list) { if (o instanceof Map) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/MultiContentWriterRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/MultiContentWriterRequest.java index 54a184239d75..9e2646f68ce3 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/MultiContentWriterRequest.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/MultiContentWriterRequest.java @@ -38,6 +38,7 @@ public class MultiContentWriterRequest extends AbstractUpdateRequest { + @SuppressWarnings({"rawtypes"}) private final Iterator> payload; /** @@ -47,7 +48,8 @@ public class MultiContentWriterRequest extends AbstractUpdateRequest { * @param payload add the per doc params, The Object could be a ByteBuffer or byte[] */ - public MultiContentWriterRequest(METHOD m, String path, Iterator> payload) { + public MultiContentWriterRequest(METHOD m, String path, + @SuppressWarnings({"rawtypes"})Iterator> payload) { super(m, path); params = new ModifiableSolrParams(); params.add("multistream", "true"); @@ -59,12 +61,15 @@ public MultiContentWriterRequest(METHOD m, String path, Iterator { while (payload.hasNext()) { 
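The streaming side can be exercised the same way. A sketch, assuming `StreamingUpdateHandler`'s single `update(document, request, commitWithin, overwrite)` method can be supplied as a lambda:

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.solr.client.solrj.request.JavaBinUpdateRequestCodec;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.common.SolrInputDocument;

public class JavaBinStreamingDemo {
  public static void main(String[] args) throws IOException {
    // Build and marshal a small request first.
    UpdateRequest req = new UpdateRequest();
    SolrInputDocument doc = new SolrInputDocument();
    doc.setField("id", "42");
    req.add(doc);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    new JavaBinUpdateRequestCodec().marshal(req, out);

    // unmarshal(...) invokes the handler once per document as it is read,
    // instead of materializing the whole request in memory.
    new JavaBinUpdateRequestCodec().unmarshal(
        new ByteArrayInputStream(out.toByteArray()),
        (document, request, commitWithin, overwrite) ->
            System.out.println("streamed doc: " + document.getFieldValue("id")));
  }
}
```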
+ @SuppressWarnings({"rawtypes"}) Pair next = payload.next(); if (next.second() instanceof ByteBuffer || next.second() instanceof byte[]) { + @SuppressWarnings({"rawtypes"}) NamedList params = next.first(); if(params.get(ASSUME_CONTENT_TYPE) == null){ String detectedType = detect(next.second()); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/RequestWriter.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/RequestWriter.java index 96650c2fadb1..0c69ce8bb271 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/RequestWriter.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/RequestWriter.java @@ -52,7 +52,7 @@ public interface ContentWriter { * {@link org.apache.solr.client.solrj.request.RequestWriter#getContentStreams(SolrRequest)} is * invoked to do a pull write. */ - public ContentWriter getContentWriter(SolrRequest req) { + public ContentWriter getContentWriter(@SuppressWarnings({"rawtypes"})SolrRequest req) { if (req instanceof UpdateRequest) { UpdateRequest updateRequest = (UpdateRequest) req; if (isEmpty(updateRequest)) return null; @@ -77,7 +77,8 @@ public String getContentType() { * @deprecated Use {@link #getContentWriter(SolrRequest)}. */ @Deprecated - public Collection getContentStreams(SolrRequest req) throws IOException { + @SuppressWarnings({"unchecked"}) + public Collection getContentStreams(@SuppressWarnings({"rawtypes"})SolrRequest req) throws IOException { if (req instanceof UpdateRequest) { return null; } @@ -91,11 +92,11 @@ protected boolean isEmpty(UpdateRequest updateRequest) { updateRequest.getDocIterator() == null; } - public String getPath(SolrRequest req) { + public String getPath(@SuppressWarnings({"rawtypes"})SolrRequest req) { return req.getPath(); } - public void write(SolrRequest request, OutputStream os) throws IOException { + public void write(@SuppressWarnings({"rawtypes"})SolrRequest request, OutputStream os) throws IOException { if (request instanceof UpdateRequest) { UpdateRequest updateRequest = (UpdateRequest) request; BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(os, StandardCharsets.UTF_8)); @@ -129,11 +130,11 @@ public String getContentType() { } } - protected boolean isNull(List l) { + protected boolean isNull(@SuppressWarnings({"rawtypes"})List l) { return l == null || l.isEmpty(); } - protected boolean isNull(Map l) { + protected boolean isNull(@SuppressWarnings({"rawtypes"})Map l) { return l == null || l.isEmpty(); } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java index f4c77a136dc1..4c86a13b762e 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java @@ -241,7 +241,7 @@ public UpdateResponse commit(SolrClient client, String collection) throws IOExce } private interface ReqSupplier { - T get(SolrRequest solrRequest, List servers); + T get(@SuppressWarnings({"rawtypes"})SolrRequest solrRequest, List servers); } private Map getRoutes(DocRouter router, diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/V1toV2ApiMapper.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/V1toV2ApiMapper.java index 0ec1112c214f..0cfaf201b754 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/V1toV2ApiMapper.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/V1toV2ApiMapper.java @@ 
-84,6 +84,7 @@ synchronized void setPath() { public V2Request.Builder convert(SolrParams paramsV1) { String[] list = new String[template.variables.size()]; MapWriter data = serializeToV2Format(paramsV1, list); + @SuppressWarnings({"rawtypes"}) Map o = data.toMap(new LinkedHashMap<>()); return new V2Request.Builder(template.apply(s -> { int idx = template.variables.indexOf(s); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/DomainMap.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/DomainMap.java index c23cee9456b5..e7e22cb4fce1 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/DomainMap.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/DomainMap.java @@ -38,6 +38,7 @@ public DomainMap withFilter(String filter) { put("filter", new ArrayList()); } + @SuppressWarnings({"unchecked"}) final List filterList = (List) get("filter"); filterList.add(filter); return this; @@ -57,6 +58,7 @@ public DomainMap withQuery(String query) { put("query", new ArrayList()); } + @SuppressWarnings({"unchecked"}) final List queryList = (List) get("query"); queryList.add(query); return this; @@ -79,6 +81,7 @@ public DomainMap withTagsToExclude(String excludeTagsValue) { put("excludeTags", new ArrayList()); } + @SuppressWarnings({"unchecked"}) final List excludeTagsList = (List) get("excludeTags"); excludeTagsList.add(excludeTagsValue); return this; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/HeatmapFacetMap.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/HeatmapFacetMap.java index ed64e08b7834..c7a740079da7 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/HeatmapFacetMap.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/HeatmapFacetMap.java @@ -39,7 +39,8 @@ public HeatmapFacetMap(String fieldName) { public HeatmapFacetMap getThis() { return this; } @Override - public HeatmapFacetMap withSubFacet(String facetName, JsonFacetMap map) { + public HeatmapFacetMap withSubFacet(String facetName, + @SuppressWarnings({"rawtypes"})JsonFacetMap map) { throw new UnsupportedOperationException(getClass().getName() + " doesn't currently support subfacets"); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/JsonFacetMap.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/JsonFacetMap.java index 3d3e6de8abc7..ea9186dfa479 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/JsonFacetMap.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/JsonFacetMap.java @@ -40,11 +40,13 @@ public B withDomain(DomainMap domain) { return getThis(); } - public B withSubFacet(String facetName, JsonFacetMap map) { + public B withSubFacet(String facetName, + @SuppressWarnings({"rawtypes"})JsonFacetMap map) { if (! 
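For context, `DomainMap`'s `with*` helpers (whose unchecked list casts the patch now suppresses locally) accumulate values in lists stored under the `filter`, `query` and `excludeTags` keys:

```java
import org.apache.solr.client.solrj.request.json.DomainMap;

public class DomainMapDemo {
  public static void main(String[] args) {
    // DomainMap is a Map<String,Object> underneath; each with*() call
    // creates the backing List on first use and appends to it.
    DomainMap domain = new DomainMap()
        .withFilter("inStock:true")
        .withFilter("price:[0 TO 100]")
        .withTagsToExclude("top_color");

    // e.g. {filter=[inStock:true, price:[0 TO 100]], excludeTags=[top_color]}
    // (HashMap iteration order is not guaranteed)
    System.out.println(domain);
  }
}
```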
containsKey("facet")) { put("facet", new HashMap()); } + @SuppressWarnings({"unchecked"}) final Map subFacetMap = (Map) get("facet"); subFacetMap.put(facetName, map); return getThis(); @@ -55,6 +57,7 @@ public B withStatSubFacet(String facetName, String statFacet) { put("facet", new HashMap()); } + @SuppressWarnings({"unchecked"}) final Map subFacetMap = (Map) get("facet"); subFacetMap.put(facetName, statFacet); return getThis(); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/JsonQueryRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/JsonQueryRequest.java index cea678dad85f..9febda06380e 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/JsonQueryRequest.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/JsonQueryRequest.java @@ -165,6 +165,7 @@ public JsonQueryRequest withFacet(String facetName, Map facetJso jsonRequestMap.put("facet", new HashMap()); } + @SuppressWarnings({"unchecked"}) final Map facetMap = (Map) jsonRequestMap.get("facet"); facetMap.put(facetName, facetJson); return this; @@ -205,6 +206,7 @@ public JsonQueryRequest withFacet(String facetName, MapWriter facetWriter) { jsonRequestMap.put("facet", new HashMap()); } + @SuppressWarnings({"unchecked"}) final Map facetMap = (Map) jsonRequestMap.get("facet"); facetMap.put(facetName, facetWriter); return this; @@ -239,6 +241,7 @@ public JsonQueryRequest withStatFacet(String facetName, String facetValue) { jsonRequestMap.put("facet", new HashMap()); } + @SuppressWarnings({"unchecked"}) final Map facetMap = (Map) jsonRequestMap.get("facet"); facetMap.put(facetName, facetValue); return this; @@ -299,6 +302,7 @@ public JsonQueryRequest setSort(String sort) { * localparams query (e.g. "{!lucene df=text v='solr'}" ) * @throws IllegalArgumentException if {@code filterQuery} is null */ + @SuppressWarnings({"unchecked"}) public JsonQueryRequest withFilter(String filterQuery) { if (filterQuery == null) { throw new IllegalArgumentException("'filterQuery' must be non-null"); @@ -326,6 +330,7 @@ public JsonQueryRequest withFilter(String filterQuery) { * @param filterQuery a Map of values representing the filter request you wish to send. * @throws IllegalArgumentException if {@code filterQuery} is null */ + @SuppressWarnings({"unchecked"}) public JsonQueryRequest withFilter(Map filterQuery) { if (filterQuery == null) { throw new IllegalArgumentException("'filterQuery' parameter must be non-null"); @@ -343,6 +348,7 @@ public JsonQueryRequest withFilter(Map filterQuery) { */ public JsonQueryRequest returnFields(String... 
fieldNames) { jsonRequestMap.putIfAbsent("fields", new ArrayList()); + @SuppressWarnings({"unchecked"}) final List fields = (List) jsonRequestMap.get("fields"); for (String fieldName : fieldNames) { fields.add(fieldName); @@ -364,6 +370,7 @@ public JsonQueryRequest returnFields(Iterable fieldNames) { } jsonRequestMap.putIfAbsent("fields", new ArrayList()); + @SuppressWarnings({"unchecked"}) final List fields = (List) jsonRequestMap.get("fields"); for (String fieldName : fieldNames) { fields.add(fieldName); @@ -387,6 +394,7 @@ public JsonQueryRequest returnFields(Iterable fieldNames) { * * @throws IllegalArgumentException if either {@code name} or {@code value} are null */ + @SuppressWarnings({"unchecked"}) public JsonQueryRequest withParam(String name, Object value) { if (name == null) { throw new IllegalArgumentException("'name' parameter must be non-null"); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/AnalysisResponseBase.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/AnalysisResponseBase.java index 54163766c5ee..cbd7f4ca4d3f 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/AnalysisResponseBase.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/AnalysisResponseBase.java @@ -74,6 +74,7 @@ protected List buildPhases(NamedList phaseNL) { TokenInfo tokenInfo = buildTokenInfoFromString((String) phaseValue); phase.addTokenInfo(tokenInfo); } else { + @SuppressWarnings({"unchecked"}) List> tokens = (List>) phaseEntry.getValue(); for (NamedList token : tokens) { TokenInfo tokenInfo = buildTokenInfo(token); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/CollectionAdminResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/CollectionAdminResponse.java index 70702a622ff5..ce0d63cdc3e1 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/CollectionAdminResponse.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/CollectionAdminResponse.java @@ -83,6 +83,7 @@ public Map> getAliasesAsLists() { return Aliases.convertMapOfCommaDelimitedToMapOfList(getAliases()); } + @SuppressWarnings({"unchecked"}) public Map> getAliasProperties() { NamedList response = getResponse(); if (response.get("properties") != null) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/ConfigSetAdminResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/ConfigSetAdminResponse.java index dc05f6d6b651..041eb0968458 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/ConfigSetAdminResponse.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/ConfigSetAdminResponse.java @@ -23,12 +23,13 @@ */ public class ConfigSetAdminResponse extends SolrResponseBase { - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked"}) public NamedList getErrorMessages() { return (NamedList) getResponse().get( "exceptions" ); } + @SuppressWarnings({"unchecked"}) public static class List extends ConfigSetAdminResponse { public java.util.List getConfigSets() { return (java.util.List) getResponse().get("configSets"); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/DelegationTokenResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/DelegationTokenResponse.java index f6281a0d3077..140e1c047577 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/DelegationTokenResponse.java +++ 
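Putting the annotated `JsonQueryRequest` helpers together; the client and collection in the commented line are assumptions:

```java
import org.apache.solr.client.solrj.request.json.JsonQueryRequest;

public class JsonQueryDemo {
  public static void main(String[] args) {
    // Each helper mutates the underlying request map: "filter" and "fields"
    // are lists created on first use, "params" is a nested map.
    JsonQueryRequest request = new JsonQueryRequest()
        .setQuery("memory")
        .withFilter("inStock:true")
        .returnFields("id", "name", "price")
        .withParam("df", "text");

    // request.process(solrClient, "techproducts") would execute it against
    // a live cluster; client and collection name are hypothetical here.
    System.out.println("request assembled");
  }
}
```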
b/solr/solrj/src/java/org/apache/solr/client/solrj/response/DelegationTokenResponse.java @@ -40,6 +40,7 @@ public static class Get extends DelegationTokenResponse { */ public String getDelegationToken() { try { + @SuppressWarnings({"rawtypes"}) Map map = (Map)getResponse().get("Token"); if (map != null) { return (String)map.get("urlString"); @@ -76,7 +77,9 @@ public String getWriterType() { } @Override + @SuppressWarnings({"unchecked"}) public NamedList processResponse(InputStream body, String encoding) { + @SuppressWarnings({"rawtypes"}) Map map = null; try { ObjectBuilder builder = new ObjectBuilder( diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/FieldStatsInfo.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/FieldStatsInfo.java index 96e40598ae80..91d05201050d 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/FieldStatsInfo.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/FieldStatsInfo.java @@ -51,6 +51,7 @@ public class FieldStatsInfo implements Serializable { Map percentiles; + @SuppressWarnings({"unchecked"}) public FieldStatsInfo( NamedList nl, String fname ) { name = fname; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/HealthCheckResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/HealthCheckResponse.java index b6fc36bb6b76..0b4d4bcceeac 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/HealthCheckResponse.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/HealthCheckResponse.java @@ -24,6 +24,7 @@ public class HealthCheckResponse extends SolrResponseBase { public HealthCheckResponse() { } + @SuppressWarnings({"unchecked"}) public NamedList getErrorMessages() { return (NamedList) getResponse().get( "errors" ); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/PivotField.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/PivotField.java index f5216d6ac6dd..7ea7cf995765 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/PivotField.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/PivotField.java @@ -29,9 +29,11 @@ public class PivotField implements Serializable final List _pivot; final Map _statsInfo; final Map _querycounts; + @SuppressWarnings({"rawtypes"}) final List _ranges; - public PivotField( String f, Object v, int count, List pivot, Map statsInfo, Map queryCounts, List ranges) + public PivotField( String f, Object v, int count, List pivot, Map statsInfo, Map queryCounts, + @SuppressWarnings({"rawtypes"})List ranges) { _field = f; _value = v; @@ -66,6 +68,7 @@ public Map getFacetQuery() { return _querycounts; } + @SuppressWarnings({"rawtypes"}) public List getFacetRanges() { return _ranges; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/QueryResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/QueryResponse.java index 92db560055ea..2559ac7cf18a 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/QueryResponse.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/QueryResponse.java @@ -43,6 +43,7 @@ public class QueryResponse extends SolrResponseBase // Direct pointers to known types private NamedList _header = null; private SolrDocumentList _results = null; + @SuppressWarnings({"rawtypes"}) private NamedList _sortvalues = null; private NamedList _facetInfo = null; private NamedList _debugInfo = null; @@ -68,6 +69,7 @@ public class QueryResponse extends 
SolrResponseBase private List _facetFields = null; private List _limitingFacets = null; private List _facetDates = null; + @SuppressWarnings({"rawtypes"}) private List _facetRanges = null; private NamedList> _facetPivot = null; private List _intervalFacets = null; @@ -117,6 +119,7 @@ public QueryResponse(SolrClient solrClient) { } @Override + @SuppressWarnings({"rawtypes"}) public void setResponse( NamedList res ) { super.setResponse( res ); @@ -277,6 +280,7 @@ private void extractGroupedInfo( NamedList info ) { } for (Object oGrp : groupsArr) { + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap grpMap = (SimpleOrderedMap) oGrp; Object sGroupValue = grpMap.get( "groupValue"); SolrDocumentList doclist = (SolrDocumentList) grpMap.get( "doclist"); @@ -316,6 +320,7 @@ private void extractHighlightingInfo( NamedList info ) } } + @SuppressWarnings({"rawtypes"}) private void extractFacetInfo( NamedList info ) { // Parse the queries @@ -379,6 +384,7 @@ private void extractFacetInfo( NamedList info ) } } + @SuppressWarnings({"rawtypes"}) private List extractRangeFacets(NamedList> rf) { List facetRanges = new ArrayList<>( rf.size() ); @@ -429,6 +435,7 @@ private List extractRangeFacets(NamedList> rf) { return facetRanges; } + @SuppressWarnings({"rawtypes"}) protected List readPivots( List list ) { ArrayList values = new ArrayList<>( list.size() ); @@ -456,7 +463,7 @@ protected List readPivots( List list ) assert null != val : "Server sent back 'null' for sub pivots?"; assert val instanceof List : "Server sent non-List for sub pivots?"; - subPivots = readPivots( (List) val ); + subPivots = readPivots( (List) val ); break; } case "stats": { @@ -514,6 +521,7 @@ public SolrDocumentList getResults() { return _results; } + @SuppressWarnings({"rawtypes"}) public NamedList getSortValues(){ return _sortvalues; } @@ -595,6 +603,7 @@ public List getFacetDates() { return _facetDates; } + @SuppressWarnings({"rawtypes"}) public List getFacetRanges() { return _facetRanges; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/RangeFacet.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/RangeFacet.java index b970ef545af7..0fca6b501e05 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/RangeFacet.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/RangeFacet.java @@ -107,9 +107,11 @@ public static class Count { private final String value; private final int count; + @SuppressWarnings({"rawtypes"}) private final RangeFacet rangeFacet; - public Count(String value, int count, RangeFacet rangeFacet) { + public Count(String value, int count, + @SuppressWarnings({"rawtypes"})RangeFacet rangeFacet) { this.value = value; this.count = count; this.rangeFacet = rangeFacet; @@ -123,6 +125,7 @@ public int getCount() { return count; } + @SuppressWarnings({"rawtypes"}) public RangeFacet getRangeFacet() { return rangeFacet; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/SolrResponseBase.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/SolrResponseBase.java index ffadb3827584..d030f8b6d6ee 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/SolrResponseBase.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/SolrResponseBase.java @@ -62,12 +62,14 @@ public String toString() { return response.toString(); } + @SuppressWarnings({"rawtypes"}) public NamedList getResponseHeader() { return (NamedList) response.get("responseHeader"); } // these two methods are based on the logic in 
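`getFacetRanges()` now advertises its raw-typed `List` explicitly rather than hiding it behind a method-wide suppression. A consuming sketch, assuming a running client, a `techproducts` collection and a numeric `price` field:

```java
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.RangeFacet;

public class RangeFacetDemo {
  static void printPriceRanges(SolrClient client) throws Exception {
    SolrQuery query = new SolrQuery("*:*")
        .addNumericRangeFacet("price", 0, 1000, 100);
    QueryResponse rsp = client.query("techproducts", query);

    for (Object o : rsp.getFacetRanges()) {       // raw List, as annotated above
      RangeFacet<?, ?> range = (RangeFacet<?, ?>) o;
      System.out.println("range facet: " + range.getName());
      for (RangeFacet.Count count : range.getCounts()) {
        System.out.println("  " + count.getValue() + " -> " + count.getCount());
      }
    }
  }
}
```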
SolrCore.setResponseHeaderValues(...) public int getStatus() { + @SuppressWarnings({"rawtypes"}) NamedList header = getResponseHeader(); if (header != null) { return (Integer) header.get("status"); @@ -78,6 +80,7 @@ public int getStatus() { } public int getQTime() { + @SuppressWarnings({"rawtypes"}) NamedList header = getResponseHeader(); if (header != null) { return (Integer) header.get("QTime"); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/SpellCheckResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/SpellCheckResponse.java index f8756ccb46fe..84138fe6a6e5 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/SpellCheckResponse.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/SpellCheckResponse.java @@ -145,6 +145,7 @@ public static class Suggestion { private List alternatives = new ArrayList<>(); private List alternativeFrequencies; + @SuppressWarnings({"rawtypes"}) public Suggestion(String token, NamedList suggestion) { this.token = token; for (int i = 0; i < suggestion.size(); i++) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/SuggesterResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/SuggesterResponse.java index e009dfe57c1f..3e5d385d45b8 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/SuggesterResponse.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/SuggesterResponse.java @@ -35,6 +35,7 @@ public class SuggesterResponse { private final Map> suggestionsPerDictionary = new LinkedHashMap<>(); + @SuppressWarnings({"unchecked", "rawtypes"}) public SuggesterResponse(Map> suggestInfo) { for (Map.Entry> entry : suggestInfo.entrySet()) { SimpleOrderedMap suggestionsNode = (SimpleOrderedMap) entry.getValue().getVal(0); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/BucketBasedJsonFacet.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/BucketBasedJsonFacet.java index 620535554319..1c0d707f8405 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/BucketBasedJsonFacet.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/BucketBasedJsonFacet.java @@ -49,6 +49,7 @@ public class BucketBasedJsonFacet { private long afterLastBucketCount = UNSET_FLAG; private long betweenAllBucketsCount = UNSET_FLAG; + @SuppressWarnings({"unchecked", "rawtypes"}) public BucketBasedJsonFacet(NamedList bucketBasedFacet) { for (Map.Entry entry : bucketBasedFacet) { final String key = entry.getKey(); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/HeatmapJsonFacet.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/HeatmapJsonFacet.java index 81fb6e182562..da22f72f5f33 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/HeatmapJsonFacet.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/HeatmapJsonFacet.java @@ -44,6 +44,7 @@ public class HeatmapJsonFacet { private List> countGrid; private String countEncodedAsBase64PNG; + @SuppressWarnings({"unchecked"}) public HeatmapJsonFacet(NamedList heatmapNL) { gridLevel = (int) heatmapNL.get("gridLevel"); columns = (int) heatmapNL.get("columns"); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/NestableJsonFacet.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/NestableJsonFacet.java index 52f544e2abcd..8369fb8d083e 100644 --- 
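`getStatus()` and `getQTime()` above both dig typed values out of the raw `responseHeader` `NamedList`; the same lookup can be reproduced standalone:

```java
import org.apache.solr.common.util.NamedList;

public class ResponseHeaderDemo {
  public static void main(String[] args) {
    NamedList<Object> header = new NamedList<>();
    header.add("status", 0);
    header.add("QTime", 12);

    NamedList<Object> response = new NamedList<>();
    response.add("responseHeader", header);

    // Mirrors the lookup pattern in SolrResponseBase: fetch the raw header,
    // then pull individually typed entries out of it.
    NamedList<?> h = (NamedList<?>) response.get("responseHeader");
    System.out.println("status=" + h.get("status") + " qtime=" + h.get("QTime"));
  }
}
```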
a/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/NestableJsonFacet.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/NestableJsonFacet.java @@ -56,6 +56,7 @@ public NestableJsonFacet(NamedList facetNL) { // Stat/agg facet value statsByName.put(key, entry.getValue()); } else if(entry.getValue() instanceof NamedList) { // Either heatmap/query/range/terms facet + @SuppressWarnings({"unchecked"}) final NamedList facet = (NamedList) entry.getValue(); final boolean isBucketBased = facet.get("buckets") != null; final boolean isHeatmap = HeatmapJsonFacet.isHeatmapFacet(facet); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/schema/SchemaResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/schema/SchemaResponse.java index eb9fdd542b19..7708c7313d70 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/schema/SchemaResponse.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/schema/SchemaResponse.java @@ -132,7 +132,8 @@ private static void fillFieldTypeDefinition(FieldTypeDefinition fieldTypeDefinit } } - private static SchemaRepresentation createSchemaConfiguration(Map schemaObj) { + private static SchemaRepresentation createSchemaConfiguration( + @SuppressWarnings({"rawtypes"})Map schemaObj) { SchemaRepresentation schemaRepresentation = new SchemaRepresentation(); schemaRepresentation.setName(getSchemaName(schemaObj)); schemaRepresentation.setVersion(getSchemaVersion(schemaObj)); @@ -145,19 +146,24 @@ private static SchemaRepresentation createSchemaConfiguration(Map schemaObj) { return schemaRepresentation; } - private static String getSchemaName(Map schemaNamedList) { + private static String getSchemaName( + @SuppressWarnings({"rawtypes"})Map schemaNamedList) { return (String) schemaNamedList.get("name"); } - private static Float getSchemaVersion(Map schemaNamedList) { + private static Float getSchemaVersion( + @SuppressWarnings({"rawtypes"})Map schemaNamedList) { return (Float) schemaNamedList.get("version"); } - private static String getSchemaUniqueKey(Map schemaNamedList) { + private static String getSchemaUniqueKey( + @SuppressWarnings({"rawtypes"})Map schemaNamedList) { return (String) schemaNamedList.get("uniqueKey"); } - private static Map getSimilarity(Map schemaNamedList) { + private static Map getSimilarity( + @SuppressWarnings({"rawtypes"})Map schemaNamedList) { + @SuppressWarnings({"unchecked"}) NamedList similarityNamedList = (NamedList) schemaNamedList.get("similarity"); Map similarity = null; if (similarityNamedList != null) similarity = extractAttributeMap(similarityNamedList); @@ -165,7 +171,8 @@ private static Map getSimilarity(Map schemaNamedList) { } @SuppressWarnings("unchecked") - private static List> getFields(Map schemaNamedList) { + private static List> getFields( + @SuppressWarnings({"rawtypes"})Map schemaNamedList) { List> fieldsAttributes = new LinkedList<>(); List> fieldsResponse = (List>) schemaNamedList.get("fields"); for (NamedList fieldNamedList : fieldsResponse) { @@ -177,7 +184,8 @@ private static List> getFields(Map schemaNamedList) { } @SuppressWarnings("unchecked") - private static List> getDynamicFields(Map schemaNamedList) { + private static List> getDynamicFields( + @SuppressWarnings({"rawtypes"})Map schemaNamedList) { List> dynamicFieldsAttributes = new LinkedList<>(); List> dynamicFieldsResponse = (List>) schemaNamedList.get("dynamicFields"); for (NamedList fieldNamedList : dynamicFieldsResponse) { @@ -189,7 +197,8 @@ private static 
List> getDynamicFields(Map schemaNamedList) { } @SuppressWarnings("unchecked") - private static List> getCopyFields(Map schemaNamedList) { + private static List> getCopyFields( + @SuppressWarnings({"rawtypes"})Map schemaNamedList) { List> copyFieldsAttributes = new LinkedList<>(); List> copyFieldsResponse = (List>) schemaNamedList.get("copyFields"); for (NamedList copyFieldNamedList : copyFieldsResponse) { @@ -201,7 +210,8 @@ private static List> getCopyFields(Map schemaNamedList) { } @SuppressWarnings("unchecked") - private static List getFieldTypeDefinitions(Map schemaNamedList) { + private static List getFieldTypeDefinitions( + @SuppressWarnings({"rawtypes"})Map schemaNamedList) { List fieldTypeDefinitions = new LinkedList<>(); List> fieldsResponse = (List>) schemaNamedList.get("fieldTypes"); for (NamedList fieldNamedList : fieldsResponse) { @@ -213,7 +223,8 @@ private static List getFieldTypeDefinitions(Map schemaNamed } @SuppressWarnings("unchecked") - private static List getFieldTypeRepresentations(Map schemaNamedList) { + private static List getFieldTypeRepresentations( + @SuppressWarnings({"rawtypes"})Map schemaNamedList) { List fieldTypeRepresentations = new LinkedList<>(); List> fieldsResponse = (List>) schemaNamedList.get("fieldTypes"); for (NamedList fieldNamedList : fieldsResponse) { @@ -229,6 +240,7 @@ private static List getFieldTypeRepresentations(Map sch public void setResponse(NamedList response) { super.setResponse(response); + @SuppressWarnings({"rawtypes"}) Map schemaObj = (Map) response.get("schema"); schemaRepresentation = createSchemaConfiguration(schemaObj); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/routing/RequestReplicaListTransformerGenerator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/routing/RequestReplicaListTransformerGenerator.java index 2f417687a4cc..4853787783c2 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/routing/RequestReplicaListTransformerGenerator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/routing/RequestReplicaListTransformerGenerator.java @@ -158,7 +158,7 @@ public void transform(List choices) { Object current; int idx = 1; int boundaryCount = 0; - int[] boundaries = new int[choices.size() - 1]; + int[] boundaries = new int[choices.size()]; do { current = iter.next(); if (replicaComp.compare(prev, current) != 0) { @@ -167,6 +167,7 @@ public void transform(List choices) { prev = current; idx++; } while (iter.hasNext()); + boundaries[boundaryCount++] = idx; // Finally inspect boundaries to apply base transformation, where necessary (separate phase to avoid ConcurrentModificationException) int startIdx = 0; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/util/ClientUtils.java b/solr/solrj/src/java/org/apache/solr/client/solrj/util/ClientUtils.java index 2a1dfadd50c7..887b264306c6 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/util/ClientUtils.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/util/ClientUtils.java @@ -63,6 +63,7 @@ public static Collection toContentStreams( final String str, fina //------------------------------------------------------------------------ //------------------------------------------------------------------------ + @SuppressWarnings({"unchecked"}) public static void writeXML( SolrInputDocument doc, Writer writer ) throws IOException { writer.write(""); @@ -81,6 +82,7 @@ public static void writeXML( SolrInputDocument doc, Writer writer ) throws IOExc update = entry.getKey().toString(); v = entry.getValue(); if (v 
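Buried in the annotation churn is a real fix: `transform(...)` in `RequestReplicaListTransformerGenerator` sized `boundaries` at `choices.size() - 1` and never recorded where the final run of equal replicas ends; the patch grows the array by one and appends the end-of-list boundary after the loop (without the larger array, a fully distinct list would overflow on that new write). A standalone re-creation of the fixed loop:

```java
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class BoundaryDemo {
  public static void main(String[] args) {
    List<String> choices = Arrays.asList("a", "a", "b", "c", "c");
    Comparator<String> comp = Comparator.naturalOrder();

    // A list of N elements can produce up to N boundaries once the final
    // run is closed, hence new int[choices.size()] rather than size() - 1.
    int[] boundaries = new int[choices.size()];
    int boundaryCount = 0;
    int idx = 1;
    String prev = choices.get(0);
    for (int i = 1; i < choices.size(); i++) {
      String current = choices.get(i);
      if (comp.compare(prev, current) != 0) {
        boundaries[boundaryCount++] = idx;  // a run of equal elements ended
      }
      prev = current;
      idx++;
    }
    boundaries[boundaryCount++] = idx;      // close the final run (the added line)

    // Prints [2, 3, 5]: runs end after positions 2, 3 and 5.
    System.out.println(Arrays.toString(Arrays.copyOf(boundaries, boundaryCount)));
  }
}
```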
instanceof Collection) { + @SuppressWarnings({"rawtypes"}) Collection values = (Collection) v; for (Object value : values) { writeVal(writer, name, value, update); diff --git a/solr/solrj/src/java/org/apache/solr/common/IteratorWriter.java b/solr/solrj/src/java/org/apache/solr/common/IteratorWriter.java index ec11c786d025..143506d9a700 100644 --- a/solr/solrj/src/java/org/apache/solr/common/IteratorWriter.java +++ b/solr/solrj/src/java/org/apache/solr/common/IteratorWriter.java @@ -74,6 +74,7 @@ default ItemWriter add(boolean v) throws IOException { return this; } } + @SuppressWarnings({"unchecked", "rawtypes"}) default List toList( List l) { try { writeIter(new ItemWriter() { diff --git a/solr/solrj/src/java/org/apache/solr/common/LinkedHashMapWriter.java b/solr/solrj/src/java/org/apache/solr/common/LinkedHashMapWriter.java index 8d07babe1569..3d8adf2caae3 100644 --- a/solr/solrj/src/java/org/apache/solr/common/LinkedHashMapWriter.java +++ b/solr/solrj/src/java/org/apache/solr/common/LinkedHashMapWriter.java @@ -42,12 +42,14 @@ public void writeMap(EntryWriter ew) throws IOException { } @Override + @SuppressWarnings({"unchecked"}) public Object _get(String path, Object def) { if (path.indexOf('/') == -1) return getOrDefault(path, (V) def); return MapWriter.super._get(path, def); } @Override + @SuppressWarnings({"unchecked"}) public Object _get(List path, Object def) { if (path.size() == 1) return getOrDefault(path.get(0), (V) def); return MapWriter.super._get(path, def); diff --git a/solr/solrj/src/java/org/apache/solr/common/MapSerializable.java b/solr/solrj/src/java/org/apache/solr/common/MapSerializable.java index 7fadc2ebee1b..f56f1b2e1c21 100644 --- a/solr/solrj/src/java/org/apache/solr/common/MapSerializable.java +++ b/solr/solrj/src/java/org/apache/solr/common/MapSerializable.java @@ -26,5 +26,6 @@ public interface MapSerializable { * Do not keep a reference to the passed map and reuse it. 
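`IteratorWriter.toList` and the `_get` overrides above all sit on the same `MapWriter`/`NavigableObject` machinery. A small sketch, assuming `MapWriter` can be written as a lambda over its single abstract `writeMap(EntryWriter)` method:

```java
import java.util.LinkedHashMap;

import org.apache.solr.common.MapWriter;

public class MapWriterDemo {
  public static void main(String[] args) {
    MapWriter mw = ew -> ew.put("name", "solr").put("port", 8983);

    // toMap(...) drives writeMap(...) with an EntryWriter that copies every
    // entry into the supplied map (the raw-typed default annotated above).
    System.out.println(mw.toMap(new LinkedHashMap<String, Object>()));

    // _getStr(...) is the NavigableObject path lookup with a default value.
    System.out.println(mw._getStr("name", "unknown"));
  }
}
```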
* it may be reused by the framework */ + @SuppressWarnings({"rawtypes"}) Map toMap(Map map); } diff --git a/solr/solrj/src/java/org/apache/solr/common/MapWriter.java b/solr/solrj/src/java/org/apache/solr/common/MapWriter.java index 926cf4c360db..726641023d15 100644 --- a/solr/solrj/src/java/org/apache/solr/common/MapWriter.java +++ b/solr/solrj/src/java/org/apache/solr/common/MapWriter.java @@ -40,6 +40,7 @@ default String jsonStr(){ } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) default Map toMap(Map map) { try { writeMap(new EntryWriter() { diff --git a/solr/solrj/src/java/org/apache/solr/common/MapWriterMap.java b/solr/solrj/src/java/org/apache/solr/common/MapWriterMap.java index f6b978fded77..8b35c9642a3a 100644 --- a/solr/solrj/src/java/org/apache/solr/common/MapWriterMap.java +++ b/solr/solrj/src/java/org/apache/solr/common/MapWriterMap.java @@ -22,24 +22,28 @@ import java.util.Map; public class MapWriterMap implements MapWriter { + @SuppressWarnings({"rawtypes"}) private final Map delegate; - public MapWriterMap(Map delegate) { + public MapWriterMap(@SuppressWarnings({"rawtypes"})Map delegate) { this.delegate = delegate; } @Override + @SuppressWarnings({"unchecked"}) public void writeMap(EntryWriter ew) throws IOException { delegate.forEach(ew.getBiConsumer()); } @Override + @SuppressWarnings({"unchecked"}) public Object _get(String path, Object def) { if (path.indexOf('/') == -1) return delegate.getOrDefault(path, def); return MapWriter.super._get(path, def); } @Override + @SuppressWarnings({"unchecked"}) public Object _get(List path, Object def) { if (path.size() == 1) return delegate.getOrDefault(path.get(0), def); return MapWriter.super._get(path, def); @@ -47,6 +51,7 @@ public Object _get(List path, Object def) { @Override + @SuppressWarnings({"rawtypes"}) public Map toMap(Map map) { return delegate; } diff --git a/solr/solrj/src/java/org/apache/solr/common/NavigableObject.java b/solr/solrj/src/java/org/apache/solr/common/NavigableObject.java index ccef7e2fc143..22dca2bbe40d 100644 --- a/solr/solrj/src/java/org/apache/solr/common/NavigableObject.java +++ b/solr/solrj/src/java/org/apache/solr/common/NavigableObject.java @@ -53,20 +53,20 @@ default String _getStr(String path, String def) { /**Iterate through the entries of a navigable Object at a certain path * @param path the json path */ - default void _forEachEntry(String path, BiConsumer fun) { + default void _forEachEntry(String path, @SuppressWarnings({"rawtypes"})BiConsumer fun) { Utils.forEachMapEntry(this, path, fun); } /**Iterate through the entries of a navigable Object at a certain path * @param path the json path */ - default void _forEachEntry(List path, BiConsumer fun) { + default void _forEachEntry(List path, @SuppressWarnings({"rawtypes"})BiConsumer fun) { Utils.forEachMapEntry(this, path, fun); } /**Iterate through each entry in this object */ - default void _forEachEntry(BiConsumer fun) { + default void _forEachEntry(@SuppressWarnings({"rawtypes"})BiConsumer fun) { Utils.forEachMapEntry(this, fun); } diff --git a/solr/solrj/src/java/org/apache/solr/common/SolrDocument.java b/solr/solrj/src/java/org/apache/solr/common/SolrDocument.java index d7b268b19af4..1cc32a0223f2 100644 --- a/solr/solrj/src/java/org/apache/solr/common/SolrDocument.java +++ b/solr/solrj/src/java/org/apache/solr/common/SolrDocument.java @@ -99,8 +99,8 @@ public boolean removeFields(String name) * set multiple fields with the included contents. 
This will replace any existing * field with the given name */ - @SuppressWarnings("unchecked") - public void setField(String name, Object value) + @SuppressWarnings({"unchecked", "rawtypes"}) + public void setField(String name, Object value) { if( value instanceof Object[] ) { value = new ArrayList(Arrays.asList( (Object[])value )); @@ -186,6 +186,7 @@ else if( value instanceof Object[] ) { public Object getFirstValue(String name) { Object v = _fields.get( name ); if (v == null || !(v instanceof Collection)) return v; + @SuppressWarnings({"rawtypes"}) Collection c = (Collection)v; if (c.size() > 0 ) { return c.iterator().next(); diff --git a/solr/solrj/src/java/org/apache/solr/common/SolrDocumentBase.java b/solr/solrj/src/java/org/apache/solr/common/SolrDocumentBase.java index 21d79a97fd48..d309719febea 100644 --- a/solr/solrj/src/java/org/apache/solr/common/SolrDocumentBase.java +++ b/solr/solrj/src/java/org/apache/solr/common/SolrDocumentBase.java @@ -48,6 +48,7 @@ public abstract class SolrDocumentBase implements Map, Serializ /** * Get a collection of values for a given field name */ + @SuppressWarnings({"rawtypes"}) public abstract Collection getFieldValues(String name); public abstract void addChildDocument(K child); diff --git a/solr/solrj/src/java/org/apache/solr/common/SolrDocumentList.java b/solr/solrj/src/java/org/apache/solr/common/SolrDocumentList.java index 58758337adf1..ab13b2debc3f 100644 --- a/solr/solrj/src/java/org/apache/solr/common/SolrDocumentList.java +++ b/solr/solrj/src/java/org/apache/solr/common/SolrDocumentList.java @@ -31,7 +31,16 @@ public class SolrDocumentList extends ArrayList private long numFound = 0; private long start = 0; private Float maxScore = null; + private Boolean numFoundExact = true; + public Boolean getNumFoundExact() { + return numFoundExact; + } + + public void setNumFoundExact(Boolean numFoundExact) { + this.numFoundExact = numFoundExact; + } + public Float getMaxScore() { return maxScore; } @@ -59,6 +68,7 @@ public void setStart(long start) { @Override public String toString() { return "{numFound="+numFound + +",numFoundExact="+String.valueOf(numFoundExact) +",start="+start + (maxScore!=null ? 
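`numFoundExact` is a new, serialized property of every `SolrDocumentList`: when the server stops counting hits early, `numFound` is a lower bound rather than an exact count. Reading it client-side might look like this:

```java
import org.apache.solr.common.SolrDocumentList;

public class NumFoundExactDemo {
  public static void main(String[] args) {
    SolrDocumentList results = new SolrDocumentList();
    results.setNumFound(1000);
    results.setNumFoundExact(false);   // server reported an approximate count

    String qualifier = results.getNumFoundExact() ? "exactly" : "at least";
    System.out.println("matched " + qualifier + " " + results.getNumFound() + " docs");
  }
}
```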
",maxScore="+maxScore : "") +",docs="+super.toString() diff --git a/solr/solrj/src/java/org/apache/solr/common/SolrException.java b/solr/solrj/src/java/org/apache/solr/common/SolrException.java index 673bae7212a4..9909c2b719c6 100644 --- a/solr/solrj/src/java/org/apache/solr/common/SolrException.java +++ b/solr/solrj/src/java/org/apache/solr/common/SolrException.java @@ -33,7 +33,7 @@ public class SolrException extends RuntimeException { public static final String ROOT_ERROR_CLASS = "root-error-class"; public static final String ERROR_CLASS = "error-class"; - final private Map mdcContext; + final private Map mdcContext; /** * This list of valid HTTP Status error codes that Solr may return in @@ -160,13 +160,12 @@ public static void log(Logger log, String msg, Throwable e) { } public static void log(Logger log, String msg) { - String stackTrace = msg; - String ignore = doIgnore(null, stackTrace); + String ignore = doIgnore(null, msg); if (ignore != null) { log.info(ignore); return; } - log.error(stackTrace); + log.error(msg); } // public String toString() { return toStr(this); } // oops, inf loop @@ -225,8 +224,9 @@ public static Throwable getRootCause(Throwable t) { return t; } + @SuppressWarnings({"unchecked"}) public void logInfoWithMdc(Logger logger, String msg) { - Map previousMdcContext = MDC.getCopyOfContextMap(); + Map previousMdcContext = MDC.getCopyOfContextMap(); MDC.setContextMap(mdcContext); try { logger.info(msg); @@ -236,7 +236,7 @@ public void logInfoWithMdc(Logger logger, String msg) { } public void logDebugWithMdc(Logger logger, String msg) { - Map previousMdcContext = MDC.getCopyOfContextMap(); + Map previousMdcContext = MDC.getCopyOfContextMap(); MDC.setContextMap(mdcContext); try { logger.debug(msg); @@ -246,7 +246,7 @@ public void logDebugWithMdc(Logger logger, String msg) { } public void logWarnWithMdc(Logger logger, String msg) { - Map previousMdcContext = MDC.getCopyOfContextMap(); + Map previousMdcContext = MDC.getCopyOfContextMap(); MDC.setContextMap(mdcContext); try { logger.warn(msg); diff --git a/solr/solrj/src/java/org/apache/solr/common/SolrInputField.java b/solr/solrj/src/java/org/apache/solr/common/SolrInputField.java index db36b3ec334f..f823ade770bc 100644 --- a/solr/solrj/src/java/org/apache/solr/common/SolrInputField.java +++ b/solr/solrj/src/java/org/apache/solr/common/SolrInputField.java @@ -108,7 +108,8 @@ else if( v instanceof Object[] ) { public Object getFirstValue() { if (value instanceof Collection) { - Collection c = (Collection) value; + @SuppressWarnings({"unchecked"}) + Collection c = (Collection) value; if (c.size() > 0) { return c.iterator().next(); } @@ -200,6 +201,7 @@ public SolrInputField deepCopy() { SolrInputField clone = new SolrInputField(name); // We can't clone here, so we rely on simple primitives if (value instanceof Collection) { + @SuppressWarnings({"unchecked"}) Collection values = (Collection) value; Collection cloneValues = new ArrayList<>(values.size()); cloneValues.addAll(values); diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/Aliases.java b/solr/solrj/src/java/org/apache/solr/common/cloud/Aliases.java index 3ba61b7ce145..45fc9d86ad94 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/Aliases.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/Aliases.java @@ -80,7 +80,7 @@ private Aliases(Map> collectionAliases, * @param zNodeVersion the version of the data in zookeeper that this instance corresponds to * @return A new immutable Aliases object */ - @SuppressWarnings("unchecked") + 
@SuppressWarnings({"unchecked", "rawtypes"}) public static Aliases fromJSON(byte[] bytes, int zNodeVersion) { Map aliasMap; if (bytes == null || bytes.length == 0) { @@ -89,6 +89,7 @@ public static Aliases fromJSON(byte[] bytes, int zNodeVersion) { aliasMap = (Map) Utils.fromJSON(bytes); } + @SuppressWarnings({"rawtypes"}) Map colAliases = aliasMap.getOrDefault(COLLECTION, Collections.emptyMap()); colAliases = convertMapOfCommaDelimitedToMapOfList(colAliases); // also unmodifiable @@ -106,6 +107,7 @@ public byte[] toJSON() { assert collectionAliasProperties.isEmpty(); return null; } else { + @SuppressWarnings({"rawtypes"}) Map tmp = new LinkedHashMap<>(); tmp.put(COLLECTION, convertMapOfListToMapOfCommaDelimited(collectionAliases)); if (!collectionAliasProperties.isEmpty()) { diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java index 96e53718f9c8..2f0779cbd061 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java @@ -108,6 +108,7 @@ public Map getClusterProperties() throws IOException { public void setClusterProperties(Map properties) throws IOException, KeeperException, InterruptedException { client.atomicUpdate(ZkStateReader.CLUSTER_PROPS, zkData -> { if (zkData == null) return Utils.toJSON(convertCollectionDefaultsToNestedFormat(properties)); + @SuppressWarnings({"unchecked"}) Map zkJson = (Map) Utils.fromJSON(zkData); zkJson = convertCollectionDefaultsToNestedFormat(zkJson); boolean modified = Utils.mergeJson(zkJson, convertCollectionDefaultsToNestedFormat(properties)); @@ -122,6 +123,7 @@ public void setClusterProperties(Map properties) throws IOExcept * @param properties the properties to be converted * @return the converted map */ + @SuppressWarnings({"unchecked"}) static Map convertCollectionDefaultsToNestedFormat(Map properties) { if (properties.containsKey(COLLECTION_DEF)) { Map values = (Map) properties.remove(COLLECTION_DEF); @@ -161,6 +163,7 @@ public void setClusterProperty(String propertyName, Object propertyValue) throws Stat s = new Stat(); try { if (client.exists(ZkStateReader.CLUSTER_PROPS, true)) { + @SuppressWarnings({"rawtypes"}) Map properties = (Map) Utils.fromJSON(client.getData(ZkStateReader.CLUSTER_PROPS, null, s, true)); if (propertyValue == null) { //Don't update ZK unless absolutely necessary. @@ -176,6 +179,7 @@ public void setClusterProperty(String propertyName, Object propertyValue) throws } } } else { + @SuppressWarnings({"rawtypes"}) Map properties = new LinkedHashMap(); properties.put(propertyName, propertyValue); client.create(ZkStateReader.CLUSTER_PROPS, Utils.toJSON(properties), CreateMode.PERSISTENT, true); diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java index 80f26f784db9..3c518e028144 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java @@ -39,8 +39,6 @@ * @lucene.experimental */ public class ClusterState implements JSONWriter.Writable { - - private final Integer znodeVersion; private final Map collectionStates, immutableCollectionStates; private Set liveNodes; @@ -48,9 +46,8 @@ public class ClusterState implements JSONWriter.Writable { /** * Use this constr when ClusterState is meant for consumption. 
*/ - public ClusterState(Integer znodeVersion, Set liveNodes, - Map collectionStates) { - this(liveNodes, getRefMap(collectionStates),znodeVersion); + public ClusterState(Set liveNodes, Map collectionStates) { + this(getRefMap(collectionStates), liveNodes); } private static Map getRefMap(Map collectionStates) { @@ -62,26 +59,26 @@ private static Map getRefMap(Map c return collRefs; } - /**Use this if all the collection states are not readily available and some needs to be lazily loaded + /** + * Use this if all the collection states are not readily available and some needs to be lazily loaded + * (parameter order different from constructor above to have different erasures) */ - public ClusterState(Set liveNodes, Map collectionStates, Integer znodeVersion){ - this.znodeVersion = znodeVersion; + public ClusterState(Map collectionStates, Set liveNodes){ this.liveNodes = new HashSet<>(liveNodes.size()); this.liveNodes.addAll(liveNodes); this.collectionStates = new LinkedHashMap<>(collectionStates); this.immutableCollectionStates = Collections.unmodifiableMap(collectionStates); } - /** * Returns a new cluster state object modified with the given collection. * * @param collectionName the name of the modified (or deleted) collection * @param collection the collection object. A null value deletes the collection from the state - * @return the updated cluster state which preserves the current live nodes and zk node version + * @return the updated cluster state which preserves the current live nodes */ public ClusterState copyWith(String collectionName, DocCollection collection) { - ClusterState result = new ClusterState(liveNodes, new LinkedHashMap<>(collectionStates), znodeVersion); + ClusterState result = new ClusterState(new LinkedHashMap<>(collectionStates), liveNodes); if (collection == null) { result.collectionStates.remove(collectionName); } else { @@ -90,13 +87,6 @@ public ClusterState copyWith(String collectionName, DocCollection collection) { return result; } - /** - * Returns the zNode version that was used to construct this instance. - */ - public int getZNodeVersion() { - return znodeVersion; - } - /** * Returns true if the specified collection name exists, false otherwise. * @@ -210,50 +200,48 @@ public boolean liveNodesContain(String name) { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("znodeVersion: ").append(znodeVersion); - sb.append("\n"); sb.append("live nodes:").append(liveNodes); sb.append("\n"); sb.append("collections:").append(collectionStates); return sb.toString(); } - public static ClusterState load(Integer version, byte[] bytes, Set liveNodes) { - return load(version, bytes, liveNodes, ZkStateReader.CLUSTER_STATE); - } /** - * Create ClusterState from json string that is typically stored in zookeeper. + * Create a ClusterState from Json. * - * @param version zk version of the clusterstate.json file (bytes) - * @param bytes clusterstate.json as a byte array + * @param bytes a byte array of a Json representation of a mapping from collection name to the Json representation of a + * {@link DocCollection} as written by {@link #write(JSONWriter)}. It can represent + * one or more collections. * @param liveNodes list of live nodes * @return the ClusterState */ - public static ClusterState load(Integer version, byte[] bytes, Set liveNodes, String znode) { - // System.out.println("######## ClusterState.load:" + (bytes==null ? 
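The "(parameter order different from constructor above to have different erasures)" comment is worth unpacking: if both constructors took `(Set<String>, Map<String, ...>)`, they would erase to the same `(Set, Map)` signature, which Java rejects. A minimal illustration of the rule:

```java
import java.util.Map;
import java.util.Set;

public class ErasureClashDemo {
  static class A {}
  static class B {}

  // These two overloads compile only because the parameter orders differ.
  // Declaring both as (Set<String>, Map<String, ...>) would fail to compile:
  // after type erasure both would be (Set, Map), i.e. "the same erasure".
  void init(Set<String> liveNodes, Map<String, A> states) {}
  void init(Map<String, B> states, Set<String> liveNodes) {}

  public static void main(String[] args) {
    System.out.println("compiles: the overloads differ after erasure");
  }
}
```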
null : new String(bytes))); + public static ClusterState createFromJson(int version, byte[] bytes, Set liveNodes) { if (bytes == null || bytes.length == 0) { - return new ClusterState(version, liveNodes, Collections.emptyMap()); + return new ClusterState(liveNodes, Collections.emptyMap()); } + @SuppressWarnings({"unchecked"}) Map stateMap = (Map) Utils.fromJSON(bytes); - return load(version, stateMap, liveNodes, znode); + return createFromCollectionMap(version, stateMap, liveNodes); } - public static ClusterState load(Integer version, Map stateMap, Set liveNodes, String znode) { + public static ClusterState createFromCollectionMap(int version, Map stateMap, Set liveNodes) { Map collections = new LinkedHashMap<>(stateMap.size()); for (Entry entry : stateMap.entrySet()) { String collectionName = entry.getKey(); - DocCollection coll = collectionFromObjects(collectionName, (Map)entry.getValue(), version, znode); + @SuppressWarnings({"unchecked"}) + DocCollection coll = collectionFromObjects(collectionName, (Map)entry.getValue(), version); collections.put(collectionName, new CollectionRef(coll)); } - return new ClusterState( liveNodes, collections,version); + return new ClusterState(collections, liveNodes); } // TODO move to static DocCollection.loadFromMap - private static DocCollection collectionFromObjects(String name, Map objs, Integer version, String znode) { + private static DocCollection collectionFromObjects(String name, Map objs, int version) { Map props; Map slices; + @SuppressWarnings({"unchecked"}) Map sliceObjs = (Map) objs.get(DocCollection.SHARDS); if (sliceObjs == null) { // legacy format from 4.0... there was no separate "shards" level to contain the collection shards. @@ -273,47 +261,30 @@ private static DocCollection collectionFromObjects(String name, Map map = new LinkedHashMap<>(); for (Entry e : collectionStates.entrySet()) { - // using this class check to avoid fetching from ZK in case of lazily loaded collection if (e.getValue().getClass() == CollectionRef.class) { - // check if it is a lazily loaded collection outside of clusterstate.json DocCollection coll = e.getValue().get(); - if (coll.getStateFormat() == 1) { - map.put(coll.getName(),coll); - } + map.put(coll.getName(),coll); } } jsonWriter.write(map); } - /** - * The version of clusterstate.json in ZooKeeper. - * - * @return null if ClusterState was created for publication, not consumption - * @deprecated true cluster state spans many ZK nodes, stop depending on the version number of the shared node! - * will be removed in 8.0 - */ - @Deprecated - public Integer getZkClusterStateVersion() { - return znodeVersion; - } - @Override public int hashCode() { final int prime = 31; int result = 1; - result = prime * result - + ((znodeVersion == null) ? 0 : znodeVersion.hashCode()); result = prime * result + ((liveNodes == null) ? 
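`createFromJson` replaces the old `load(...)` entry points now that there is no shared `clusterstate.json`. A sketch of calling it with a hand-written single-collection payload; the exact JSON shape is an assumption based on how `DocCollection` serializes:

```java
import java.nio.charset.StandardCharsets;
import java.util.Set;

import org.apache.solr.common.cloud.ClusterState;

public class ClusterStateJsonDemo {
  public static void main(String[] args) {
    // Outer keys are collection names; each value is the DocCollection
    // representation: shards (with range/state/replicas) plus a router.
    String json = "{\"c1\":{\"shards\":{\"shard1\":{\"range\":\"80000000-7fffffff\","
        + "\"state\":\"active\",\"replicas\":{}}},\"router\":{\"name\":\"compositeId\"}}}";

    ClusterState state = ClusterState.createFromJson(
        0, json.getBytes(StandardCharsets.UTF_8), Set.of("node1:8983_solr"));
    System.out.println(state.hasCollection("c1"));
  }
}
```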
0 : liveNodes.hashCode()); return result; } @@ -324,13 +295,9 @@ public boolean equals(Object obj) { if (obj == null) return false; if (getClass() != obj.getClass()) return false; ClusterState other = (ClusterState) obj; - if (znodeVersion == null) { - if (other.znodeVersion != null) return false; - } else if (!znodeVersion.equals(other.znodeVersion)) return false; if (liveNodes == null) { - if (other.liveNodes != null) return false; - } else if (!liveNodes.equals(other.liveNodes)) return false; - return true; + return other.liveNodes == null; + } else return liveNodes.equals(other.liveNodes); } diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/CompositeIdRouter.java b/solr/solrj/src/java/org/apache/solr/common/cloud/CompositeIdRouter.java index 9fba61ff59af..92efa2e35aea 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/CompositeIdRouter.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/CompositeIdRouter.java @@ -188,6 +188,7 @@ public List partitionRangeByKey(String key, Range range) { } @Override + @SuppressWarnings({"unchecked"}) public List partitionRange(int partitions, Range range) { int min = range.min; int max = range.max; diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/DocCollection.java b/solr/solrj/src/java/org/apache/solr/common/cloud/DocCollection.java index 08d5296e6bf8..9dafe6f7527c 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/DocCollection.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/DocCollection.java @@ -46,11 +46,11 @@ /** * Models a Collection in zookeeper (but that Java name is obviously taken, hence "DocCollection") */ +@SuppressWarnings({"overrides"}) public class DocCollection extends ZkNodeProps implements Iterable { public static final String DOC_ROUTER = "router"; public static final String SHARDS = "shards"; - public static final String STATE_FORMAT = "stateFormat"; public static final String RULE = "rule"; public static final String SNITCH = "snitch"; @@ -63,7 +63,6 @@ public class DocCollection extends ZkNodeProps implements Iterable { private final Map> nodeNameReplicas; private final Map> nodeNameLeaderReplicas; private final DocRouter router; - private final String znode; private final Integer replicationFactor; private final Integer numNrtReplicas; @@ -75,15 +74,16 @@ public class DocCollection extends ZkNodeProps implements Iterable { private final Boolean readOnly; public DocCollection(String name, Map slices, Map props, DocRouter router) { - this(name, slices, props, router, Integer.MAX_VALUE, ZkStateReader.CLUSTER_STATE); + this(name, slices, props, router, Integer.MAX_VALUE); } /** * @param name The name of the collection * @param slices The logical shards of the collection. This is used directly and a copy is not made. * @param props The properties of the slice. This is used directly and a copy is not made. + * @param zkVersion The version of the Collection node in Zookeeper (used for conditional updates). */ - public DocCollection(String name, Map slices, Map props, DocRouter router, int zkVersion, String znode) { + public DocCollection(String name, Map slices, Map props, DocRouter router, int zkVersion) { super(props==null ? props = new HashMap<>() : props); // -1 means any version in ZK CAS, so we choose Integer.MAX_VALUE instead to avoid accidental overwrites this.znodeVersion = zkVersion == -1 ? 
Integer.MAX_VALUE : zkVersion; @@ -119,7 +119,6 @@ public DocCollection(String name, Map slices, Map } this.activeSlicesArr = activeSlices.values().toArray(new Slice[activeSlices.size()]); this.router = router; - this.znode = znode == null? ZkStateReader.CLUSTER_STATE : znode; assert name != null && slices != null; } @@ -172,7 +171,7 @@ public static Object verifyProp(Map props, String propName, Obje * @return the resulting DocCollection */ public DocCollection copyWithSlices(Map slices){ - return new DocCollection(getName(), slices, propMap, router, znodeVersion,znode); + return new DocCollection(getName(), slices, propMap, router, znodeVersion); } /** @@ -247,9 +246,6 @@ public int getZNodeVersion(){ return znodeVersion; } - public int getStateFormat() { - return ZkStateReader.CLUSTER_STATE.equals(znode) ? 1 : 2; - } /** * @return replication factor for this collection or null if no * replication factor exists. @@ -270,11 +266,6 @@ public int getMaxShardsPerNode() { return maxShardsPerNode == 0 ? Integer.MAX_VALUE : maxShardsPerNode; } - public String getZNode(){ - return znode; - } - - public DocRouter getRouter() { return router; } @@ -285,7 +276,7 @@ public boolean isReadOnly() { @Override public String toString() { - return "DocCollection("+name+"/" + znode + "/" + znodeVersion + ")=" + toJSONString(this); + return "DocCollection("+name+"/" + znodeVersion + ")=" + toJSONString(this); } @Override @@ -386,12 +377,17 @@ public String getShardId(String nodeName, String coreName) { @Override public boolean equals(Object that) { - if (that instanceof DocCollection == false) + if (!(that instanceof DocCollection)) return false; DocCollection other = (DocCollection) that; - return super.equals(that) && Objects.equals(this.znode, other.znode) && this.znodeVersion == other.znodeVersion; + return super.equals(that) && Objects.equals(this.name, other.name) && this.znodeVersion == other.znodeVersion; } +// @Override +// public int hashCode() { +// throw new UnsupportedOperationException("TODO unimplemented DocCollection.hashCode"); +// } + /** * @return the number of replicas of type {@link org.apache.solr.common.cloud.Replica.Type#NRT} this collection was created with */ @@ -426,6 +422,5 @@ public int getExpectedReplicaCount(Replica.Type type, int def) { if (type == Replica.Type.PULL) result = numPullReplicas; if (type == Replica.Type.TLOG) result = numTlogReplicas; return result == null ? def : result; - } } diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/DocRouter.java b/solr/solrj/src/java/org/apache/solr/common/cloud/DocRouter.java index 65688f497273..02cec6990b86 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/DocRouter.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/DocRouter.java @@ -50,6 +50,7 @@ public static DocRouter getDocRouter(String routerName) { public String getRouteField(DocCollection coll) { if (coll == null) return null; + @SuppressWarnings({"rawtypes"}) Map m = (Map) coll.get(DOC_ROUTER); if (m == null) return null; return (String) m.get("field"); @@ -169,6 +170,7 @@ public List partitionRange(int partitions, Range range) { * of variation in resulting ranges - odd ranges will be larger and even ranges will be smaller * by up to that percentage. 
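Editor's note on the hash-range splitting annotated just above: `DocRouter.partitionRange` divides a hash range into equal slices, and the `fuzz` variant skews odd/even slice sizes by up to the given fraction. A minimal sketch of the plain two-way split, assuming a stock `CompositeIdRouter`; the printed values are illustrative:

```java
import java.util.List;
import org.apache.solr.common.cloud.CompositeIdRouter;
import org.apache.solr.common.cloud.DocRouter;

public class PartitionRangeSketch {
  public static void main(String[] args) {
    CompositeIdRouter router = new CompositeIdRouter();
    // Split the full 32-bit hash ring into two equal shard ranges.
    List<DocRouter.Range> halves = router.partitionRange(2, router.fullRange());
    for (DocRouter.Range r : halves) {
      System.out.println(r); // e.g. 80000000-ffffffff and 0-7fffffff
    }
  }
}
```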
*/ + @SuppressWarnings({"unchecked"}) public List partitionRange(int partitions, Range range, float fuzz) { int min = range.min; int max = range.max; diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/Replica.java b/solr/solrj/src/java/org/apache/solr/common/cloud/Replica.java index 1409e6c8ebdc..9affb683ff34 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/Replica.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/Replica.java @@ -22,7 +22,7 @@ import java.util.Set; import org.apache.solr.common.util.Utils; - +@SuppressWarnings({"overrides"}) public class Replica extends ZkNodeProps { /** @@ -153,7 +153,10 @@ public boolean equals(Object o) { return name.equals(replica.name); } - +// @Override +// public int hashCode() { +// throw new UnsupportedOperationException("TODO unimplemented Replica.hashCode()"); +// } /** Also known as coreNodeName. */ public String getName() { return name; diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/Slice.java b/solr/solrj/src/java/org/apache/solr/common/cloud/Slice.java index c9adbab2357d..4378ef761f3a 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/Slice.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/Slice.java @@ -40,6 +40,7 @@ public class Slice extends ZkNodeProps implements Iterable { public final String collection; /** Loads multiple slices into a Map from a generic Map that probably came from deserialized JSON. */ + @SuppressWarnings({"unchecked"}) public static Map loadAllFromMap(String collection, Map genericSlices) { if (genericSlices == null) return Collections.emptyMap(); Map result = new LinkedHashMap<>(genericSlices.size()); @@ -129,6 +130,7 @@ public static State getState(String stateStr) { * @param replicas The replicas of the slice. This is used directly and a copy is not made. If null, replicas will be constructed from props. * @param props The properties of the slice - a shallow copy will always be made. */ + @SuppressWarnings({"unchecked", "rawtypes"}) public Slice(String name, Map replicas, Map props, String collection) { super( props==null ? 
new LinkedHashMap(2) : new LinkedHashMap<>(props)); this.name = name; @@ -188,6 +190,7 @@ public Slice(String name, Map replicas, Map props } + @SuppressWarnings({"unchecked"}) private Map makeReplicas(String collection, String slice,Map genericReplicas) { if (genericReplicas == null) return new HashMap<>(1); Map result = new LinkedHashMap<>(genericReplicas.size()); diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZkClient.java b/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZkClient.java index 7af119ac4697..051870d6fe88 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZkClient.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZkClient.java @@ -608,12 +608,7 @@ public void printLayout(String path, int indent, StringBuilder string) string.append(dent).append(path).append(" (").append(children.size()).append(")").append(NEWL); if (data != null) { String dataString = new String(data, StandardCharsets.UTF_8); - if ((!path.endsWith(".txt") && !path.endsWith(".xml")) || path.endsWith(ZkStateReader.CLUSTER_STATE)) { - if (path.endsWith(".xml")) { - // this is the cluster state in xml format - lets pretty print - dataString = prettyPrint(dataString); - } - + if (!path.endsWith(".txt") && !path.endsWith(".xml")) { string.append(dent).append("DATA:\n").append(dent).append(" ").append(dataString.replaceAll("\n", "\n" + dent + " ")).append(NEWL); } else { string.append(dent).append("DATA: ...supressed...").append(NEWL); diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZooKeeper.java b/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZooKeeper.java index a60a2759e901..62288c82acab 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZooKeeper.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZooKeeper.java @@ -32,6 +32,7 @@ import org.apache.zookeeper.ZooKeeper; // we use this class to expose nasty stuff for tests +@SuppressWarnings({"try"}) public class SolrZooKeeper extends ZooKeeper { final Set spawnedThreads = new CopyOnWriteArraySet<>(); diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkNodeProps.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkNodeProps.java index dfd85ca403b8..75641f588663 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkNodeProps.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkNodeProps.java @@ -31,6 +31,7 @@ /** * ZkNodeProps contains generic immutable properties. */ +@SuppressWarnings({"overrides"}) public class ZkNodeProps implements JSONWriter.Writable { protected final Map propMap; @@ -91,6 +92,7 @@ public Map shallowCopy() { /** * Create Replica from json string that is typically stored in zookeeper. 
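A quick illustration of the `ZkNodeProps.load` path whose javadoc appears just above (the method accepts both javabin, tagged by a leading `2` byte, and plain JSON as seen in its body below). A minimal sketch with a made-up replica payload:

```java
import java.nio.charset.StandardCharsets;
import org.apache.solr.common.cloud.ZkNodeProps;

public class ZkNodePropsSketch {
  public static void main(String[] args) {
    // Illustrative JSON, shaped like the replica properties Solr keeps in ZK.
    byte[] bytes = "{\"core\":\"people_shard1_replica_n1\",\"node_name\":\"node1:8983_solr\"}"
        .getBytes(StandardCharsets.UTF_8);
    ZkNodeProps props = ZkNodeProps.load(bytes);
    System.out.println(props.getStr("core")); // people_shard1_replica_n1
  }
}
```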
*/ + @SuppressWarnings({"unchecked"}) public static ZkNodeProps load(byte[] bytes) { Map props = null; if (bytes[0] == 2) { @@ -169,4 +171,8 @@ public boolean getBool(String key, boolean b) { public boolean equals(Object that) { return that instanceof ZkNodeProps && ((ZkNodeProps)that).propMap.equals(this.propMap); } +// @Override +// public int hashCode() { +// throw new UnsupportedOperationException("TODO unimplemented ZkNodeProps.hashCode"); +// } } diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java index ae28149146ac..c5e5ee728435 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java @@ -17,6 +17,7 @@ package org.apache.solr.common.cloud; import java.lang.invoke.MethodHandles; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -28,7 +29,6 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.Objects; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; @@ -69,8 +69,6 @@ import org.slf4j.LoggerFactory; import static java.util.Collections.EMPTY_MAP; -import static java.util.Collections.emptyMap; -import static java.util.Collections.emptySet; import static java.util.Collections.emptySortedSet; import static org.apache.solr.common.util.Utils.fromJSON; @@ -108,7 +106,12 @@ public class ZkStateReader implements SolrCloseable { public static final String COLLECTIONS_ZKNODE = "/collections"; public static final String LIVE_NODES_ZKNODE = "/live_nodes"; public static final String ALIASES = "/aliases.json"; - public static final String CLUSTER_STATE = "/clusterstate.json"; + /** + * This ZooKeeper file is no longer used starting with Solr 9 but keeping the name around to check if it + * is still present and non empty (in case of upgrade from previous Solr version). It used to contain collection + * state for all collections in the cluster. + */ + public static final String UNSUPPORTED_CLUSTER_STATE = "/clusterstate.json"; public static final String CLUSTER_PROPS = "/clusterprops.json"; public static final String COLLECTION_PROPS_ZKNODE = "collectionprops.json"; public static final String REJOIN_AT_HEAD_PROP = "rejoinAtHead"; @@ -135,7 +138,6 @@ public class ZkStateReader implements SolrCloseable { public static final String CONFIGS_ZKNODE = "/configs"; public final static String CONFIGNAME_PROP = "configName"; - public static final String LEGACY_CLOUD = "legacyCloud"; public static final String SAMPLE_PERCENTAGE = "samplePercentage"; /** @@ -151,7 +153,7 @@ public class ZkStateReader implements SolrCloseable { public static final String REPLICA_TYPE = "type"; /** - * A view of the current state of all collections; combines all the different state sources into a single view. + * A view of the current state of all collections. */ protected volatile ClusterState clusterState; @@ -165,22 +167,12 @@ public class ZkStateReader implements SolrCloseable { public static final String ELECTION_NODE = "election"; /** - * Collections tracked in the legacy (shared) state format, reflects the contents of clusterstate.json. - */ - private Map legacyCollectionStates = emptyMap(); - - /** - * Last seen ZK version of clusterstate.json. - */ - private int legacyClusterStateVersion = 0; - - /** - * Collections with format2 state.json, "interesting" and actively watched. 
+ * "Interesting" and actively watched Collections. */ private final ConcurrentHashMap watchedCollectionStates = new ConcurrentHashMap<>(); /** - * Collections with format2 state.json, not "interesting" and not actively watched. + * "Interesting" but not actively watched Collections. */ private final ConcurrentHashMap lazyCollectionStates = new ConcurrentHashMap<>(); @@ -190,7 +182,7 @@ public class ZkStateReader implements SolrCloseable { private final ConcurrentHashMap watchedCollectionProps = new ConcurrentHashMap<>(); /** - * Collection properties being actively watched + * Watchers of Collection properties */ private final ConcurrentHashMap collectionPropsWatchers = new ConcurrentHashMap<>(); @@ -243,6 +235,7 @@ public AutoScalingConfig getAutoScalingConfig() throws KeeperException, Interrup * @return current configuration from autoscaling.json. NOTE: * this data is retrieved from ZK on each call. */ + @SuppressWarnings({"unchecked"}) public AutoScalingConfig getAutoScalingConfig(Watcher watcher) throws KeeperException, InterruptedException { Stat stat = new Stat(); @@ -271,7 +264,6 @@ public boolean canBeRemoved() { } public static final Set KNOWN_CLUSTER_PROPS = Set.of( - LEGACY_CLOUD, URL_SCHEME, AUTO_ADD_REPLICAS, CoreAdminParams.BACKUP_LOCATION, @@ -386,7 +378,6 @@ public void forciblyRefreshAllClusterStateSlow() throws KeeperException, Interru // No need to set watchers because we should already have watchers registered for everything. refreshCollectionList(null); refreshLiveNodes(null); - refreshLegacyClusterState(null); // Need a copy so we don't delete from what we're iterating over. Collection safeCopy = new ArrayList<>(watchedCollectionStates.keySet()); Set updatedCollections = new HashSet<>(); @@ -414,28 +405,21 @@ public void forceUpdateCollection(String collection) throws KeeperException, Int } ClusterState.CollectionRef ref = clusterState.getCollectionRef(collection); - if (ref == null || legacyCollectionStates.containsKey(collection)) { - // We either don't know anything about this collection (maybe it's new?) or it's legacy. - // First update the legacy cluster state. - log.debug("Checking legacy cluster state for collection {}", collection); - refreshLegacyClusterState(null); - if (!legacyCollectionStates.containsKey(collection)) { - // No dice, see if a new collection just got created. - LazyCollectionRef tryLazyCollection = new LazyCollectionRef(collection); - if (tryLazyCollection.get() != null) { - // What do you know, it exists! - log.debug("Adding lazily-loaded reference for collection {}", collection); - lazyCollectionStates.putIfAbsent(collection, tryLazyCollection); - constructState(Collections.singleton(collection)); - } + if (ref == null) { + // We either don't know anything about this collection (maybe it's new?). + // see if it just got created. + LazyCollectionRef tryLazyCollection = new LazyCollectionRef(collection); + if (tryLazyCollection.get() != null) { + // What do you know, it exists! + log.debug("Adding lazily-loaded reference for collection {}", collection); + lazyCollectionStates.putIfAbsent(collection, tryLazyCollection); + constructState(Collections.singleton(collection)); } } else if (ref.isLazilyLoaded()) { log.debug("Refreshing lazily-loaded state for collection {}", collection); if (ref.get() != null) { return; } - // Edge case: if there's no external collection, try refreshing legacy cluster state in case it's there. 
- refreshLegacyClusterState(null); } else if (watchedCollectionStates.containsKey(collection)) { // Exists as a watched collection, force a refresh. log.debug("Forcing refresh of watched collection state for {}", collection); @@ -444,10 +428,9 @@ public void forceUpdateCollection(String collection) throws KeeperException, Int constructState(Collections.singleton(collection)); } } else { - log.error("Collection {} is not lazy or watched!", collection); + log.error("Collection {} is not lazy nor watched!", collection); } } - } /** @@ -487,40 +470,38 @@ public Integer compareStateVersions(String coll, int version) { return collection.getZNodeVersion(); } - public synchronized void createClusterStateWatchersAndUpdate() throws KeeperException, - InterruptedException { + @SuppressWarnings({"unchecked"}) + public synchronized void createClusterStateWatchersAndUpdate() throws KeeperException, InterruptedException { // We need to fetch the current cluster state and the set of live nodes log.debug("Updating cluster state from ZooKeeper... "); - // Sanity check ZK structure. - if (!zkClient.exists(CLUSTER_STATE, true)) { - throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, - "Cannot connect to cluster at " + zkClient.getZkServerAddress() + ": cluster not found/not ready"); - } + try { + // on reconnect of SolrZkClient force refresh and re-add watches. + loadClusterProperties(); + refreshLiveNodes(new LiveNodeWatcher()); + refreshCollections(); + refreshCollectionList(new CollectionsChildWatcher()); + refreshAliases(aliasesManager); + + if (securityNodeListener != null) { + addSecurityNodeWatcher(pair -> { + ConfigData cd = new ConfigData(); + cd.data = pair.first() == null || pair.first().length == 0 ? EMPTY_MAP : Utils.getDeepCopy((Map) fromJSON(pair.first()), 4, false); + cd.version = pair.second() == null ? -1 : pair.second().getVersion(); + securityData = cd; + securityNodeListener.run(); + }); + securityData = getSecurityProps(true); + } - // on reconnect of SolrZkClient force refresh and re-add watches. - loadClusterProperties(); - refreshLiveNodes(new LiveNodeWatcher()); - refreshLegacyClusterState(new LegacyClusterStateWatcher()); - refreshStateFormat2Collections(); - refreshCollectionList(new CollectionsChildWatcher()); - refreshAliases(aliasesManager); - - if (securityNodeListener != null) { - addSecurityNodeWatcher(pair -> { - ConfigData cd = new ConfigData(); - cd.data = pair.first() == null || pair.first().length == 0 ? EMPTY_MAP : Utils.getDeepCopy((Map) fromJSON(pair.first()), 4, false); - cd.version = pair.second() == null ? -1 : pair.second().getVersion(); - securityData = cd; - securityNodeListener.run(); + collectionPropsObservers.forEach((k, v) -> { + collectionPropsWatchers.computeIfAbsent(k, PropsWatcher::new).refreshAndWatch(true); }); - securityData = getSecurityProps(true); + } catch (KeeperException.NoNodeException nne) { + throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, + "Cannot connect to cluster at " + zkClient.getZkServerAddress() + ": cluster not found/not ready"); } - - collectionPropsObservers.forEach((k, v) -> { - collectionPropsWatchers.computeIfAbsent(k, PropsWatcher::new).refreshAndWatch(true); - }); } private void addSecurityNodeWatcher(final Callable> callback) @@ -539,9 +520,14 @@ public void process(WatchedEvent event) { log.debug("Updating [{}] ... 
", SOLR_SECURITY_CONF_PATH); // remake watch - final Watcher thisWatch = this; final Stat stat = new Stat(); - final byte[] data = getZkClient().getData(SOLR_SECURITY_CONF_PATH, thisWatch, stat, true); + byte[] data = "{}".getBytes(StandardCharsets.UTF_8); + if (EventType.NodeDeleted.equals(event.getType())) { + // Node deleted, just recreate watch without attempting a read - SOLR-9679 + getZkClient().exists(SOLR_SECURITY_CONF_PATH, this, true); + } else { + data = getZkClient().getData(SOLR_SECURITY_CONF_PATH, this, stat, true); + } try { callback.call(new Pair<>(data, stat)); } catch (Exception e) { @@ -574,13 +560,11 @@ private void constructState(Set changedCollections) { Set liveNodes = this.liveNodes; // volatile read - // Legacy clusterstate is authoritative, for backwards compatibility. - // To move a collection's state to format2, first create the new state2 format node, then remove legacy entry. - Map result = new LinkedHashMap<>(legacyCollectionStates); + Map result = new LinkedHashMap<>(); - // Add state format2 collections, but don't override legacy collection states. + // Add collections for (Map.Entry entry : watchedCollectionStates.entrySet()) { - result.putIfAbsent(entry.getKey(), new ClusterState.CollectionRef(entry.getValue())); + result.put(entry.getKey(), new ClusterState.CollectionRef(entry.getValue())); } // Finally, add any lazy collections that aren't already accounted for. @@ -588,11 +572,10 @@ private void constructState(Set changedCollections) { result.putIfAbsent(entry.getKey(), entry.getValue()); } - this.clusterState = new ClusterState(liveNodes, result, legacyClusterStateVersion); + this.clusterState = new ClusterState(result, liveNodes); if (log.isDebugEnabled()) { - log.debug("clusterStateSet: legacy [{}] interesting [{}] watched [{}] lazy [{}] total [{}]", - legacyCollectionStates.keySet().size(), + log.debug("clusterStateSet: interesting [{}] watched [{}] lazy [{}] total [{}]", collectionWatches.keySet().size(), watchedCollectionStates.keySet().size(), lazyCollectionStates.keySet().size(), @@ -600,8 +583,7 @@ private void constructState(Set changedCollections) { } if (log.isTraceEnabled()) { - log.trace("clusterStateSet: legacy [{}] interesting [{}] watched [{}] lazy [{}] total [{}]", - legacyCollectionStates.keySet(), + log.trace("clusterStateSet: interesting [{}] watched [{}] lazy [{}] total [{}]", collectionWatches.keySet(), watchedCollectionStates.keySet(), lazyCollectionStates.keySet(), @@ -617,51 +599,9 @@ private void constructState(Set changedCollections) { } /** - * Refresh legacy (shared) clusterstate.json + * Refresh collections. */ - private void refreshLegacyClusterState(Watcher watcher) throws KeeperException, InterruptedException { - try { - final Stat stat = new Stat(); - final byte[] data = zkClient.getData(CLUSTER_STATE, watcher, stat, true); - final ClusterState loadedData = ClusterState.load(stat.getVersion(), data, emptySet(), CLUSTER_STATE); - synchronized (getUpdateLock()) { - if (this.legacyClusterStateVersion >= stat.getVersion()) { - // Nothing to do, someone else updated same or newer. - return; - } - Set updatedCollections = new HashSet<>(); - for (String coll : this.collectionWatches.keySet()) { - ClusterState.CollectionRef ref = this.legacyCollectionStates.get(coll); - // legacy collections are always in-memory - DocCollection oldState = ref == null ? null : ref.get(); - ClusterState.CollectionRef newRef = loadedData.getCollectionStates().get(coll); - DocCollection newState = newRef == null ? 
null : newRef.get(); - if (newState == null) { - // check that we haven't just migrated - newState = watchedCollectionStates.get(coll); - } - if (!Objects.equals(oldState, newState)) { - updatedCollections.add(coll); - } - } - this.legacyCollectionStates = loadedData.getCollectionStates(); - this.legacyClusterStateVersion = stat.getVersion(); - constructState(updatedCollections); - } - } catch (KeeperException.NoNodeException e) { - // Ignore missing legacy clusterstate.json. - synchronized (getUpdateLock()) { - this.legacyCollectionStates = emptyMap(); - this.legacyClusterStateVersion = 0; - constructState(Collections.emptySet()); - } - } - } - - /** - * Refresh state format2 collections. - */ - private void refreshStateFormat2Collections() { + private void refreshCollections() { for (String coll : collectionWatches.keySet()) { new StateWatcher(coll).refreshAndWatch(); } @@ -671,17 +611,7 @@ private void refreshStateFormat2Collections() { private final Object refreshCollectionListLock = new Object(); /** - * Search for any lazy-loadable state format2 collections. - *
    - * A stateFormat=1 collection which is not interesting to us can also - * be put into the {@link #lazyCollectionStates} map here. But that is okay - * because {@link #constructState(Set)} will give priority to collections in the - * shared collection state over this map. - * In fact this is a clever way to avoid doing a ZK exists check on - * the /collections/collection_name/state.json znode - * Such an exists check is done in {@link ClusterState#hasCollection(String)} and - * {@link ClusterState#getCollectionsMap()} methods - * have a safeguard against exposing wrong collection names to the users + * Search for any lazy-loadable collections. */ private void refreshCollectionList(Watcher watcher) throws KeeperException, InterruptedException { synchronized (refreshCollectionListLock) { @@ -755,7 +685,6 @@ private void notifyCloudCollectionsListeners(boolean notifyIfSame) { private Set getCurrentCollections() { Set collections = new HashSet<>(); - collections.addAll(legacyCollectionStates.keySet()); collections.addAll(watchedCollectionStates.keySet()); collections.addAll(lazyCollectionStates.keySet()); return collections; @@ -1085,6 +1014,7 @@ public T getClusterProperty(String key, T defaultValue) { * @param defaultValue a default value to use if no such property exists * @return the cluster property, or a default if the property is not set */ + @SuppressWarnings({"unchecked"}) public T getClusterProperty(List keyPath, T defaultValue) { T value = (T) Utils.getObjectByPath(clusterProperties, false, keyPath); if (value == null) @@ -1256,6 +1186,7 @@ private VersionedCollectionProps fetchCollectionProperties(String collection, Wa * Returns the content of /security.json from ZooKeeper as a Map * If the files doesn't exist, it returns null. */ + @SuppressWarnings({"unchecked"}) public ConfigData getSecurityProps(boolean getFresh) { if (!getFresh) { if (securityData == null) return new ConfigData(EMPTY_MAP, -1); @@ -1347,44 +1278,6 @@ public void refreshAndWatch() { } } - /** - * Watches the legacy clusterstate.json. - */ - class LegacyClusterStateWatcher implements Watcher { - - @Override - public void process(WatchedEvent event) { - // session events are not change events, and do not remove the watcher - if (EventType.None.equals(event.getType())) { - return; - } - int liveNodesSize = ZkStateReader.this.clusterState == null ? 0 : ZkStateReader.this.clusterState.getLiveNodes().size(); - log.debug("A cluster state change: [{}], has occurred - updating... (live nodes size: [{}])", event, liveNodesSize); - refreshAndWatch(); - } - - /** - * Must hold {@link #getUpdateLock()} before calling this method. 
- */ - public void refreshAndWatch() { - try { - refreshLegacyClusterState(this); - } catch (KeeperException.NoNodeException e) { - throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, - "Cannot connect to cluster at " + zkClient.getZkServerAddress() + ": cluster not found/not ready"); - } catch (KeeperException.SessionExpiredException | KeeperException.ConnectionLossException e) { - log.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: [{}]", e.getMessage()); - } catch (KeeperException e) { - log.error("A ZK error has occurred", e); - throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "A ZK error has occurred", e); - } catch (InterruptedException e) { - // Restore the interrupted status - Thread.currentThread().interrupt(); - log.warn("Interrupted", e); - } - } - } - /** * Watches collection properties */ @@ -1562,8 +1455,7 @@ private DocCollection fetchCollectionState(String coll, Watcher watcher) throws try { Stat stat = new Stat(); byte[] data = zkClient.getData(collectionPath, watcher, stat, true); - ClusterState state = ClusterState.load(stat.getVersion(), data, - Collections.emptySet(), collectionPath); + ClusterState state = ClusterState.createFromJson(stat.getVersion(), data, Collections.emptySet()); ClusterState.CollectionRef collectionRef = state.getCollectionStates().get(coll); return collectionRef == null ? null : collectionRef.get(); } catch (KeeperException.NoNodeException e) { diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/rule/ImplicitSnitch.java b/solr/solrj/src/java/org/apache/solr/common/cloud/rule/ImplicitSnitch.java index e00bf7253f3e..4aa7f4f16e72 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/rule/ImplicitSnitch.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/rule/ImplicitSnitch.java @@ -96,6 +96,7 @@ protected void getRemoteInfo(String solrNode, Set requestedTags, SnitchC } private void fillRole(String solrNode, SnitchContext ctx, String key) throws KeeperException, InterruptedException { + @SuppressWarnings({"rawtypes"}) Map roles = (Map) ctx.retrieve(ZkStateReader.ROLES); // we don't want to hit the ZK for each node // so cache and reuse try { @@ -106,10 +107,12 @@ private void fillRole(String solrNode, SnitchContext ctx, String key) throws Kee } } - private void cacheRoles(String solrNode, SnitchContext ctx, String key, Map roles) { + private void cacheRoles(String solrNode, SnitchContext ctx, String key, + @SuppressWarnings({"rawtypes"})Map roles) { ctx.store(ZkStateReader.ROLES, roles); if (roles != null) { for (Object o : roles.entrySet()) { + @SuppressWarnings({"rawtypes"}) Map.Entry e = (Map.Entry) o; if (e.getValue() instanceof List) { if (((List) e.getValue()).contains(solrNode)) { diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/rule/Snitch.java b/solr/solrj/src/java/org/apache/solr/common/cloud/rule/Snitch.java index 7f9cbcd08824..b429ebdf1e45 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/rule/Snitch.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/rule/Snitch.java @@ -23,6 +23,7 @@ * */ public abstract class Snitch { + @SuppressWarnings({"rawtypes"}) public static final Set WELL_KNOWN_SNITCHES = Collections.singleton(ImplicitSnitch.class); public abstract void getTags(String solrNode, Set requestedTags, SnitchContext ctx); diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/rule/SnitchContext.java b/solr/solrj/src/java/org/apache/solr/common/cloud/rule/SnitchContext.java index 0bd551cc4dd8..2720fb956d5d 100644 --- 
a/solr/solrj/src/java/org/apache/solr/common/cloud/rule/SnitchContext.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/rule/SnitchContext.java @@ -64,6 +64,7 @@ public Map getNodeValues(String node, Collection tags){ return Collections.emptyMap(); } + @SuppressWarnings({"rawtypes"}) public abstract Map getZkJson(String path) throws KeeperException, InterruptedException; public String getNode() { diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java index 89f1600722b0..8e8a027b022f 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java @@ -70,6 +70,15 @@ public boolean isHigherOrEqual(LockLevel that) { } } + /** + *
(Mostly) Collection API actions that can be sent by nodes to the Overseer over the /overseer/collection-queue-work + * ZooKeeper queue. + * + * Some of these actions are also used over the cluster state update queue at /overseer/queue and have a + * different (though related) meaning there. These actions are: + * {@link #CREATE}, {@link #DELETE}, {@link #CREATESHARD}, {@link #DELETESHARD}, {@link #ADDREPLICA}, {@link #ADDREPLICAPROP}, + * {@link #DELETEREPLICAPROP}, {@link #BALANCESHARDUNIQUE} and {@link #MODIFYCOLLECTION}.
    + */ enum CollectionAction { CREATE(true, LockLevel.COLLECTION), DELETE(true, LockLevel.COLLECTION), @@ -103,7 +112,6 @@ enum CollectionAction { BALANCESHARDUNIQUE(true, LockLevel.SHARD), REBALANCELEADERS(true, LockLevel.COLLECTION), MODIFYCOLLECTION(true, LockLevel.COLLECTION), - MIGRATESTATEFORMAT(true, LockLevel.CLUSTER), BACKUP(true, LockLevel.COLLECTION), RESTORE(true, LockLevel.COLLECTION), CREATESNAPSHOT(true, LockLevel.COLLECTION), diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java index 2261972d2894..bc1ed097753c 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java @@ -162,6 +162,12 @@ public interface CommonParams { * Timeout value in milliseconds. If not set, or the value is >= 0, there is no timeout. */ String TIME_ALLOWED = "timeAllowed"; + + /** + * The number of hits that need to be counted accurately. If more than {@link #MIN_EXACT_COUNT} documents + * match a query, then the value in "numFound" may be an estimate to speedup search. + */ + String MIN_EXACT_COUNT = "minExactCount"; /** 'true' if the header should include the handler name */ String HEADER_ECHO_HANDLER = "echoHandler"; diff --git a/solr/solrj/src/java/org/apache/solr/common/params/MapSolrParams.java b/solr/solrj/src/java/org/apache/solr/common/params/MapSolrParams.java index 5454fcac277b..e8c40e97fe0f 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/MapSolrParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/MapSolrParams.java @@ -20,33 +20,29 @@ import java.util.Map; /** - * + * {@link SolrParams} implementation that can be built from and is backed by a {@link Map}. */ public class MapSolrParams extends SolrParams { protected final Map map; public MapSolrParams(Map map) { + assert map.entrySet().stream().allMatch(e -> { + boolean hasStringKey = e.getKey() == null || e.getKey().getClass() == String.class; + boolean hasStringValue = e.getValue() == null || e.getValue().getClass() == String.class; + return hasStringKey && hasStringValue; + }); this.map = map; } @Override public String get(String name) { - Object o = map.get(name); - if(o == null) return null; - if (o instanceof String) return (String) o; - if (o instanceof String[]) { - String[] strings = (String[]) o; - if(strings.length == 0) return null; - return strings[0]; - } - return String.valueOf(o); + return map.get(name); } @Override public String[] getParams(String name) { - Object val = map.get(name); - if (val instanceof String[]) return (String[]) val; - return val==null ? null : new String[]{String.valueOf(val)}; + String val = map.get(name); + return val == null ? 
null : new String[] { val }; } @Override diff --git a/solr/solrj/src/java/org/apache/solr/common/params/MultiMapSolrParams.java b/solr/solrj/src/java/org/apache/solr/common/params/MultiMapSolrParams.java index 8a5416b40346..c544155e8727 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/MultiMapSolrParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/MultiMapSolrParams.java @@ -55,6 +55,11 @@ public static void addParam(String name, String[] vals, Map map public MultiMapSolrParams(Map map) { + assert map.entrySet().stream().allMatch(e -> { + boolean hasStringKey = e.getKey() == null || e.getKey().getClass() == String.class; + boolean hasStringArrayValue = e.getValue() == null || e.getValue().getClass() == String[].class; + return hasStringKey && hasStringArrayValue; + }); this.map = map; } diff --git a/solr/solrj/src/java/org/apache/solr/common/params/SolrParams.java b/solr/solrj/src/java/org/apache/solr/common/params/SolrParams.java index 08022b2e4ded..c729534ba40c 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/SolrParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/SolrParams.java @@ -458,7 +458,7 @@ public static SolrParams wrapAppended(SolrParams params, SolrParams defaults) { /** Create a Map<String,String> from a NamedList given no keys are repeated */ @Deprecated // Doesn't belong here (no SolrParams). Just remove. - public static Map toMap(NamedList params) { + public static Map toMap(@SuppressWarnings({"rawtypes"})NamedList params) { HashMap map = new HashMap<>(); for (int i=0; i toMap(NamedList params) { /** Create a Map<String,String[]> from a NamedList */ @Deprecated // Doesn't belong here (no SolrParams). Just remove. - public static Map toMultiMap(NamedList params) { + public static Map toMultiMap(@SuppressWarnings({"rawtypes"})NamedList params) { HashMap map = new HashMap<>(); for (int i=0; i toMultiMap(NamedList params) { if (val instanceof String[]) { MultiMapSolrParams.addParam(name, (String[]) val, map); } else if (val instanceof List) { + @SuppressWarnings({"rawtypes"}) List l = (List) val; String[] s = new String[l.size()]; for (int j = 0; j < l.size(); j++) { @@ -494,7 +495,7 @@ public static Map toMultiMap(NamedList params) { * @deprecated Use {@link NamedList#toSolrParams()}. */ @Deprecated //move to NamedList to allow easier flow - public static SolrParams toSolrParams(NamedList params) { + public static SolrParams toSolrParams(@SuppressWarnings({"rawtypes"})NamedList params) { return params.toSolrParams(); } diff --git a/solr/solrj/src/java/org/apache/solr/common/params/StreamParams.java b/solr/solrj/src/java/org/apache/solr/common/params/StreamParams.java new file mode 100644 index 000000000000..417b8495e734 --- /dev/null +++ b/solr/solrj/src/java/org/apache/solr/common/params/StreamParams.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.common.params; + +/** + * Streaming Expressions Parameters and Properties. + */ +public interface StreamParams { + + // parameters + String EXPR = "expr"; + + // stream properties + String TUPLE = "tuple"; + String DOCS = "docs"; + String RETURN_VALUE = "return-value"; + String RESULT_SET = "result-set"; + + // tuple properties + String RESPONSE_TIME = "RESPONSE_TIME"; + String EOF = "EOF"; + String EXCEPTION = "EXCEPTION"; + String METRICS = "_METRICS_"; + + // other common tuple properties + String P_VALUE = "p-value"; +} diff --git a/solr/solrj/src/java/org/apache/solr/common/util/ByteArrayUtf8CharSequence.java b/solr/solrj/src/java/org/apache/solr/common/util/ByteArrayUtf8CharSequence.java index 7a4abe2c3032..0a65b0d62f2d 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/ByteArrayUtf8CharSequence.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/ByteArrayUtf8CharSequence.java @@ -180,6 +180,7 @@ public ByteArrayUtf8CharSequence deepCopy() { return new ByteArrayUtf8CharSequence(bytes, 0, length, utf16, hashCode); } + @SuppressWarnings({"unchecked", "rawtypes"}) public static Map.Entry convertCharSeq(Map.Entry e) { if (e.getKey() instanceof Utf8CharSequence || e.getValue() instanceof Utf8CharSequence) { return new AbstractMap.SimpleEntry(convertCharSeq(e.getKey()), convertCharSeq(e.getValue())); @@ -188,6 +189,7 @@ public static Map.Entry convertCharSeq(Map.Entry e) { } + @SuppressWarnings({"unchecked", "rawtypes"}) public static Collection convertCharSeq(Collection vals) { if (vals == null) return vals; boolean needsCopy = false; @@ -200,9 +202,9 @@ public static Collection convertCharSeq(Collection vals) { if (needsCopy) { Collection copy = null; if (vals instanceof Set){ - copy = new HashSet(vals.size()); + copy = new HashSet<>(vals.size()); } else { - copy = new ArrayList(vals.size()); + copy = new ArrayList<>(vals.size()); } for (Object o : vals) copy.add(convertCharSeq(o)); return copy; diff --git a/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java b/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java index cc1e2d3c2918..8ca7282b9d49 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java @@ -70,6 +70,7 @@ public void setCommandData(Object o) { commandData = o; } + @SuppressWarnings({"unchecked"}) public Map getDataMap() { if (commandData instanceof Map) { //noinspection unchecked @@ -100,6 +101,7 @@ private Object getMapVal(String key) { return commandData; } if (commandData instanceof Map) { + @SuppressWarnings({"rawtypes"}) Map metaData = (Map) commandData; return metaData.get(key); } else { @@ -170,6 +172,7 @@ public String getStr(String key) { return s; } + @SuppressWarnings({"rawtypes"}) private Map errorDetails() { return Utils.makeMap(name, commandData, ERR_MSGS, errors); } @@ -207,6 +210,7 @@ public List getErrors() { public static final String ERR_MSGS = "errorMessages"; public static final String ROOT_OBJ = ""; + @SuppressWarnings({"rawtypes"}) public static List captureErrors(List ops) { List errors = new ArrayList<>(); for (CommandOperation op : ops) { @@ -226,6 +230,7 @@ public static List parse(Reader rdr) throws IOException { * Parse the command operations into command objects from javabin payload * * @param singletonCommands commands that cannot be repeated */ + 
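Tying together two additions from earlier hunks: the new `CommonParams.MIN_EXACT_COUNT` parameter and the `numFoundExact` value that the javabin codecs now carry as a fourth element of the `SolrDocumentList` header (older peers omit it, hence the `list.size() > 3` back-compat checks below). A hedged client-side sketch; the collection name is illustrative and the `getNumFoundExact()` accessor is assumed to mirror the `setNumFoundExact` setter shown in this diff:

```java
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.CommonParams;

public class MinExactCountSketch {
  static void query(SolrClient client) throws Exception {
    SolrQuery q = new SolrQuery("*:*");
    q.set(CommonParams.MIN_EXACT_COUNT, 100); // count hits exactly only up to 100
    QueryResponse rsp = client.query("techproducts", q);
    SolrDocumentList docs = rsp.getResults();
    // numFoundExact == false means numFound is a lower-bound estimate.
    System.out.println(docs.getNumFound() + " exact=" + docs.getNumFoundExact());
  }
}
```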
@SuppressWarnings({"unchecked", "rawtypes"}) public static List parse(InputStream in, Set singletonCommands) throws IOException { List operations = new ArrayList<>(); @@ -288,6 +293,7 @@ public static List parse(Reader rdr, Set singletonComm ev = parser.nextEvent(); Object val = ob.getVal(); if (val instanceof List && !singletonCommands.contains(key)) { + @SuppressWarnings({"rawtypes"}) List list = (List) val; for (Object o : list) { if (!(o instanceof Map)) { @@ -308,6 +314,7 @@ public CommandOperation getCopy() { return new CommandOperation(name, commandData); } + @SuppressWarnings({"rawtypes"}) public Map getMap(String key, Map def) { Object o = getMapVal(key); if (o == null) return def; @@ -325,7 +332,8 @@ public String toString() { return new String(toJSON(singletonMap(name, commandData)), StandardCharsets.UTF_8); } - public static List readCommands(Iterable streams, NamedList resp) throws IOException { + public static List readCommands(Iterable streams, + @SuppressWarnings({"rawtypes"})NamedList resp) throws IOException { return readCommands(streams, resp, Collections.emptySet()); } @@ -339,7 +347,9 @@ public static List readCommands(Iterable stream * @return parsed list of commands * @throws IOException if there is an error while parsing the stream */ - public static List readCommands(Iterable streams, NamedList resp, Set singletonCommands) + @SuppressWarnings({"unchecked"}) + public static List readCommands(Iterable streams, + @SuppressWarnings({"rawtypes"})NamedList resp, Set singletonCommands) throws IOException { if (streams == null) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "missing content stream"); @@ -353,6 +363,7 @@ public static List readCommands(Iterable stream ops.addAll(parse(stream.getReader(), singletonCommands)); } } + @SuppressWarnings({"rawtypes"}) List errList = CommandOperation.captureErrors(ops); if (!errList.isEmpty()) { resp.add(CommandOperation.ERR_MSGS, errList); diff --git a/solr/solrj/src/java/org/apache/solr/common/util/ContentStreamBase.java b/solr/solrj/src/java/org/apache/solr/common/util/ContentStreamBase.java index c2da9afc6552..4f77b5d48750 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/ContentStreamBase.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/ContentStreamBase.java @@ -313,7 +313,8 @@ public String getSourceInfo() { public void setSourceInfo(String sourceInfo) { this.sourceInfo = sourceInfo; } - public static ContentStream create(RequestWriter requestWriter, SolrRequest req) throws IOException { + public static ContentStream create(RequestWriter requestWriter, + @SuppressWarnings({"rawtypes"})SolrRequest req) throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); RequestWriter.ContentWriter contentWriter = requestWriter.getContentWriter(req); contentWriter.write(baos); diff --git a/solr/solrj/src/java/org/apache/solr/common/util/ExecutorUtil.java b/solr/solrj/src/java/org/apache/solr/common/util/ExecutorUtil.java index c90a1abf8900..670a6b428295 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/ExecutorUtil.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/ExecutorUtil.java @@ -184,9 +184,11 @@ public void execute(final Runnable command) { final String submitterContextStr = ctxStr.length() <= MAX_THREAD_NAME_LEN ? ctxStr : ctxStr.substring(0, MAX_THREAD_NAME_LEN); final Exception submitterStackTrace = enableSubmitterStackTrace ? 
new Exception("Submitter stack trace") : null; final List providersCopy = providers; + @SuppressWarnings({"rawtypes"}) final ArrayList ctx = providersCopy.isEmpty() ? null : new ArrayList<>(providersCopy.size()); if (ctx != null) { for (int i = 0; i < providers.size(); i++) { + @SuppressWarnings({"rawtypes"}) AtomicReference reference = new AtomicReference(); ctx.add(reference); providersCopy.get(i).store(reference); diff --git a/solr/solrj/src/java/org/apache/solr/common/util/FastJavaBinDecoder.java b/solr/solrj/src/java/org/apache/solr/common/util/FastJavaBinDecoder.java index 15084944b3c0..c5ebd06f6646 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/FastJavaBinDecoder.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/FastJavaBinDecoder.java @@ -472,13 +472,18 @@ public void stream(EntryImpl entry, StreamCodec codec) throws IOException { } @Override + @SuppressWarnings({"unchecked"}) public Object readObject(StreamCodec codec, EntryImpl entry) throws IOException { SolrDocumentList solrDocs = new SolrDocumentList(); if(entry.metadata != null){ + @SuppressWarnings({"rawtypes"}) List list = (List) entry.metadata; solrDocs.setNumFound((Long) list.get(0)); solrDocs.setStart((Long) list.get(1)); solrDocs.setMaxScore((Float) list.get(2)); + if (list.size() > 3) { //needed for back compatibility + solrDocs.setNumFoundExact((Boolean)list.get(3)); + } } List l = codec.readArray(codec.dis, entry.size); solrDocs.addAll(l); @@ -776,6 +781,7 @@ public void skip(EntryImpl entry, StreamCodec codec) throws IOException { } } + @SuppressWarnings({"unchecked", "rawtypes"}) private static void addObj(DataEntry e) { if (e.type().isContainer) { Object ctx = e.type() == DataEntry.Type.KEYVAL_ITER ? diff --git a/solr/solrj/src/java/org/apache/solr/common/util/Hash.java b/solr/solrj/src/java/org/apache/solr/common/util/Hash.java index 6a7a8d76fc0f..d818a027ff69 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/Hash.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/Hash.java @@ -239,6 +239,7 @@ public static long lookup3ycs64(CharSequence s, int start, int end, long initval /** Returns the MurmurHash3_x86_32 hash. * Original source/tests at https://github.com/yonik/java_util/ */ + @SuppressWarnings({"fallthrough"}) public static int murmurhash3_x86_32(byte[] data, int offset, int len, int seed) { final int c1 = 0xcc9e2d51; @@ -456,6 +457,7 @@ public static final long getLongLittleEndian(byte[] buf, int offset) { /** Returns the MurmurHash3_x64_128 hash, placing the result in "out". */ + @SuppressWarnings({"fallthrough"}) public static void murmurhash3_x64_128(byte[] key, int offset, int len, int seed, LongPair out) { // The original algorithm does have a 32 bit unsigned seed. // We have to mask to match the behavior of the unsigned types and prevent sign extension. 
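The `murmurhash3_x86_32` methods above now carry an explicit `@SuppressWarnings({"fallthrough"})` because their switch over trailing bytes falls through by design. For context, a small sketch of how this hash is used elsewhere in Solr, e.g. by the hash-based routers to place ids on the hash ring; the id value is made up and seed 0 matches router usage:

```java
import org.apache.solr.common.util.Hash;

public class MurmurSketch {
  public static void main(String[] args) {
    String id = "tenantA!doc42"; // illustrative composite id
    // CharSequence overload; seed 0 is what Solr's document routing uses.
    int hash = Hash.murmurhash3_x86_32(id, 0, id.length(), 0);
    System.out.printf("%s -> %08x%n", id, hash);
  }
}
```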
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java b/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java index e50c7424aa5f..fb45e883fe30 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java @@ -351,6 +351,7 @@ protected Object readObject(DataInputInputStream dis) throws IOException { throw new RuntimeException("Unknown type " + tagByte); } + @SuppressWarnings({"unchecked", "rawtypes"}) public boolean writeKnownType(Object val) throws IOException { if (writePrimitive(val)) return true; if (val instanceof NamedList) { @@ -588,10 +589,14 @@ public SolrDocument readSolrDocument(DataInputInputStream dis) throws IOExceptio public SolrDocumentList readSolrDocumentList(DataInputInputStream dis) throws IOException { SolrDocumentList solrDocs = new SolrDocumentList(); - List list = (List) readVal(dis); + @SuppressWarnings("unchecked") + List list = (List) readVal(dis); solrDocs.setNumFound((Long) list.get(0)); solrDocs.setStart((Long) list.get(1)); solrDocs.setMaxScore((Float) list.get(2)); + if (list.size() > 3) { //needed for back compatibility + solrDocs.setNumFoundExact((Boolean)list.get(3)); + } @SuppressWarnings("unchecked") List l = (List) readVal(dis); @@ -602,10 +607,11 @@ public SolrDocumentList readSolrDocumentList(DataInputInputStream dis) throws IO public void writeSolrDocumentList(SolrDocumentList docs) throws IOException { writeTag(SOLRDOCLST); - List l = new ArrayList<>(3); + List l = new ArrayList<>(4); l.add(docs.getNumFound()); l.add(docs.getStart()); l.add(docs.getMaxScore()); + l.add(docs.getNumFoundExact()); writeArray(l); writeArray(docs); } @@ -747,7 +753,7 @@ public void writeIterator(IteratorWriter val) throws IOException { val.writeIter(itemWriter); writeTag(END); } - public void writeIterator(Iterator iter) throws IOException { + public void writeIterator(@SuppressWarnings({"rawtypes"})Iterator iter) throws IOException { writeTag(ITERATOR); while (iter.hasNext()) { writeVal(iter.next()); @@ -765,14 +771,14 @@ public List readIterator(DataInputInputStream fis) throws IOException { return l; } - public void writeArray(List l) throws IOException { + public void writeArray(@SuppressWarnings({"rawtypes"})List l) throws IOException { writeTag(ARR, l.size()); for (int i = 0; i < l.size(); i++) { writeVal(l.get(i)); } } - public void writeArray(Collection coll) throws IOException { + public void writeArray(@SuppressWarnings({"rawtypes"})Collection coll) throws IOException { writeTag(ARR, coll.size()); for (Object o : coll) { writeVal(o); @@ -788,11 +794,13 @@ public void writeArray(Object[] arr) throws IOException { } } + @SuppressWarnings({"unchecked"}) public List readArray(DataInputInputStream dis) throws IOException { int sz = readSize(dis); return readArray(dis, sz); } + @SuppressWarnings({"rawtypes"}) protected List readArray(DataInputInputStream dis, int sz) throws IOException { ArrayList l = new ArrayList<>(sz); for (int i = 0; i < sz; i++) { @@ -811,7 +819,7 @@ public void writeEnumFieldValue(EnumFieldValue enumFieldValue) throws IOExceptio writeStr(enumFieldValue.toString()); } - public void writeMapEntry(Map.Entry val) throws IOException { + public void writeMapEntry(Map.Entry val) throws IOException { writeTag(MAP_ENTRY); writeVal(val.getKey()); writeVal(val.getValue()); diff --git a/solr/solrj/src/java/org/apache/solr/common/util/JsonRecordReader.java b/solr/solrj/src/java/org/apache/solr/common/util/JsonRecordReader.java index 
f66c88e4118c..d44dea173eca 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/JsonRecordReader.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/JsonRecordReader.java @@ -117,6 +117,7 @@ private void addField(String path, String fieldName, boolean multiValued, boolea * @param r the stream reader * @return results a List of emitted records */ + @SuppressWarnings({"unchecked"}) public List> getAllRecords(Reader r) throws IOException { final List> results = new ArrayList<>(); // Deep copy is required here because the stream might hold on to the map @@ -342,6 +343,7 @@ class Wrapper extends MethodFrameWrapper { } @Override + @SuppressWarnings({"unchecked"}) public void walk(int event) throws IOException { if (event == OBJECT_START) { walkObject(); @@ -352,6 +354,7 @@ public void walk(int event) throws IOException { // ensure that the value is of type List final Object val = values.get(name); if (val != null && !(val instanceof List)) { + @SuppressWarnings({"rawtypes"}) final ArrayList listVal = new ArrayList(1); listVal.add(val); values.put(name, listVal); @@ -446,6 +449,7 @@ void walkObject() throws IOException { } } + @SuppressWarnings({"unchecked"}) private void addChildDoc2ParentDoc(Map record, Map values, String key) { record = Utils.getDeepCopy(record, 2); Object oldVal = values.get(key); @@ -454,6 +458,7 @@ record = Utils.getDeepCopy(record, 2); } else if (oldVal instanceof List) { ((List) oldVal).add(record); } else { + @SuppressWarnings({"rawtypes"}) ArrayList l = new ArrayList(); l.add(oldVal); l.add(record); @@ -476,6 +481,7 @@ private boolean isRecord() { } + @SuppressWarnings({"unchecked"}) private void putValue(Map values, String fieldName, Object o) { if (o == null) return; Object val = values.get(fieldName); @@ -484,10 +490,12 @@ private void putValue(Map values, String fieldName, Object o) { return; } if (val instanceof List) { + @SuppressWarnings({"rawtypes"}) List list = (List) val; list.add(o); return; } + @SuppressWarnings({"rawtypes"}) ArrayList l = new ArrayList(); l.add(val); l.add(o); @@ -602,9 +610,11 @@ void addName(StringBuilder sb) { public abstract void walk(int event) throws IOException; } + @SuppressWarnings({"unchecked"}) public static List parseArrayFieldValue(int ev, JSONParser parser, MethodFrameWrapper runnable) throws IOException { assert ev == ARRAY_START; + @SuppressWarnings({"rawtypes"}) ArrayList lst = new ArrayList(2); for (; ; ) { ev = parser.nextEvent(); diff --git a/solr/solrj/src/java/org/apache/solr/common/util/JsonSchemaCreator.java b/solr/solrj/src/java/org/apache/solr/common/util/JsonSchemaCreator.java index 028c09b8fd1d..defd33dd7913 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/JsonSchemaCreator.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/JsonSchemaCreator.java @@ -36,6 +36,7 @@ */ public class JsonSchemaCreator { + @SuppressWarnings({"rawtypes"}) public static final Map natives = new HashMap<>(); static { @@ -67,7 +68,7 @@ private static Map createSchemaFromType(java.lang.reflect.Type t return map; } - private static void createObjectSchema(Class klas, Map map) { + private static void createObjectSchema(@SuppressWarnings({"rawtypes"})Class klas, Map map) { map.put("type", "object"); Map props = new HashMap<>(); map.put("properties", props); diff --git a/solr/solrj/src/java/org/apache/solr/common/util/JsonSchemaValidator.java b/solr/solrj/src/java/org/apache/solr/common/util/JsonSchemaValidator.java index b1fcc914342e..178503e990c0 100644 --- 
a/solr/solrj/src/java/org/apache/solr/common/util/JsonSchemaValidator.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/JsonSchemaValidator.java @@ -34,17 +34,21 @@ * It validates most aspects of json schema but it is NOT A FULLY COMPLIANT JSON schema parser or validator. * This validator borrow some design's idea from https://github.com/networknt/json-schema-validator */ +@SuppressWarnings({"unchecked"}) public class JsonSchemaValidator { + @SuppressWarnings({"unchecked", "rawtypes"}) private List validators; private static Set KNOWN_FNAMES = new HashSet<>(Arrays.asList( "description","documentation","default","additionalProperties")); + @SuppressWarnings({"rawtypes"}) public JsonSchemaValidator(String jsonString) { this((Map) Utils.fromJSONString(jsonString)); } + @SuppressWarnings({"rawtypes"}) public JsonSchemaValidator(Map jsonSchema) { this.validators = new LinkedList<>(); for (Object fname : jsonSchema.keySet()) { @@ -57,6 +61,7 @@ public JsonSchemaValidator(Map jsonSchema) { } } + @SuppressWarnings({"rawtypes"}) static final Map, Validator>> VALIDATORS = new HashMap<>(); static { @@ -74,6 +79,7 @@ public List validateJson(Object data) { return errs.isEmpty() ? null : errs; } + @SuppressWarnings({"unchecked", "rawtypes"}) boolean validate(Object data, List errs) { if (data == null) return true; for (Validator validator : validators) { @@ -88,7 +94,7 @@ boolean validate(Object data, List errs) { abstract class Validator { @SuppressWarnings("unused") - Validator(Map schema, T properties) {}; + Validator(@SuppressWarnings({"rawtypes"})Map schema, T properties) {}; abstract boolean validate(Object o, List errs); } @@ -127,9 +133,10 @@ boolean isValid(Object o) { NULL(null), UNKNOWN(Object.class); + @SuppressWarnings({"rawtypes"}) Class type; - Type(Class type) { + Type(@SuppressWarnings({"rawtypes"})Class type) { this.type = type; } @@ -142,7 +149,7 @@ boolean isValid(Object o) { class TypeValidator extends Validator { private Set types; - TypeValidator(Map schema, Object type) { + TypeValidator(@SuppressWarnings({"rawtypes"})Map schema, Object type) { super(schema, type); types = new HashSet<>(1); if (type instanceof List) { @@ -172,9 +179,10 @@ boolean validate(Object o, List errs) { } } +@SuppressWarnings({"rawtypes"}) class ItemsValidator extends Validator { private JsonSchemaValidator validator; - ItemsValidator(Map schema, Map properties) { + ItemsValidator(@SuppressWarnings({"rawtypes"})Map schema, @SuppressWarnings({"rawtypes"})Map properties) { super(schema, properties); validator = new JsonSchemaValidator(properties); } @@ -198,7 +206,7 @@ class EnumValidator extends Validator> { private Set enumVals; - EnumValidator(Map schema, List properties) { + EnumValidator(@SuppressWarnings({"rawtypes"})Map schema, List properties) { super(schema, properties); enumVals = new HashSet<>(properties); @@ -221,7 +229,7 @@ class RequiredValidator extends Validator> { private Set requiredProps; - RequiredValidator(Map schema, List requiredProps) { + RequiredValidator(@SuppressWarnings({"rawtypes"})Map schema, List requiredProps) { super(schema, requiredProps); this.requiredProps = new HashSet<>(requiredProps); } @@ -233,6 +241,7 @@ boolean validate(Object o, List errs) { boolean validate( Object o, List errs, Set requiredProps) { if (o instanceof Map) { + @SuppressWarnings({"rawtypes"}) Set fnames = ((Map) o).keySet(); for (String requiredProp : requiredProps) { if (requiredProp.contains(".")) { @@ -257,10 +266,12 @@ boolean validate( Object o, List errs, Set requiredProps) { } } 
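A compact usage sketch for the validator classes annotated in this hunk, grounded in the API shown above (`validateJson` returns `null` when there are no errors); the schema and document are made up:

```java
import java.util.List;
import org.apache.solr.common.util.JsonSchemaValidator;
import org.apache.solr.common.util.Utils;

public class SchemaValidatorSketch {
  public static void main(String[] args) {
    JsonSchemaValidator validator = new JsonSchemaValidator(
        "{\"type\":\"object\",\"properties\":{\"name\":{\"type\":\"string\"}},\"required\":[\"name\"]}");
    List<String> errs = validator.validateJson(Utils.fromJSONString("{\"name\":\"foo\"}"));
    System.out.println(errs == null ? "valid" : errs); // prints "valid"
  }
}
```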
+@SuppressWarnings({"rawtypes"}) class PropertiesValidator extends Validator> { private Map jsonSchemas; private boolean additionalProperties; + @SuppressWarnings({"unchecked", "rawtypes"}) PropertiesValidator(Map schema, Map properties) { super(schema, properties); jsonSchemas = new HashMap<>(); @@ -273,6 +284,7 @@ class PropertiesValidator extends Validator> { @Override boolean validate(Object o, List errs) { if (o instanceof Map) { + @SuppressWarnings({"rawtypes"}) Map map = (Map) o; for (Object key : map.keySet()) { JsonSchemaValidator jsonSchema = jsonSchemas.get(key.toString()); @@ -294,7 +306,7 @@ class OneOfValidator extends Validator> { private Set oneOfProps; - OneOfValidator(Map schema, List oneOfProps) { + OneOfValidator(@SuppressWarnings({"rawtypes"})Map schema, List oneOfProps) { super(schema, oneOfProps); this.oneOfProps = new HashSet<>(oneOfProps); } @@ -302,6 +314,7 @@ class OneOfValidator extends Validator> { @Override boolean validate(Object o, List errs) { if (o instanceof Map) { + @SuppressWarnings({"rawtypes"}) Map map = (Map) o; for (Object key : map.keySet()) { if (oneOfProps.contains(key.toString())) return true; diff --git a/solr/solrj/src/java/org/apache/solr/common/util/JsonTextWriter.java b/solr/solrj/src/java/org/apache/solr/common/util/JsonTextWriter.java index ed8c79776f02..8a5c25697d9d 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/JsonTextWriter.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/JsonTextWriter.java @@ -67,6 +67,11 @@ default void writeArrayCloser() throws IOException { } default void writeStr(String name, String val, boolean needsEscaping) throws IOException { + if (val == null) { + writeNull(name); + return; + } + // it might be more efficient to use a stringbuilder or write substrings // if writing chars to the stream is slow. 
if (needsEscaping) { @@ -186,7 +191,7 @@ default void writeKey(String fname, boolean needsEscaping) throws IOException { _writeChar(':'); } - default void writeJsonIter(Iterator val) throws IOException { + default void writeJsonIter(@SuppressWarnings({"rawtypes"})Iterator val) throws IOException { incLevel(); boolean first = true; while (val.hasNext()) { @@ -233,6 +238,7 @@ default void writeDate(String name, String val) throws IOException { } + @SuppressWarnings({"unchecked", "rawtypes"}) default void writeMap(String name, Map val, boolean excludeOuter, boolean isFirstVal) throws IOException { if (!excludeOuter) { writeMapOpener(val.size()); @@ -265,13 +271,13 @@ default void writeMap(String name, Map val, boolean excludeOuter, boolean isFirs } - default void writeArray(String name, List l) throws IOException { + default void writeArray(String name, @SuppressWarnings({"rawtypes"})List l) throws IOException { writeArrayOpener(l.size()); writeJsonIter(l.iterator()); writeArrayCloser(); } - default void writeArray(String name, Iterator val) throws IOException { + default void writeArray(String name, @SuppressWarnings({"rawtypes"})Iterator val) throws IOException { writeArrayOpener(-1); // no trivial way to determine array size writeJsonIter(val); writeArrayCloser(); @@ -286,7 +292,7 @@ default void unicodeEscape(Appendable out, int ch) throws IOException { out.append(hexdigits[(ch) & 0xf]); } - default void writeNamedList(String name, NamedList val) throws IOException { + default void writeNamedList(String name, @SuppressWarnings({"rawtypes"})NamedList val) throws IOException { String namedListStyle = getNamedListStyle(); if (val instanceof SimpleOrderedMap) { writeNamedListAsMapWithDups(name, val); @@ -309,7 +315,7 @@ default void writeNamedList(String name, NamedList val) throws IOException { * Map null to "" and name mangle any repeated keys to avoid repeats in the * output. */ - default void writeNamedListAsMapMangled(String name, NamedList val) throws IOException { + default void writeNamedListAsMapMangled(String name, @SuppressWarnings({"rawtypes"})NamedList val) throws IOException { int sz = val.size(); writeMapOpener(sz); incLevel(); @@ -369,7 +375,7 @@ default void writeNamedListAsMapMangled(String name, NamedList val) throws IOExc * null key is mapped to "". */ // NamedList("a"=1,"bar"="foo",null=3,null=null) => {"a":1,"bar":"foo","":3,"":null} - default void writeNamedListAsMapWithDups(String name, NamedList val) throws IOException { + default void writeNamedListAsMapWithDups(String name, @SuppressWarnings({"rawtypes"})NamedList val) throws IOException { int sz = val.size(); writeMapOpener(sz); incLevel(); @@ -392,7 +398,7 @@ default void writeNamedListAsMapWithDups(String name, NamedList val) throws IOEx // Represents a NamedList directly as an array of JSON objects... // NamedList("a"=1,"b"=2,null=3,null=null) => [{"a":1},{"b":2},3,null] - default void writeNamedListAsArrMap(String name, NamedList val) throws IOException { + default void writeNamedListAsArrMap(String name, @SuppressWarnings({"rawtypes"})NamedList val) throws IOException { int sz = val.size(); indent(); writeArrayOpener(sz); @@ -427,7 +433,7 @@ default void writeNamedListAsArrMap(String name, NamedList val) throws IOExcepti // Represents a NamedList directly as an array of JSON objects... 
// NamedList("a"=1,"b"=2,null=3,null=null) => [["a",1],["b",2],[null,3],[null,null]] - default void writeNamedListAsArrArr(String name, NamedList val) throws IOException { + default void writeNamedListAsArrArr(String name, @SuppressWarnings({"rawtypes"})NamedList val) throws IOException { int sz = val.size(); indent(); writeArrayOpener(sz); @@ -471,7 +477,7 @@ default void writeNamedListAsArrArr(String name, NamedList val) throws IOExcepti // Represents a NamedList directly as an array with keys/values // interleaved. // NamedList("a"=1,"b"=2,null=3,null=null) => ["a",1,"b",2,null,3,null,null] - default void writeNamedListAsFlat(String name, NamedList val) throws IOException { + default void writeNamedListAsFlat(String name, @SuppressWarnings({"rawtypes"})NamedList val) throws IOException { int sz = val.size(); writeArrayOpener(sz * 2); incLevel(); diff --git a/solr/solrj/src/java/org/apache/solr/common/util/MapBackedCache.java b/solr/solrj/src/java/org/apache/solr/common/util/MapBackedCache.java index 552dba6f0500..3efdfe32cde1 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/MapBackedCache.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/MapBackedCache.java @@ -30,6 +30,10 @@ public MapBackedCache(Map map) { this.map = map; } + public Map asMap() { + return map; + } + @Override public V put(K key, V val) { return map.put(key, val); diff --git a/solr/solrj/src/java/org/apache/solr/common/util/NamedList.java b/solr/solrj/src/java/org/apache/solr/common/util/NamedList.java index fbc11dfcb00f..f456e3536336 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/NamedList.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/NamedList.java @@ -62,6 +62,7 @@ *
    * */ +@SuppressWarnings({"unchecked", "rawtypes"}) public class NamedList implements Cloneable, Serializable, Iterable> , MapWriter { private static final long serialVersionUID = 1957981902839867821L; @@ -467,9 +468,11 @@ public T remove(Object key) { } @Override + @SuppressWarnings({"unchecked"}) public void putAll(Map m) { boolean isEmpty = isEmpty(); for (Object o : m.entrySet()) { + @SuppressWarnings({"rawtypes"}) Map.Entry e = (Entry) o; if (isEmpty) {// we know that there are no duplicates add((String) e.getKey(), (T) e.getValue()); @@ -485,12 +488,14 @@ public void clear() { } @Override + @SuppressWarnings({"unchecked"}) public Set keySet() { //TODO implement more efficiently return NamedList.this.asMap(1).keySet(); } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public Collection values() { //TODO implement more efficiently return NamedList.this.asMap(1).values(); @@ -795,7 +800,6 @@ public Boolean getBooleanArg(final String name) { * If values are found for the input key that are not strings or * arrays of strings. */ - @SuppressWarnings("rawtypes") public Collection removeConfigArgs(final String name) throws SolrException { List objects = getAll(name); diff --git a/solr/solrj/src/java/org/apache/solr/common/util/Pair.java b/solr/solrj/src/java/org/apache/solr/common/util/Pair.java index 74f5b2d6ff79..05b09e850b81 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/Pair.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/Pair.java @@ -66,6 +66,7 @@ public void writeMap(EntryWriter ew) throws IOException { ew.put("second", second); } + @SuppressWarnings({"unchecked", "rawtypes"}) public static Pair parse(Map m) { return new Pair(m.get("first"), m.get("second")); } diff --git a/solr/solrj/src/java/org/apache/solr/common/util/RetryUtil.java b/solr/solrj/src/java/org/apache/solr/common/util/RetryUtil.java index 3f1377126107..2f148bd7fd9b 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/RetryUtil.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/RetryUtil.java @@ -37,11 +37,12 @@ public interface BooleanRetryCmd { boolean execute(); } - public static void retryOnThrowable(Class clazz, long timeoutms, long intervalms, RetryCmd cmd) throws Throwable { + public static void retryOnThrowable(@SuppressWarnings({"rawtypes"})Class clazz, long timeoutms, long intervalms, RetryCmd cmd) throws Throwable { retryOnThrowable(Collections.singleton(clazz), timeoutms, intervalms, cmd); } - public static void retryOnThrowable(Set classes, long timeoutms, long intervalms, RetryCmd cmd) throws Throwable { + public static void retryOnThrowable(@SuppressWarnings({"rawtypes"})Set classes, + long timeoutms, long intervalms, RetryCmd cmd) throws Throwable { long timeout = System.nanoTime() + TimeUnit.NANOSECONDS.convert(timeoutms, TimeUnit.MILLISECONDS); while (true) { try { @@ -61,8 +62,8 @@ public static void retryOnThrowable(Set classes, long timeoutms, long int } } - private static boolean isInstanceOf(Set classes, Throwable t) { - for (Class c : classes) { + private static boolean isInstanceOf(@SuppressWarnings({"rawtypes"})Set classes, Throwable t) { + for (@SuppressWarnings({"rawtypes"})Class c : classes) { if (c.isInstance(t)) { return true; } diff --git a/solr/solrj/src/java/org/apache/solr/common/util/TextWriter.java b/solr/solrj/src/java/org/apache/solr/common/util/TextWriter.java index 6aa58c6a8fab..bb3975628f6f 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/TextWriter.java +++ 
b/solr/solrj/src/java/org/apache/solr/common/util/TextWriter.java @@ -93,9 +93,9 @@ default void writeVal(String name, Object val) throws IOException { void writeStr(String name, String val, boolean needsEscaping) throws IOException; - void writeMap(String name, Map val, boolean excludeOuter, boolean isFirstVal) throws IOException; + void writeMap(String name, @SuppressWarnings({"rawtypes"})Map val, boolean excludeOuter, boolean isFirstVal) throws IOException; - void writeArray(String name, Iterator val) throws IOException; + void writeArray(String name, @SuppressWarnings({"rawtypes"})Iterator val) throws IOException; void writeNull(String name) throws IOException; @@ -121,7 +121,7 @@ default void writeVal(String name, Object val) throws IOException { /** if this form of the method is called, val is the Solr ISO8601 based date format */ void writeDate(String name, String val) throws IOException; - void writeNamedList(String name, NamedList val) throws IOException; + void writeNamedList(String name, @SuppressWarnings({"rawtypes"})NamedList val) throws IOException; Writer getWriter(); @@ -154,7 +154,7 @@ default void writeArray(String name, Object[] val) throws IOException { writeArray(name, Arrays.asList(val)); } - default void writeArray(String name, List l) throws IOException { + default void writeArray(String name, @SuppressWarnings({"rawtypes"})List l) throws IOException { writeArray(name, l.iterator()); } diff --git a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java index e32a4265f701..229417a969d6 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java @@ -89,30 +89,39 @@ import static java.util.concurrent.TimeUnit.NANOSECONDS; public class Utils { + @SuppressWarnings({"rawtypes"}) public static final Function NEW_HASHMAP_FUN = o -> new HashMap<>(); + @SuppressWarnings({"rawtypes"}) public static final Function NEW_LINKED_HASHMAP_FUN = o -> new LinkedHashMap<>(); + @SuppressWarnings({"rawtypes"}) public static final Function NEW_ATOMICLONG_FUN = o -> new AtomicLong(); + @SuppressWarnings({"rawtypes"}) public static final Function NEW_ARRAYLIST_FUN = o -> new ArrayList<>(); + @SuppressWarnings({"rawtypes"}) public static final Function NEW_SYNCHRONIZED_ARRAYLIST_FUN = o -> Collections.synchronizedList(new ArrayList<>()); + @SuppressWarnings({"rawtypes"}) public static final Function NEW_HASHSET_FUN = o -> new HashSet<>(); private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + @SuppressWarnings({"rawtypes"}) public static Map getDeepCopy(Map map, int maxDepth) { return getDeepCopy(map, maxDepth, true, false); } + @SuppressWarnings({"rawtypes"}) public static Map getDeepCopy(Map map, int maxDepth, boolean mutable) { return getDeepCopy(map, maxDepth, mutable, false); } + @SuppressWarnings({"unchecked", "rawtypes"}) public static Map getDeepCopy(Map map, int maxDepth, boolean mutable, boolean sorted) { if (map == null) return null; if (maxDepth < 1) return map; Map copy; if (sorted) { - copy = new TreeMap(); + copy = new TreeMap<>(); } else { - copy = map instanceof LinkedHashMap ? new LinkedHashMap(map.size()) : new HashMap(map.size()); + copy = map instanceof LinkedHashMap ? 
new LinkedHashMap<>(map.size()) : new HashMap<>(map.size()); } for (Object o : map.entrySet()) { Map.Entry e = (Map.Entry) o; @@ -121,17 +130,18 @@ public static Map getDeepCopy(Map map, int maxDepth, boolean mutable, boolean so return mutable ? copy : Collections.unmodifiableMap(copy); } - public static void forEachMapEntry(Object o, String path, BiConsumer fun) { + public static void forEachMapEntry(Object o, String path, @SuppressWarnings({"rawtypes"})BiConsumer fun) { Object val = Utils.getObjectByPath(o, false, path); forEachMapEntry(val, fun); } - public static void forEachMapEntry(Object o, List path, BiConsumer fun) { + public static void forEachMapEntry(Object o, List path, @SuppressWarnings({"rawtypes"})BiConsumer fun) { Object val = Utils.getObjectByPath(o, false, path); forEachMapEntry(val, fun); } - public static void forEachMapEntry(Object o, BiConsumer fun) { + @SuppressWarnings({"unchecked"}) + public static void forEachMapEntry(Object o, @SuppressWarnings({"rawtypes"})BiConsumer fun) { if (o instanceof MapWriter) { MapWriter m = (MapWriter) o; try { @@ -150,6 +160,7 @@ public MapWriter.EntryWriter put(CharSequence k, Object v) { } } + @SuppressWarnings({"unchecked", "rawtypes"}) private static Object makeDeepCopy(Object v, int maxDepth, boolean mutable, boolean sorted) { if (v instanceof MapWriter && maxDepth > 1) { v = ((MapWriter) v).toMap(new LinkedHashMap<>()); @@ -182,10 +193,12 @@ public static Object fromJavabin(byte[] bytes) throws IOException { } } + @SuppressWarnings({"rawtypes"}) public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable) { return getDeepCopy(c, maxDepth, mutable, false); } + @SuppressWarnings({"unchecked", "rawtypes"}) public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable, boolean sorted) { if (c == null || maxDepth < 1) return c; Collection result = c instanceof Set ? @@ -217,6 +230,7 @@ public MapWriterJSONWriter(CharArr out, int indentSize) { } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public void handleUnknownClass(Object o) { if (o instanceof MapWriter) { Map m = ((MapWriter) o).toMap(new LinkedHashMap<>()); @@ -313,7 +327,7 @@ public static Object fromJSON(Reader is) { return new ObjectBuilder(jsonParser) { @Override public Object newObject() { - return new LinkedHashMapWriter(); + return new LinkedHashMapWriter<>(); } }; } catch (IOException e) { @@ -326,7 +340,7 @@ public Object newObject() { return new ObjectBuilder(jsonParser) { @Override public Object newObject() { - return new HashMap(); + return new HashMap<>(); } }; } catch (IOException e) { @@ -384,11 +398,13 @@ public static Object getObjectByPath(Object root, boolean onlyPrimitive, String return getObjectByPath(root, onlyPrimitive, parts); } + @SuppressWarnings({"unchecked"}) public static boolean setObjectByPath(Object root, String hierarchy, Object value) { List parts = StrUtils.splitSmart(hierarchy, '/', true); return setObjectByPath(root, parts, value); } + @SuppressWarnings({"unchecked"}) public static boolean setObjectByPath(Object root, List hierarchy, Object value) { if (root == null) return false; if (!isMapLike(root)) throw new RuntimeException("must be a Map or NamedList"); @@ -407,6 +423,7 @@ public static boolean setObjectByPath(Object root, List hierarchy, Objec Object o = getVal(obj, s, -1); if (o == null) return false; if (idx > -1) { + @SuppressWarnings({"rawtypes"}) List l = (List) o; o = idx < l.size() ? 
l.get(idx) : null; } @@ -415,6 +432,7 @@ public static boolean setObjectByPath(Object root, List hierarchy, Objec } else { if (idx == -2) { if (obj instanceof NamedList) { + @SuppressWarnings({"rawtypes"}) NamedList namedList = (NamedList) obj; int location = namedList.indexOf(s, 0); if (location == -1) namedList.add(s, value); @@ -426,6 +444,7 @@ public static boolean setObjectByPath(Object root, List hierarchy, Objec } else { Object v = getVal(obj, s, -1); if (v instanceof List) { + @SuppressWarnings({"rawtypes"}) List list = (List) v; if (idx == -1) { list.add(value); @@ -469,6 +488,7 @@ public static Object getObjectByPath(Object root, boolean onlyPrimitive, List getJson(DistribStateManager distribStateManager, String path) throws InterruptedException, IOException, KeeperException { VersionedData data = null; try { @@ -614,6 +637,7 @@ public static Map getJson(DistribStateManager distribStateManage * @param retryOnConnLoss whether to retry the operation automatically on connection loss, see {@link org.apache.solr.common.cloud.ZkCmdExecutor#retryOperation(ZkOperation)} * @return a Map if the node exists and contains valid JSON or an empty map if znode does not exist or has a null data */ + @SuppressWarnings({"unchecked"}) public static Map getJson(SolrZkClient zkClient, String path, boolean retryOnConnLoss) throws KeeperException, InterruptedException { try { byte[] bytes = zkClient.getData(path, null, null, retryOnConnLoss); @@ -663,6 +687,7 @@ public static String parseMetricsReplicaName(String collectionName, String coreN * @param input the json with new values * @return whether there was any change made to sink or not. */ + @SuppressWarnings({"unchecked", "rawtypes"}) public static boolean mergeJson(Map sink, Map input) { boolean isModified = false; for (Map.Entry e : input.entrySet()) { diff --git a/solr/solrj/src/java/org/apache/solr/common/util/ValidatingJsonMap.java b/solr/solrj/src/java/org/apache/solr/common/util/ValidatingJsonMap.java index b5375365fead..a64456fb9159 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/ValidatingJsonMap.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/ValidatingJsonMap.java @@ -40,6 +40,7 @@ import static java.util.Collections.unmodifiableList; import static java.util.Collections.unmodifiableSet; +@SuppressWarnings({"overrides"}) public class ValidatingJsonMap implements Map, NavigableObject { private static final String INCLUDE = "#include"; @@ -48,6 +49,7 @@ public class ValidatingJsonMap implements Map, NavigableObject { if (o == null) return " Must not be NULL"; return null; }; + @SuppressWarnings({"rawtypes"}) public static final PredicateWithErrMsg ENUM_OF = pair -> { if (pair.second() instanceof Set) { Set set = (Set) pair.second(); @@ -141,7 +143,8 @@ public Set> entrySet() { return delegate.entrySet(); } - public Object get(String key, PredicateWithErrMsg predicate) { + @SuppressWarnings({"unchecked"}) + public Object get(String key, @SuppressWarnings({"rawtypes"})PredicateWithErrMsg predicate) { Object v = get(key); if (predicate != null) { String msg = predicate.test(v); @@ -178,11 +181,12 @@ public ValidatingJsonMap getMap(String key) { return getMap(key, null, null); } - public ValidatingJsonMap getMap(String key, PredicateWithErrMsg predicate) { + public ValidatingJsonMap getMap(String key, @SuppressWarnings({"rawtypes"})PredicateWithErrMsg predicate) { return getMap(key, predicate, null); } + @SuppressWarnings({"unchecked", "rawtypes"}) public ValidatingJsonMap getMap(String key, PredicateWithErrMsg predicate, 
String message) { Object v = get(key); if (v != null && !(v instanceof Map)) { @@ -199,10 +203,12 @@ public ValidatingJsonMap getMap(String key, PredicateWithErrMsg predicate, Strin return wrap((Map) v); } + @SuppressWarnings({"rawtypes"}) public List getList(String key, PredicateWithErrMsg predicate) { return getList(key, predicate, null); } + @SuppressWarnings({"unchecked", "rawtypes"}) public List getList(String key, PredicateWithErrMsg predicate, Object test) { Object v = get(key); if (v != null && !(v instanceof List)) { @@ -219,6 +225,7 @@ public List getList(String key, PredicateWithErrMsg predicate, Object test) { return (List) v; } + @SuppressWarnings({"unchecked", "rawtypes"}) public Object get(String key, PredicateWithErrMsg predicate, Object arg) { Object v = get(key); String test = predicate.test(new Pair(v, arg)); @@ -279,6 +286,7 @@ private static void handleIncludes(ValidatingJsonMap map, String location, int m } } + @SuppressWarnings({"unchecked", "rawtypes"}) public static ValidatingJsonMap getDeepCopy(Map map, int maxDepth, boolean mutable) { if (map == null) return null; if (maxDepth < 1) return ValidatingJsonMap.wrap(map); @@ -293,6 +301,7 @@ public static ValidatingJsonMap getDeepCopy(Map map, int maxDepth, boolean mutab return mutable ? copy : new ValidatingJsonMap(Collections.unmodifiableMap(copy)); } + @SuppressWarnings({"unchecked", "rawtypes"}) public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable) { if (c == null || maxDepth < 1) return c; Collection result = c instanceof Set ? new HashSet() : new ArrayList(); @@ -339,6 +348,12 @@ public boolean equals(Object that) { return that instanceof Map && this.delegate.equals(that); } +// @Override +// public int hashCode() { +// throw new UnsupportedOperationException("TODO unimplemented ValidatingJsonMap.hashCode"); +// } + + @SuppressWarnings({"unchecked"}) public static final ValidatingJsonMap EMPTY = new ValidatingJsonMap(Collections.EMPTY_MAP); public interface PredicateWithErrMsg { diff --git a/solr/solrj/src/java/org/noggit/CharArr.java b/solr/solrj/src/java/org/noggit/CharArr.java index 9ecc8e626097..0431e107a1d5 100644 --- a/solr/solrj/src/java/org/noggit/CharArr.java +++ b/solr/solrj/src/java/org/noggit/CharArr.java @@ -225,170 +225,170 @@ public final Appendable append(char c) throws IOException { write(c); return this; } -} -class NullCharArr extends CharArr { - public NullCharArr() { - super(new char[1], 0, 0); - } + static class NullCharArr extends CharArr { + public NullCharArr() { + super(new char[1], 0, 0); + } - @Override - public void unsafeWrite(char b) { - } + @Override + public void unsafeWrite(char b) { + } - @Override - public void unsafeWrite(char b[], int off, int len) { - } + @Override + public void unsafeWrite(char b[], int off, int len) { + } - @Override - public void unsafeWrite(int b) { - } + @Override + public void unsafeWrite(int b) { + } - @Override - public void write(char b) { - } + @Override + public void write(char b) { + } - @Override - public void write(char b[], int off, int len) { - } + @Override + public void write(char b[], int off, int len) { + } - @Override - public void reserve(int num) { - } + @Override + public void reserve(int num) { + } - @Override - protected void resize(int len) { - } + @Override + protected void resize(int len) { + } - @Override - public Appendable append(CharSequence csq, int start, int end) throws IOException { - return this; - } + @Override + public Appendable append(CharSequence csq, int start, int end) throws 
IOException { + return this; + } - @Override - public char charAt(int index) { - return 0; - } + @Override + public char charAt(int index) { + return 0; + } - @Override - public void write(String s, int stringOffset, int len) { + @Override + public void write(String s, int stringOffset, int len) { + } } -} -// IDEA: a subclass that refills the array from a reader? -class CharArrReader extends CharArr { - protected final Reader in; + // IDEA: a subclass that refills the array from a reader? + class CharArrReader extends CharArr { + protected final Reader in; - public CharArrReader(Reader in, int size) { - super(size); - this.in = in; - } + public CharArrReader(Reader in, int size) { + super(size); + this.in = in; + } - @Override - public int read() throws IOException { - if (start >= end) fill(); - return start >= end ? -1 : buf[start++]; - } + @Override + public int read() throws IOException { + if (start >= end) fill(); + return start >= end ? -1 : buf[start++]; + } - @Override - public int read(CharBuffer cb) throws IOException { - // empty the buffer and then read direct - int sz = size(); - if (sz > 0) cb.put(buf, start, end); - int sz2 = in.read(cb); - if (sz2 >= 0) return sz + sz2; - return sz > 0 ? sz : -1; - } + @Override + public int read(CharBuffer cb) throws IOException { + // empty the buffer and then read direct + int sz = size(); + if (sz > 0) cb.put(buf, start, end); + int sz2 = in.read(cb); + if (sz2 >= 0) return sz + sz2; + return sz > 0 ? sz : -1; + } - @Override - public int fill() throws IOException { - if (start >= end) { - reset(); - } else if (start > 0) { - System.arraycopy(buf, start, buf, 0, size()); - end = size(); - start = 0; + @Override + public int fill() throws IOException { + if (start >= end) { + reset(); + } else if (start > 0) { + System.arraycopy(buf, start, buf, 0, size()); + end = size(); + start = 0; + } + /*** + // fill fully or not??? + do { + int sz = in.read(buf,end,buf.length-end); + if (sz==-1) return; + end+=sz; + } while (end < buf.length); + ***/ + + int sz = in.read(buf, end, buf.length - end); + if (sz > 0) end += sz; + return sz; } - /*** - // fill fully or not??? - do { - int sz = in.read(buf,end,buf.length-end); - if (sz==-1) return; - end+=sz; - } while (end < buf.length); - ***/ - int sz = in.read(buf, end, buf.length - end); - if (sz > 0) end += sz; - return sz; } -} - -class CharArrWriter extends CharArr { - protected Writer sink; + class CharArrWriter extends CharArr { + protected Writer sink; - @Override - public void flush() { - try { - sink.write(buf, start, end - start); - } catch (IOException e) { - throw new RuntimeException(e); - } - start = end = 0; - } - - @Override - public void write(char b) { - if (end >= buf.length) { - flush(); - } - unsafeWrite(b); - } - - @Override - public void write(char b[], int off, int len) { - int space = buf.length - end; - if (len < space) { - unsafeWrite(b, off, len); - } else if (len < buf.length) { - unsafeWrite(b, off, space); - flush(); - unsafeWrite(b, off + space, len - space); - } else { - flush(); + @Override + public void flush() { try { - sink.write(b, off, len); + sink.write(buf, start, end - start); } catch (IOException e) { throw new RuntimeException(e); } + start = end = 0; } - } - @Override - public void write(String s, int stringOffset, int len) { - int space = buf.length - end; - if (len < space) { - s.getChars(stringOffset, stringOffset + len, buf, end); - end += len; - } else if (len < buf.length) { - // if the data to write is small enough, buffer it. 
- s.getChars(stringOffset, stringOffset + space, buf, end); - flush(); - s.getChars(stringOffset + space, stringOffset + len, buf, 0); - end = len - space; - } else { - flush(); - // don't buffer, just write to sink - try { - sink.write(s, stringOffset, len); - } catch (IOException e) { - throw new RuntimeException(e); + @Override + public void write(char b) { + if (end >= buf.length) { + flush(); } + unsafeWrite(b); + } + @Override + public void write(char b[], int off, int len) { + int space = buf.length - end; + if (len < space) { + unsafeWrite(b, off, len); + } else if (len < buf.length) { + unsafeWrite(b, off, space); + flush(); + unsafeWrite(b, off + space, len - space); + } else { + flush(); + try { + sink.write(b, off, len); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + + @Override + public void write(String s, int stringOffset, int len) { + int space = buf.length - end; + if (len < space) { + s.getChars(stringOffset, stringOffset + len, buf, end); + end += len; + } else if (len < buf.length) { + // if the data to write is small enough, buffer it. + s.getChars(stringOffset, stringOffset + space, buf, end); + flush(); + s.getChars(stringOffset + space, stringOffset + len, buf, 0); + end = len - space; + } else { + flush(); + // don't buffer, just write to sink + try { + sink.write(s, stringOffset, len); + } catch (IOException e) { + throw new RuntimeException(e); + } + + } } } } diff --git a/solr/solrj/src/java/org/noggit/JSONParser.java b/solr/solrj/src/java/org/noggit/JSONParser.java index 8b1ac01bc72e..d1655d13e70b 100644 --- a/solr/solrj/src/java/org/noggit/JSONParser.java +++ b/solr/solrj/src/java/org/noggit/JSONParser.java @@ -132,7 +132,7 @@ public static String getEventString(int e) { return "Unknown: " + e; } - private static final CharArr devNull = new NullCharArr(); + private static final CharArr devNull = new CharArr.NullCharArr(); protected int flags = FLAGS_DEFAULT; diff --git a/solr/solrj/src/resources/apispec/cluster.Commands.json b/solr/solrj/src/resources/apispec/cluster.Commands.json index 069cd1d3e8d6..b72b67cabb2b 100644 --- a/solr/solrj/src/resources/apispec/cluster.Commands.json +++ b/solr/solrj/src/resources/apispec/cluster.Commands.json @@ -75,9 +75,6 @@ "documentation": "https://lucene.apache.org/solr/guide/cluster-node-management.html#clusterprop", "description": "Add, edit, or delete a cluster-wide property.", "properties": { - "legacyCloud": { - "type": "boolean" - }, "urlScheme": { "type": "string" }, diff --git a/solr/solrj/src/resources/apispec/collections.collection.Commands.json b/solr/solrj/src/resources/apispec/collections.collection.Commands.json index a2694e1187f3..b4545d33f253 100644 --- a/solr/solrj/src/resources/apispec/collections.collection.Commands.json +++ b/solr/solrj/src/resources/apispec/collections.collection.Commands.json @@ -24,15 +24,15 @@ "properties": { "replica": { "type": "string", - "description": "The name of the replica" + "description": "The name of the replica to move. Either this parameter or shard + sourceNode is required, this parameter takes precedence." }, "shard": { "type": "string", - "description": "The name of the shard" + "description": "The name of the shard for which a replica should be moved. Either this parameter or replica is required. If replica is specified, this parameter is ignored." }, "sourceNode": { "type": "string", - "description": "The name of the node that contains the replica." + "description": "The name of the node that contains the replica. 
Either this parameter or replica is required. If replica is specified, this parameter is ignored." }, "targetNode": { "type": "string", @@ -46,7 +46,7 @@ "timeout": { "type": "integer", "default": 600, - "description": "Timeout to wait for replica to become active. For very large replicas this may need to be increased." + "description": "Number of seconds to wait for replica to become active before failing. For very large replicas this may need to be increased to ensure the old replica is deleted. Ignored for hdfs replicas." }, "inPlaceMove": { "type": "boolean", diff --git a/solr/solrj/src/resources/apispec/collections.collection.shards.shard.delete.json b/solr/solrj/src/resources/apispec/collections.collection.shards.shard.delete.json index ae7c36a550b0..50c1e3bb0f5a 100644 --- a/solr/solrj/src/resources/apispec/collections.collection.shards.shard.delete.json +++ b/solr/solrj/src/resources/apispec/collections.collection.shards.shard.delete.json @@ -1,6 +1,6 @@ { "documentation": "https://lucene.apache.org/solr/guide/shard-management.html#deleteshard", - "description": "Deletes a shard by unloading all replicas of the shard, removing it from clusterstate.json, and by default deleting the instanceDir and dataDir. Only inactive shards or those which have no range for custom sharding will be deleted.", + "description": "Deletes a shard by unloading all replicas of the shard, removing it from the collection's state.json, and by default deleting the instanceDir and dataDir. Only inactive shards or those which have no range for custom sharding will be deleted.", "methods": [ "DELETE" ], diff --git a/solr/solrj/src/resources/apispec/collections.collection.shards.shard.replica.delete.json b/solr/solrj/src/resources/apispec/collections.collection.shards.shard.replica.delete.json index 2d4691df8465..16efecb92a05 100644 --- a/solr/solrj/src/resources/apispec/collections.collection.shards.shard.replica.delete.json +++ b/solr/solrj/src/resources/apispec/collections.collection.shards.shard.replica.delete.json @@ -1,6 +1,6 @@ { "documentation": "https://lucene.apache.org/solr/guide/replica-management.html#deletereplica", - "description": "Deletes a replica. If the responding node is up, the core is unloaded, the entry removed from clusterstate.json, and the instanceDir and dataDir removed. If the node is not up, the entry for the replica is removed from clusterstate.json; if the nodes comes up later, the replica is automatically de-registered.", + "description": "Deletes a replica. If the responding node is up, the core is unloaded, the entry removed from the collection's state.json, and the instanceDir and dataDir removed. 
If the node is not up, the entry for the replica is removed from state.json; if the node comes up later, the replica is automatically de-registered.", "methods": [ "DELETE" ], diff --git a/solr/solrj/src/test-files/solrj/javabin_backcompat.bin b/solr/solrj/src/test-files/solrj/javabin_backcompat.bin index 6e9d32ff78f6..7a5fde24c84b 100644 Binary files a/solr/solrj/src/test-files/solrj/javabin_backcompat.bin and b/solr/solrj/src/test-files/solrj/javabin_backcompat.bin differ diff --git a/solr/solrj/src/test/org/apache/solr/client/ref_guide_examples/JsonRequestApiTest.java b/solr/solrj/src/test/org/apache/solr/client/ref_guide_examples/JsonRequestApiTest.java index ffe452f67c5b..83fe1c39ca4c 100644 --- a/solr/solrj/src/test/org/apache/solr/client/ref_guide_examples/JsonRequestApiTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/ref_guide_examples/JsonRequestApiTest.java @@ -284,7 +284,7 @@ public void testJsonQueryDslBoostEquivalents() throws Exception { //tag::solrj-ipod-query-boosted-dsl-2[] final Map queryTopLevel = new HashMap<>(); final Map boostProperties = new HashMap<>(); - final Map luceneTopLevel = new HashMap(); + final Map luceneTopLevel = new HashMap<>(); final Map luceneProperties = new HashMap<>(); queryTopLevel.put("boost", boostProperties); boostProperties.put("b", "log(popularity)"); diff --git a/solr/solrj/src/test/org/apache/solr/client/ref_guide_examples/UsingSolrJRefGuideExamplesTest.java b/solr/solrj/src/test/org/apache/solr/client/ref_guide_examples/UsingSolrJRefGuideExamplesTest.java index c87bb87bdf68..5efa2159a008 100644 --- a/solr/solrj/src/test/org/apache/solr/client/ref_guide_examples/UsingSolrJRefGuideExamplesTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/ref_guide_examples/UsingSolrJRefGuideExamplesTest.java @@ -58,7 +58,7 @@ public class UsingSolrJRefGuideExamplesTest extends SolrCloudTestCase { private static final int NUM_INDEXED_DOCUMENTS = 3; private static final int NUM_LIVE_NODES = 1; - private Queue expectedLines = new ArrayDeque(); + private Queue expectedLines = new ArrayDeque<>(); @BeforeClass public static void setUpCluster() throws Exception { @@ -219,10 +219,13 @@ public void otherSolrApisExample() throws Exception { // tag::solrj-other-apis[] final SolrClient client = getSolrClient(); + @SuppressWarnings({"rawtypes"}) final SolrRequest request = new CollectionAdminRequest.ClusterStatus(); final NamedList response = client.request(request); + @SuppressWarnings({"unchecked"}) final NamedList cluster = (NamedList) response.get("cluster"); + @SuppressWarnings({"unchecked"}) final List liveNodes = (List) cluster.get("live_nodes"); print("Found " + liveNodes.size() + " live nodes"); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java index d823ad88109d..f106cbea0e05 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java @@ -154,6 +154,7 @@ public void testQueryPerf() throws Exception { * query the example */ @Test + @SuppressWarnings({"rawtypes"}) public void testExampleConfig() throws Exception { SolrClient client = getSolrClient(); @@ -602,6 +603,7 @@ public void testAugmentFields() throws Exception assertTrue( "should be bigger ["+id1+","+id2+"]", id2 > id1 ); // The score from explain should be the same as the score + @SuppressWarnings({"rawtypes"}) NamedList explain = (NamedList)out1.getFieldValue( "[explain]" );
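The reworded MOVEREPLICA descriptions a few hunks up spell out the selection rules: replica wins when given, otherwise shard plus sourceNode pick one. For reference, a sketch of both call styles through SolrJ's CollectionAdminRequest.MoveReplica helper; the two constructor shapes are an assumption about the SolrJ API rather than something this patch touches, so verify them against the version in use:

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.request.CollectionAdminRequest;

    class MoveReplicaSketch {
      // Style 1: name the replica directly; per the description above it
      // takes precedence if shard/sourceNode are also supplied.
      static void byReplica(SolrClient client) throws Exception {
        new CollectionAdminRequest.MoveReplica("techproducts", "core_node5", "node2:8983_solr")
            .process(client);
      }

      // Style 2: name shard + sourceNode and let Solr pick the replica to move.
      static void byShardAndSource(SolrClient client) throws Exception {
        new CollectionAdminRequest.MoveReplica("techproducts", "shard1",
            "node1:8983_solr", "node2:8983_solr").process(client);
      }
    }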
assertEquals( out1.get( "score"), explain.get( "value" ) ); @@ -787,6 +789,7 @@ public void testStreamingRequest() throws Exception { } @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void testMultiContentWriterRequest() throws Exception { SolrClient client = getSolrClient(); client.deleteByQuery("*:*");// delete everything! @@ -811,7 +814,7 @@ public void testMultiContentWriterRequest() throws Exception { } - private ByteBuffer getFileContent(NamedList nl, String name) throws IOException { + private ByteBuffer getFileContent(@SuppressWarnings({"rawtypes"})NamedList nl, String name) throws IOException { try (InputStream is = new FileInputStream(getFile(name))) { return MultiContentWriterRequest.readByteBuffer(is); } @@ -1305,6 +1308,7 @@ public void testPivotFacetsQueries() throws Exception { } @Test + @SuppressWarnings({"rawtypes"}) public void testPivotFacetsRanges() throws Exception { SolrClient client = getSolrClient(); @@ -1391,6 +1395,7 @@ public void testPivotFacetsRanges() throws Exception { assertEquals(0, ((Float)range.getStart()).intValue()); assertEquals(200, ((Float)range.getEnd()).intValue()); assertEquals(50, ((Float)range.getGap()).intValue()); + @SuppressWarnings({"unchecked"}) List counts = range.getCounts(); assertEquals(4, counts.size()); for (Count count : counts) { @@ -1406,6 +1411,7 @@ public void testPivotFacetsRanges() throws Exception { assertEquals(0, ((Float) range.getStart()).intValue()); assertEquals(200, ((Float) range.getEnd()).intValue()); assertEquals(50, ((Float) range.getGap()).intValue()); + @SuppressWarnings({"unchecked"}) List counts = range.getCounts(); assertEquals(4, counts.size()); for (Count count : counts) { @@ -1429,6 +1435,7 @@ public void testPivotFacetsRanges() throws Exception { assertEquals(0, ((Float)range.getStart()).intValue()); assertEquals(200, ((Float)range.getEnd()).intValue()); assertEquals(50, ((Float)range.getGap()).intValue()); + @SuppressWarnings({"unchecked"}) List counts = range.getCounts(); assertEquals(4, counts.size()); for (Count count : counts) { @@ -1444,6 +1451,7 @@ public void testPivotFacetsRanges() throws Exception { assertEquals(0, ((Float)range.getStart()).intValue()); assertEquals(200, ((Float)range.getEnd()).intValue()); assertEquals(50, ((Float)range.getGap()).intValue()); + @SuppressWarnings({"unchecked"}) List counts = range.getCounts(); assertEquals(4, counts.size()); for (Count count : counts) { @@ -1827,6 +1835,7 @@ public void testUpdateField() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testUpdateMultiValuedField() throws Exception { SolrClient solrClient = getSolrClient(); SolrInputDocument doc = new SolrInputDocument(); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrSchemalessExampleTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrSchemalessExampleTest.java index b59d5c14498c..bb41dd5ed5fd 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrSchemalessExampleTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrSchemalessExampleTest.java @@ -91,6 +91,7 @@ public void testArbitraryJsonIndexing() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testFieldMutating() throws Exception { HttpSolrClient client = (HttpSolrClient) getSolrClient(); client.deleteByQuery("*:*"); @@ -121,6 +122,7 @@ public void testFieldMutating() throws Exception { "p_q", "p.q", "x_y"); + @SuppressWarnings({"rawtypes"}) HashSet set = new HashSet(); QueryResponse rsp = assertNumFound("*:*", 
expected.size()); for (SolrDocument doc : rsp.getResults()) set.addAll(doc.getFieldNames()); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/TestSolrJErrorHandling.java b/solr/solrj/src/test/org/apache/solr/client/solrj/TestSolrJErrorHandling.java index 9884e0810c47..c9e2caec6286 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/TestSolrJErrorHandling.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/TestSolrJErrorHandling.java @@ -286,7 +286,7 @@ public void testHttpURLConnection() throws Exception { try { code = conn.getResponseCode(); } catch (Throwable th) { - log.error("ERROR DURING conn.getResponseCode():",th); + log.error("ERROR DURING conn.getResponseCode():", th); } /*** diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/beans/TestDocumentObjectBinder.java b/solr/solrj/src/test/org/apache/solr/client/solrj/beans/TestDocumentObjectBinder.java index e3631b851a44..4894c7476b9f 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/beans/TestDocumentObjectBinder.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/beans/TestDocumentObjectBinder.java @@ -57,6 +57,7 @@ public void testSimple() throws Exception { SolrInputField catfield = out.getField("cat"); assertEquals(3, catfield.getValueCount()); + @SuppressWarnings({"unchecked"}) List catValues = (List) catfield.getValue(); assertEquals("aaa", catValues.get(0)); assertEquals("bbb", catValues.get(1)); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java b/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java index 0b3c31e16abb..e83cf8f9fb46 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java @@ -82,7 +82,6 @@ import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.CORES; import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.FREEDISK; import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.REPLICA; -import static org.apache.solr.common.cloud.ZkStateReader.CLUSTER_STATE; import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA; import static org.apache.solr.common.params.CollectionParams.CollectionAction.MOVEREPLICA; @@ -93,13 +92,16 @@ public class TestPolicy extends SolrTestCaseJ4 { public TestPolicy(){ useNodeset = true; } - static Suggester createSuggester(SolrCloudManager cloudManager, Map jsonObj, Suggester seed) throws IOException, InterruptedException { + @SuppressWarnings({"unchecked"}) + static Suggester createSuggester(SolrCloudManager cloudManager, + @SuppressWarnings({"rawtypes"})Map jsonObj, Suggester seed) throws IOException, InterruptedException { Policy.Session session = null; if (seed != null) session = seed.session; else { session = cloudManager.getDistribStateManager().getAutoScalingConfig().getPolicy().createSession(cloudManager); } + @SuppressWarnings({"rawtypes"}) Map m = (Map) jsonObj.get("suggester"); Suggester result = session.getSuggester(CollectionParams.CollectionAction.get((String) m.get("action"))); m = (Map) m.get("hints"); @@ -110,13 +112,14 @@ static Suggester createSuggester(SolrCloudManager cloudManager, Map jsonObj, Sug return result; } - static SolrCloudManager createCloudManager(Map jsonObj) { + static SolrCloudManager createCloudManager(@SuppressWarnings({"rawtypes"})Map jsonObj) { return cloudManagerWithData(jsonObj); } public static String 
clusterState = Utils.toJSONString(loadFromResource("testPolicy.json")); - public static Map>> getReplicaDetails(String node, Map clusterState) { + public static Map>> getReplicaDetails(String node, + @SuppressWarnings({"rawtypes"})Map clusterState) { ValidatingJsonMap m = ValidatingJsonMap .getDeepCopy(clusterState, 6, true); Map>> result = new LinkedHashMap<>(); @@ -141,9 +144,10 @@ public static Map>> getReplicaDetails(Stri public void testWithCollection() { - ClusterState clusterState = ClusterState.load(1, + @SuppressWarnings({"unchecked"}) + ClusterState clusterState = ClusterState.createFromCollectionMap(1, (Map) loadFromResource("testWithCollection.json"), - ImmutableSet.of("node1", "node2", "node3", "node4", "node5"), CLUSTER_STATE); + ImmutableSet.of("node1", "node2", "node3", "node4", "node5")); DelegatingClusterStateProvider clusterStateProvider = new DelegatingClusterStateProvider(null) { @Override public ClusterState getClusterState() throws IOException { @@ -182,9 +186,11 @@ protected ClusterStateProvider getClusterStateProvider() { return clusterStateProvider; } }; + @SuppressWarnings({"rawtypes"}) Map m = solrClientNodeStateProvider.getNodeValues("node1", ImmutableSet.of("cores", "withCollection")); assertNotNull(m.get("withCollection")); + @SuppressWarnings({"rawtypes"}) Map policies = (Map) Utils.fromJSONString("{" + " 'cluster-preferences': [" + " { 'minimize': 'cores'}," + @@ -195,6 +201,7 @@ protected ClusterStateProvider getClusterStateProvider() { " { 'replica': '<2', 'shard': '#EACH', 'node': '#ANY'}," + " ]" + "}"); + @SuppressWarnings({"unchecked"}) AutoScalingConfig config = new AutoScalingConfig(policies); Policy policy = config.getPolicy(); Policy.Session session = policy.createSession(new DelegatingCloudManager(null) { @@ -210,6 +217,7 @@ public NodeStateProvider getNodeStateProvider() { }); Suggester suggester = session.getSuggester(CollectionAction.ADDREPLICA); suggester.hint(Hint.COLL_SHARD, new Pair<>("comments_coll", "shard1")); + @SuppressWarnings({"rawtypes"}) SolrRequest op = suggester.getSuggestion(); assertNotNull(op); Set nodes = new HashSet<>(2); @@ -232,10 +240,11 @@ public NodeStateProvider getNodeStateProvider() { } public void testWithCollectionSuggestions() { + @SuppressWarnings({"unchecked"}) ClusterState clusterState = - ClusterState.load(1, + ClusterState.createFromCollectionMap(1, (Map) loadFromResource("testWithCollectionSuggestions.json"), - ImmutableSet.of("node1", "node2", "node3", "node4", "node5"), CLUSTER_STATE); + ImmutableSet.of("node1", "node2", "node3", "node4", "node5")); DelegatingClusterStateProvider clusterStateProvider = new DelegatingClusterStateProvider(null) { @Override public ClusterState getClusterState() throws IOException { @@ -274,9 +283,11 @@ protected ClusterStateProvider getClusterStateProvider() { return clusterStateProvider; } }; + @SuppressWarnings({"rawtypes"}) Map m = solrClientNodeStateProvider.getNodeValues("node1", ImmutableSet.of("cores", "withCollection")); assertNotNull(m.get("withCollection")); + @SuppressWarnings({"rawtypes"}) Map policies = (Map) Utils.fromJSONString("{" + " 'cluster-preferences': [" + " { 'maximize': 'freedisk', 'precision': 50}," + @@ -288,6 +299,7 @@ protected ClusterStateProvider getClusterStateProvider() { " ]" + "}"); + @SuppressWarnings({"unchecked"}) List l = PolicyHelper.getSuggestions(new AutoScalingConfig(policies), new DelegatingCloudManager(null) { @Override @@ -324,11 +336,10 @@ public NodeStateProvider getNodeStateProvider() { } public void 
testWithCollectionMoveVsAddSuggestions() throws IOException { - ClusterState clusterState = ClusterState.load(1, + @SuppressWarnings({"unchecked"}) + ClusterState clusterState = ClusterState.createFromCollectionMap(1, (Map) loadFromResource("testWithCollectionMoveVsAddSuggestions.json"), - ImmutableSet.of("node1", "node2", "node3", "node4", "node5", "node6"), - CLUSTER_STATE - ); + ImmutableSet.of("node1", "node2", "node3", "node4", "node5", "node6")); DelegatingClusterStateProvider clusterStateProvider = new DelegatingClusterStateProvider(null) { @Override public ClusterState getClusterState() { @@ -367,9 +378,11 @@ protected ClusterStateProvider getClusterStateProvider() { return clusterStateProvider; } }; + @SuppressWarnings({"rawtypes"}) Map m = solrClientNodeStateProvider.getNodeValues("node1", ImmutableSet.of("cores", "withCollection")); assertNotNull(m.get("withCollection")); + @SuppressWarnings({"rawtypes"}) Map policies = (Map) Utils.fromJSONString("{" + " 'cluster-preferences': [" + " { 'maximize': 'freedisk', 'precision': 50}," + @@ -381,6 +394,7 @@ protected ClusterStateProvider getClusterStateProvider() { " ]" + "}"); + @SuppressWarnings({"unchecked"}) List l = PolicyHelper.getSuggestions(new AutoScalingConfig(policies), new DelegatingCloudManager(null) { @Override @@ -432,9 +446,10 @@ public NodeStateProvider getNodeStateProvider() { } public void testWithCollectionMoveReplica() { - ClusterState clusterState = ClusterState.load(1, + @SuppressWarnings({"unchecked"}) + ClusterState clusterState = ClusterState.createFromCollectionMap(1, (Map) loadFromResource("testWithCollectionMoveReplica.json"), - ImmutableSet.of("node2", "node3", "node4", "node5"), CLUSTER_STATE); + ImmutableSet.of("node2", "node3", "node4", "node5")); DelegatingClusterStateProvider clusterStateProvider = new DelegatingClusterStateProvider(null) { @Override public ClusterState getClusterState() throws IOException { @@ -473,9 +488,11 @@ protected ClusterStateProvider getClusterStateProvider() { return clusterStateProvider; } }; + @SuppressWarnings({"rawtypes"}) Map m = solrClientNodeStateProvider.getNodeValues("node1", ImmutableSet.of("cores", "withCollection")); assertNotNull(m.get("withCollection")); + @SuppressWarnings({"rawtypes"}) Map policies = (Map) Utils.fromJSONString("{" + " 'cluster-preferences': [" + " { 'minimize': 'cores'}," + @@ -486,6 +503,7 @@ protected ClusterStateProvider getClusterStateProvider() { " { 'replica': '<2', 'shard': '#EACH', 'node': '#ANY'}," + " ]" + "}"); + @SuppressWarnings({"unchecked"}) AutoScalingConfig config = new AutoScalingConfig(policies); Policy policy = config.getPolicy(); Policy.Session session = policy.createSession(new DelegatingCloudManager(null) { @@ -502,6 +520,7 @@ public NodeStateProvider getNodeStateProvider() { Suggester suggester = session.getSuggester(CollectionAction.MOVEREPLICA); suggester.hint(Hint.COLL_SHARD, new Pair<>("comments_coll", "shard1")); suggester.hint(Hint.SRC_NODE, "node1"); + @SuppressWarnings({"rawtypes"}) SolrRequest op = suggester.getSuggestion(); assertNotNull(op); assertEquals("node2 should have been selected by move replica", "node2", @@ -808,7 +827,7 @@ public void testEqualFunction() { "}"; - ClusterState clusterState = ClusterState.load(1, clusterStateStr.getBytes(UTF_8), + ClusterState clusterState = ClusterState.createFromJson(1, clusterStateStr.getBytes(UTF_8), ImmutableSet.of("node1", "node2", "node3", "node4", "node5")); DelegatingClusterStateProvider clusterStateProvider = new DelegatingClusterStateProvider(null) { 
@Override @@ -850,6 +869,7 @@ protected ClusterStateProvider getClusterStateProvider() { } }; + @SuppressWarnings({"rawtypes"}) Map policies = (Map) Utils.fromJSONString("{" + " 'cluster-preferences': [" + " { 'minimize': 'cores', 'precision': 50}" + @@ -858,6 +878,7 @@ protected ClusterStateProvider getClusterStateProvider() { " { 'replica': '#EQUAL', 'node': '#ANY'}," + " ]" + "}"); + @SuppressWarnings({"unchecked"}) AutoScalingConfig config = new AutoScalingConfig(policies); Policy policy = config.getPolicy(); Policy.Session session = policy.createSession(new DelegatingCloudManager(null) { @@ -984,13 +1005,16 @@ public void testNodeLost() { " 'maximize':'freedisk'," + " 'precision':100}]}"; + @SuppressWarnings({"unchecked"}) Policy policy = new Policy((Map) Utils.fromJSONString(autoScalingjson)); Policy.Session session = policy.createSession(cloudManagerWithData(dataproviderdata)); + @SuppressWarnings({"rawtypes"}) SolrRequest op = session.getSuggester(MOVEREPLICA).hint(Hint.SRC_NODE, "127.0.0.1:65427_solr").getSuggestion(); assertNotNull(op); assertEquals("127.0.0.1:65434_solr", op.getParams().get("targetNode")); } + @SuppressWarnings({"unchecked", "rawtypes"}) public void testNodeLostMultipleReplica() { String nodeValues = " {" + " 'node4':{" + @@ -1151,6 +1175,7 @@ private static SolrCloudManager cloudManagerWithData(String data) { return cloudManagerWithData((Map) Utils.fromJSONString(data)); } + @SuppressWarnings({"unchecked", "rawtypes"}) static SolrCloudManager cloudManagerWithData(Map m) { Map replicaInfo = (Map) m.get("replicaInfo"); replicaInfo.forEach((node, val) -> { @@ -1172,6 +1197,7 @@ static SolrCloudManager cloudManagerWithData(Map m) { }); }); + @SuppressWarnings({"unchecked"}) AutoScalingConfig asc = m.containsKey("autoscalingJson") ? new AutoScalingConfig((Map) m.get("autoscalingJson")) : null; return new DelegatingCloudManager(null) { @@ -1190,10 +1216,11 @@ public ClusterStateProvider getClusterStateProvider() { return new DelegatingClusterStateProvider(null) { @Override public ClusterState getClusterState() throws IOException { - return ClusterState.load(0, new HashMap<>(), getLiveNodes(), CLUSTER_STATE); + return ClusterState.createFromCollectionMap(0, new HashMap<>(), getLiveNodes()); } @Override + @SuppressWarnings({"unchecked"}) public Set getLiveNodes() { return new HashSet<>((Collection) m.get("liveNodes")); } @@ -1205,12 +1232,14 @@ public NodeStateProvider getNodeStateProvider() { return new DelegatingNodeStateProvider(null) { @Override public Map getNodeValues(String node, Collection tags) { + @SuppressWarnings({"unchecked"}) Map result = (Map) Utils.getObjectByPath(m, false, Arrays.asList("nodeValues", node)); return result == null ? new HashMap<>() : result; } @Override public Map>> getReplicaInfo(String node, Collection keys) { + @SuppressWarnings({"unchecked"}) Map>> result = (Map>>) Utils.getObjectByPath(m, false, Arrays.asList("replicaInfo", node)); return result == null ? 
new HashMap<>() : result; } @@ -1219,6 +1248,7 @@ public Map>> getReplicaInfo(String node, C }; } + @SuppressWarnings({"unchecked", "rawtypes"}) public void testPolicyWithReplicaType() { Map policies = (Map) Utils.fromJSONString("{" + " 'cluster-preferences': [" + @@ -1301,12 +1331,14 @@ public void testPolicyWithReplicaType() { public void testMoveReplicasInMultipleCollections() throws IOException { + @SuppressWarnings({"unchecked", "rawtypes"}) Map nodeValues = (Map) Utils.fromJSONString("{" + "node1:{cores:2}," + "node3:{cores:4}" + "node2:{cores:2}" + "}"); Policy policy = new Policy(new HashMap<>()); + @SuppressWarnings({"unchecked"}) Suggester suggester = policy.createSession(getSolrCloudManager(nodeValues, (Map) loadFromResource("testMoveReplicasInMultipleCollections.json"))) .getSuggester(MOVEREPLICA) @@ -1314,6 +1346,7 @@ public void testMoveReplicasInMultipleCollections() throws IOException { .hint(Hint.COLL, "collection2") .hint(Suggester.Hint.SRC_NODE, "node2") .forceOperation(true); + @SuppressWarnings({"rawtypes"}) SolrRequest op = suggester.getSuggestion(); assertNotNull(op); assertEquals("collection2", op.getParams().get("collection")); @@ -1345,6 +1378,7 @@ public void testMoveReplicasInMultipleCollections() throws IOException { public void testMultipleCollections() { + @SuppressWarnings({"rawtypes"}) Map policies = (Map) Utils.fromJSONString("{" + " 'cluster-preferences': [" + " { 'maximize': 'freedisk', 'precision': 50}," + @@ -1376,18 +1410,21 @@ public void testMultipleCollections() { "}"); } + @SuppressWarnings({"unchecked", "rawtypes"}) Map nodeValues = (Map) Utils.fromJSONString("{" + "node1:{cores:12, freedisk: 334, heapUsage:10480, rack: rack4, sysprop.fs: slowdisk}," + "node2:{cores:4, freedisk: 749, heapUsage:6873, rack: rack3, sysprop.fs: unknown}," + "node3:{cores:7, freedisk: 262, heapUsage:7834, rack: rack2, sysprop.fs : ssd}," + "node4:{cores:8, freedisk: 375, heapUsage:16900, nodeRole:overseer, rack: rack1, sysprop.fs: unknown}" + "}"); + @SuppressWarnings({"unchecked"}) Policy policy = new Policy(policies); Suggester suggester = policy.createSession(getSolrCloudManager(nodeValues, clusterState)) .getSuggester(ADDREPLICA) .hint(Hint.REPLICATYPE, Replica.Type.PULL) .hint(Hint.COLL_SHARD, new Pair<>("newColl", "shard1")) .hint(Hint.COLL_SHARD, new Pair<>("newColl2", "shard1")); + @SuppressWarnings({"rawtypes"}) SolrRequest op; int countOp = 0; int countNewCollOp = 0; @@ -1444,6 +1481,7 @@ public NodeStateProvider getNodeStateProvider() { return new DelegatingNodeStateProvider(null) { @Override public Map>> getReplicaInfo(String node, Collection keys) { + @SuppressWarnings({"unchecked"}) Map>> o = (Map>>) Utils.fromJSONString("{c1: {s0:[{}]}}"); Utils.setObjectByPath(o, "c1/s0[0]", new ReplicaInfo("r0", "c1.s0", "c1", "s0", Replica.Type.NRT, "nodex", new HashMap<>())); return o; @@ -1483,6 +1521,7 @@ public Set getLiveNodes() { public void testMerge() { + @SuppressWarnings({"rawtypes"}) Map map = (Map) Utils.fromJSONString("{" + " 'cluster-preferences': [" + " { 'maximize': 'freedisk', 'precision': 50}," + @@ -1520,6 +1559,7 @@ public void testMerge() { "}"); } + @SuppressWarnings({"unchecked"}) Policy policy = new Policy(map); List clauses = Policy.mergePolicies("mycoll", policy.getPolicies().get("policy1"), policy.getClusterPolicy()); Collections.sort(clauses); @@ -1549,6 +1589,7 @@ public void testConditionsSort() { " }"; } + @SuppressWarnings({"unchecked"}) Policy p = new Policy((Map) Utils.fromJSONString(rules)); List clauses = new 
ArrayList<>(p.getClusterPolicy()); Collections.sort(clauses); @@ -1556,6 +1597,7 @@ public void testConditionsSort() { assertEquals("sysprop.rack", clauses.get(0).tag.getName()); } + @SuppressWarnings({"unchecked", "rawtypes"}) public void testRules() { String rules = "{" + "cluster-policy:[" + @@ -1630,6 +1672,7 @@ public void testSessionCaching() throws IOException, InterruptedException { " { 'replica':'<2', 'shard':'#EACH', 'node':'#ANY'}," + " { 'nodeRole':'overseer','replica':0}]," + " 'cluster-preferences':[{'minimize':'cores'}]}"; + @SuppressWarnings({"unchecked"}) Policy policy = new Policy((Map) Utils.fromJSONString(autoScalingjson)); // PolicyHelper.SESSION_REF.set(ref1); String nodeValues = " {" + @@ -1648,6 +1691,7 @@ public void testSessionCaching() throws IOException, InterruptedException { "}"; + @SuppressWarnings({"rawtypes"}) Map policies = (Map) Utils.fromJSONString("{" + " 'cluster-preferences': [" + " { 'maximize': 'freedisk', 'precision': 50}," + @@ -1658,7 +1702,9 @@ public void testSessionCaching() throws IOException, InterruptedException { " { 'replica': '<2', 'shard': '#EACH', 'node': '#ANY'}," + " ]" + "}"); + @SuppressWarnings({"unchecked"}) AutoScalingConfig config = new AutoScalingConfig(policies); + @SuppressWarnings({"unchecked", "rawtypes"}) final SolrCloudManager solrCloudManager = new DelegatingCloudManager(getSolrCloudManager((Map) Utils.fromJSONString(nodeValues), clusterState)) { @Override @@ -1734,17 +1780,20 @@ public void testNegativeConditions() { " 'cluster-preferences':[" + " {'minimize':'cores', 'precision':3}," + " {'maximize':'freedisk','precision':100}]}"; + @SuppressWarnings({"unchecked", "rawtypes"}) Map nodeValues = (Map) Utils.fromJSONString("{" + "node1:{cores:12, freedisk: 334, heapUsage:10480, rack: rack4, sysprop.fs: slowdisk}," + "node2:{cores:4, freedisk: 749, heapUsage:6873, rack: rack3, sysprop.fs: slowdisk}," + "node3:{cores:7, freedisk: 262, heapUsage:7834, rack: rack2, sysprop.fs : ssd}," + "node4:{cores:8, freedisk: 375, heapUsage:16900, nodeRole:overseer, rack: rack1, sysprop.fs: slowdisk}" + "}"); + @SuppressWarnings({"unchecked"}) Policy policy = new Policy((Map) Utils.fromJSONString(autoscaleJson)); SolrCloudManager cloudManager = getSolrCloudManager(nodeValues, clusterState); Policy.Session session = policy.createSession(cloudManager); for (int i = 0; i < 3; i++) { Suggester suggester = session.getSuggester(ADDREPLICA); + @SuppressWarnings({"rawtypes"}) SolrRequest op = suggester .hint(Hint.COLL_SHARD, new Pair<>("newColl", "shard1")) .getSuggestion(); @@ -1777,6 +1826,7 @@ public void testGreedyConditions() { " {'maximize':'freedisk','precision':100}]}"; } + @SuppressWarnings({"unchecked", "rawtypes"}) Map nodeValues = (Map) Utils.fromJSONString("{" + "node1:{cores:12, freedisk: 334, heapUsage:10480, rack: rack4}," + "node2:{cores:4, freedisk: 749, heapUsage:6873, rack: rack3}," + @@ -1784,10 +1834,12 @@ public void testGreedyConditions() { "node4:{cores:8, freedisk: 375, heapUsage:16900, nodeRole:overseer, rack: rack1}" + "}"); + @SuppressWarnings({"unchecked"}) Policy policy = new Policy((Map) Utils.fromJSONString(autoscaleJson)); SolrCloudManager cloudManager = getSolrCloudManager(nodeValues, clusterState); Policy.Session session = policy.createSession(cloudManager); Suggester suggester = session.getSuggester(ADDREPLICA); + @SuppressWarnings({"rawtypes"}) SolrRequest op = suggester .hint(Hint.COLL_SHARD, new Pair<>("newColl", "shard1")) .getSuggestion(); @@ -1810,6 +1862,7 @@ public void testGreedyConditions() { 
assertEquals("node2", op.getParams().get("node")); } + @SuppressWarnings({"unchecked", "rawtypes"}) public void testMoveReplica() { String autoscaleJson = "{" + " 'cluster-policy':[" + @@ -1841,6 +1894,7 @@ public void testMoveReplica() { new ReplicaInfo("core_node1", "core_node1", "compute_plan_action_test", "shard1", Replica.Type.NRT, "127.0.0.1:60089_solr", Collections.emptyMap()), new ReplicaInfo("core_node2", "core_node2", "compute_plan_action_test", "shard1", Replica.Type.NRT, "127.0.0.1:60089_solr", Collections.emptyMap()))); + @SuppressWarnings({"unchecked", "rawtypes"}) Map> tagsMap = (Map) Utils.fromJSONString("{" + " '127.0.0.1:60099_solr':{" + " 'cores':0," + @@ -1849,9 +1903,11 @@ public void testMoveReplica() { " 'cores':2," + " 'freedisk':918005641216}}"); + @SuppressWarnings({"unchecked"}) Policy policy = new Policy((Map) Utils.fromJSONString(autoscaleJson)); Policy.Session session = policy.createSession(new DelegatingCloudManager(null) { @Override + @SuppressWarnings({"unchecked"}) public ClusterStateProvider getClusterStateProvider() { return new DelegatingClusterStateProvider(null) { @Override @@ -1871,6 +1927,7 @@ public Map getNodeValues(String node, Collection tags) { } @Override + @SuppressWarnings({"unchecked"}) public Map>> getReplicaInfo(String node, Collection keys) { return (Map>>) replicaInfoMap.get(node); } @@ -1879,6 +1936,7 @@ public Map>> getReplicaInfo(String node, C }); Suggester suggester = session.getSuggester(MOVEREPLICA) .hint(Hint.TARGET_NODE, "127.0.0.1:60099_solr"); + @SuppressWarnings({"rawtypes"}) SolrRequest op = suggester.getSuggestion(); assertNotNull("expect a non null operation", op); } @@ -1930,12 +1988,14 @@ public void testOtherTag() { } + @SuppressWarnings({"unchecked", "rawtypes"}) Map nodeValues = (Map) Utils.fromJSONString("{" + "node1:{cores:12, freedisk: 334, heapUsage:10480, rack: rack4}," + "node2:{cores:4, freedisk: 749, heapUsage:6873, rack: rack3}," + "node3:{cores:7, freedisk: 262, heapUsage:7834, rack: rack2}," + "node4:{cores:8, freedisk: 375, heapUsage:16900, nodeRole:overseer, sysprop.rack: rack1}" + "}"); + @SuppressWarnings({"unchecked"}) Policy policy = new Policy((Map) Utils.fromJSONString(rules)); SolrCloudManager cloudManager = getSolrCloudManager(nodeValues, clusterState); SolrCloudManager cdp = new DelegatingCloudManager(null) { @@ -1979,11 +2039,13 @@ public String getPolicyNameByCollection(String coll) { assertNotNull(op); assertEquals("node2", op.getNode()); } + @SuppressWarnings({"rawtypes"}) static SolrCloudManager getSolrCloudManager(final Map nodeValues, String clusterS) { return getSolrCloudManager(nodeValues,(Map) Utils.fromJSONString(clusterS)); } - private static SolrCloudManager getSolrCloudManager(final Map nodeValues, Map clusterS) { + private static SolrCloudManager getSolrCloudManager(@SuppressWarnings({"rawtypes"})final Map nodeValues, + @SuppressWarnings({"rawtypes"})Map clusterS) { return new SolrCloudManager() { ObjectCache objectCache = new ObjectCache(); @@ -2041,7 +2103,7 @@ public DistributedQueueFactory getDistributedQueueFactory() { } @Override - public SolrResponse request(SolrRequest req) { + public SolrResponse request(@SuppressWarnings({"rawtypes"})SolrRequest req) { return null; } @@ -2057,6 +2119,7 @@ public void testEmptyClusterState() { " 'replica':1," + " 'shard':'#EACH'," + " 'port':'50096'}]}}"; + @SuppressWarnings({"unchecked", "rawtypes"}) Map nodeValues = (Map) Utils.fromJSONString("{" + " '127.0.0.1:50097_solr':{" + " 'cores':0," + @@ -2092,6 +2155,7 @@ public Map>> 
getReplicaInfo(String node, C }; } }; + @SuppressWarnings({"unchecked"}) List locations = PolicyHelper.getReplicaLocations( "newColl", new AutoScalingConfig((Map) Utils.fromJSONString(autoScaleJson)), dataProvider, Collections.singletonMap("newColl", "c1"), Arrays.asList("shard1", "shard2"), 1, 0, 0, null); @@ -2139,6 +2203,7 @@ public void testMultiReplicaPlacement() { } + @SuppressWarnings({"unchecked", "rawtypes"}) Map nodeValues = (Map) Utils.fromJSONString("{" + "node1:{cores:12, freedisk: 334, heap:10480, sysprop.rack:rack3}," + "node2:{cores:4, freedisk: 749, heap:6873, sysprop.fs : ssd, sysprop.rack:rack1}," + @@ -2174,6 +2239,7 @@ public Set getLiveNodes() { }; } }; + @SuppressWarnings({"unchecked"}) List locations = PolicyHelper.getReplicaLocations( "newColl", new AutoScalingConfig((Map) Utils.fromJSONString(autoScaleJson)), cloudManager, Collections.singletonMap("newColl", "policy1"), Arrays.asList("shard1", "shard2"), 3, 0, 0, null); @@ -2185,10 +2251,12 @@ public void testMoveReplicaSuggester() { "{'cores':'<10', 'node':'#ANY'}," + "{'replica':'<2', 'shard':'#EACH','node':'#ANY'}]," + "'cluster-preferences':[{'minimize':'cores'}]}"; + @SuppressWarnings({"unchecked"}) Policy policy = new Policy((Map) Utils.fromJSONString(autoScalingjson)); Policy.Session session = policy.createSession(cloudManagerWithData((Map) loadFromResource("testMoveReplicaSuggester.json"))); Suggester suggester = session.getSuggester(MOVEREPLICA) .hint(Hint.TARGET_NODE, "10.0.0.6:7574_solr"); + @SuppressWarnings({"rawtypes"}) SolrRequest op = suggester.getSuggestion(); assertNotNull(op); suggester = suggester.getSession() @@ -2221,10 +2289,12 @@ public void testComputePlanAfterNodeAdded() { " {nodeset:{ nodeRole:overseer},replica:0}]}"; } + @SuppressWarnings({"unchecked"}) Policy policy = new Policy((Map) Utils.fromJSONString(autoScalingjson)); Policy.Session session = policy.createSession(cloudManagerWithData((Map) loadFromResource("testComputePlanAfterNodeAdded.json"))); Suggester suggester = session.getSuggester(CollectionParams.CollectionAction.MOVEREPLICA) .hint(Hint.TARGET_NODE, "127.0.0.1:51147_solr"); + @SuppressWarnings({"rawtypes"}) SolrRequest op = suggester.getSuggestion(); log.info("{}", op); assertNotNull("operation expected ", op); @@ -2244,6 +2314,7 @@ public void testReplicaCountSuggestions() { " cluster-preferences :[{ minimize : cores }]}"; } + @SuppressWarnings({"unchecked"}) List l = PolicyHelper.getSuggestions(new AutoScalingConfig((Map) Utils.fromJSONString(autoScalingjson)), cloudManagerWithData((Map) loadFromResource("testReplicaCountSuggestions.json"))); assertFalse(l.isEmpty()); @@ -2261,6 +2332,7 @@ public void testReplicaCountSuggestions() { } + @SuppressWarnings({"unchecked", "rawtypes"}) public void testReplicaPercentage() { List l = (List) loadFromResource("testReplicaPercentage.json"); String autoScalingjson = " { cluster-policy:[" + @@ -2303,6 +2375,7 @@ public void testReplicaPercentage() { } + @SuppressWarnings({"unchecked"}) public void testReplicaZonesPercentage() { String autoScalingjson = " { cluster-policy:[" + " { replica :'33%', shard: '#EACH', sysprop.az : east}," + @@ -2320,6 +2393,7 @@ public void testReplicaZonesPercentage() { int westCount = 0, eastCount = 0; for (int i = 0; i < 12; i++) { + @SuppressWarnings({"rawtypes"}) SolrRequest suggestion = txn.getCurrentSession() .getSuggester(ADDREPLICA) .hint(Hint.COLL_SHARD, new Pair<>(COLL_NAME, "shard1")) @@ -2350,7 +2424,9 @@ public void testReplicaZonesPercentage() { } + @SuppressWarnings({"unchecked"}) public 
void testFreeDiskDeviation() { + @SuppressWarnings({"rawtypes"}) Map map = (Map) loadFromResource("testFreeDiskDeviation.json"); AutoScalingConfig cfg = new AutoScalingConfig((Map) map.get("config")); if(useNodeset){ @@ -2390,6 +2466,7 @@ public void testFreeDiskDeviation() { } + @SuppressWarnings({"unchecked"}) public void testFreeDiskSuggestions() { String autoScalingjson = " { cluster-policy:[" + " { replica :'0', freedisk:'<1000'}," + @@ -2459,6 +2536,7 @@ public void testCoresSuggestions() { String autoScalingjson = " { cluster-policy:[" + " { cores :'<3', node :'#ANY'}]," + " cluster-preferences :[{ minimize : cores }]}"; + @SuppressWarnings({"unchecked"}) AutoScalingConfig cfg = new AutoScalingConfig((Map) Utils.fromJSONString(autoScalingjson)); List violations = cfg.getPolicy().createSession(cloudManagerWithData((Map) loadFromResource("testCoresSuggestions.json"))).getViolations(); assertFalse(violations.isEmpty()); @@ -2498,6 +2576,7 @@ public void testSyspropSuggestions1() { } + @SuppressWarnings({"unchecked"}) AutoScalingConfig cfg = new AutoScalingConfig((Map) Utils.fromJSONString(autoScalingjson)); List violations = cfg.getPolicy().createSession(cloudManagerWithData((Map) loadFromResource("testSyspropSuggestions1.json"))).getViolations(); assertEquals("expected 2 violations", 2, violations.size()); @@ -2531,6 +2610,7 @@ public void testPortSuggestions() { "}"; } + @SuppressWarnings({"unchecked"}) AutoScalingConfig cfg = new AutoScalingConfig((Map) Utils.fromJSONString(autoScalingjson)); List violations = cfg.getPolicy().createSession(cloudManagerWithData((Map) loadFromResource("testPortSuggestions.json"))).getViolations(); assertEquals(2, violations.size()); @@ -2547,6 +2627,7 @@ public void testDiskSpaceHint() { " cluster-policy:[{cores:'<10',node:'#ANY'}," + " {replica:'<2', shard:'#EACH',node:'#ANY'}," + " { nodeRole:overseer,replica:0}]}"; + @SuppressWarnings({"unchecked"}) Policy policy = new Policy((Map) Utils.fromJSONString(autoScalingjson)); Policy.Session session = policy.createSession(cloudManagerWithData((Map) loadFromResource("testDiskSpaceHint.json"))); Suggester suggester = session.getSuggester(CollectionAction.ADDREPLICA) @@ -2575,6 +2656,7 @@ public void testDiskSpaceReqd() { "}"; + @SuppressWarnings({"unchecked", "rawtypes"}) Map nodeValues = (Map) Utils.fromJSONString("{" + "node1:{cores:12, freedisk: 334, heap:10480, sysprop.rack:rack3}," + "node2:{cores:4, freedisk: 262, heap:6873, sysprop.fs : ssd, sysprop.rack:rack1}," + @@ -2594,6 +2676,7 @@ public Map getNodeValues(String node, Collection keys) { } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public Map>> getReplicaInfo(String node, Collection keys) { if (node.equals("node1")) { Map m = Utils.makeMap("newColl", @@ -2637,6 +2720,7 @@ public Replica getLeader(String sliceName) { }; } }; + @SuppressWarnings({"unchecked"}) List locations = PolicyHelper.getReplicaLocations( "newColl", new AutoScalingConfig((Map) Utils.fromJSONString(autoScaleJson)), cloudManager, null, Arrays.asList("shard1", "shard2"), 1, 0, 0, null); @@ -2678,6 +2762,7 @@ public void testMoveReplicaLeaderlast() { } public void testScheduledTriggerFailure() throws Exception { + @SuppressWarnings({"rawtypes"}) Map jsonObj = (Map) loadFromResource("testScheduledTriggerFailure.json"); SolrCloudManager cloudManager = createCloudManager(jsonObj); Suggester suggester = createSuggester(cloudManager, jsonObj, null); @@ -2696,6 +2781,7 @@ public void testScheduledTriggerFailure() throws Exception { } public void testUtilizeNodeFailure() 
throws Exception { + @SuppressWarnings({"rawtypes"}) Map jsonObj = (Map) loadFromResource("testUtilizeNodeFailure.json"); //(Map) Utils.fromJSONString(state); SolrCloudManager cloudManager = createCloudManager(jsonObj); Suggester suggester = createSuggester(cloudManager, jsonObj, null); @@ -2714,6 +2800,7 @@ public void testUtilizeNodeFailure() throws Exception { } public void testUtilizeNodeFailure2() throws Exception { + @SuppressWarnings({"rawtypes"}) Map jsonObj = (Map) loadFromResource("testUtilizeNodeFailure2.json"); SolrCloudManager cloudManager = createCloudManager(jsonObj); Suggester suggester = createSuggester(cloudManager, jsonObj, null); @@ -2732,6 +2819,7 @@ public void testUtilizeNodeFailure2() throws Exception { } //SOLR-12358 + @SuppressWarnings({"unchecked", "rawtypes"}) public void testSortError() { Policy policy = new Policy((Map) Utils.fromJSONString("{cluster-preferences: [{minimize : cores, precision:1}, " + "{maximize : freedisk, precision: 50}, " + @@ -2791,6 +2879,7 @@ public void testViolationOutput() throws IOException { " ]" + "}"; + @SuppressWarnings({"unchecked"}) AutoScalingConfig cfg = new AutoScalingConfig((Map) Utils.fromJSONString(autoScalingjson)); List violations = cfg.getPolicy().createSession(cloudManagerWithData((Map) loadFromResource("testViolationOutput.json"))).getViolations(); StringWriter writer = new StringWriter(); @@ -2811,6 +2900,7 @@ public void testViolationOutput() throws IOException { } + @SuppressWarnings({"unchecked"}) public void testFreediskPercentage() { String autoScalingjson = "{" + @@ -2848,6 +2938,7 @@ public void testFreediskPercentage() { } + @SuppressWarnings({"unchecked", "rawtypes"}) public static void fixRequiredProps(Map testData) { Map clusterState = (Map) testData.get("clusterstate"); clusterState.forEach((collection, val) -> { @@ -2890,6 +2981,7 @@ public static void fixRequiredProps(Map testData) { }); } + @SuppressWarnings({"unchecked", "rawtypes"}) public void testAutoscalingPreferencesUsedWithNoPolicy() throws IOException, InterruptedException { Map m = (Map) loadFromResource("testAutoscalingPreferencesUsedWithNoPolicy.json"); fixRequiredProps(m); @@ -2915,6 +3007,7 @@ public void testAutoscalingPreferencesUsedWithNoPolicy() throws IOException, Int }); }); + @SuppressWarnings({"unchecked"}) AutoScalingConfig asc = m.containsKey("autoscalingJson") ? new AutoScalingConfig((Map) m.get("autoscalingJson")) : new AutoScalingConfig(Collections.emptyMap()); DelegatingCloudManager cloudManager = new DelegatingCloudManager(null) { @@ -2932,13 +3025,14 @@ public AutoScalingConfig getAutoScalingConfig() { public ClusterStateProvider getClusterStateProvider() { return new DelegatingClusterStateProvider(null) { @Override + @SuppressWarnings({"unchecked"}) public Set getLiveNodes() { return new HashSet<>((Collection) m.get("liveNodes")); } @Override - public ClusterState getClusterState() throws IOException { - return ClusterState.load(0, clusterState, getLiveNodes(), ZkStateReader.getCollectionPath("c1")); + public ClusterState getClusterState() { + return ClusterState.createFromCollectionMap(0, clusterState, getLiveNodes()); } @Override @@ -2953,12 +3047,14 @@ public NodeStateProvider getNodeStateProvider() { return new DelegatingNodeStateProvider(null) { @Override public Map getNodeValues(String node, Collection tags) { + @SuppressWarnings({"unchecked"}) Map result = (Map) Utils.getObjectByPath(m, false, Arrays.asList("nodeValues", node)); return result == null ? 
new HashMap<>() : result; } @Override public Map>> getReplicaInfo(String node, Collection keys) { + @SuppressWarnings({"unchecked"}) Map>> result = (Map>>) Utils.getObjectByPath(m, false, Arrays.asList("replicaInfo", node)); return result == null ? new HashMap<>() : result; } @@ -2984,11 +3080,15 @@ public Map>> getReplicaInfo(String node, C } public void testPolicyForEmptyCollection() throws IOException, InterruptedException { + @SuppressWarnings({"rawtypes"}) Map m = (Map) loadFromResource("testEmptyCollection.json"); + @SuppressWarnings({"unchecked", "rawtypes"}) Map clusterStateMap = (Map) m.remove("clusterstate"); + @SuppressWarnings({"unchecked", "rawtypes"}) Map replicaInfoMap = (Map) m.remove("replicaInfo"); - ClusterState clusterState = ClusterState.load(1, clusterStateMap, ImmutableSet.of("node1", "node2"), CLUSTER_STATE); + @SuppressWarnings({"unchecked"}) + ClusterState clusterState = ClusterState.createFromCollectionMap(1, clusterStateMap, ImmutableSet.of("node1", "node2")); List shards = Arrays.asList("shard1", "shard2", "shard3"); @@ -3027,11 +3127,13 @@ public AutoScalingConfig getAutoScalingConfig() { public NodeStateProvider getNodeStateProvider() { return new DelegatingNodeStateProvider(null) { @Override + @SuppressWarnings({"unchecked"}) public Map getNodeValues(String node, Collection keys) { return Collections.EMPTY_MAP; } @Override + @SuppressWarnings({"unchecked"}) public Map>> getReplicaInfo(String node, Collection keys) { //return Collections.EMPTY_MAP; return replicaInfoMap; @@ -3060,6 +3162,7 @@ public Map>> getReplicaInfo(String node, C * @see Assign#usePolicyFramework(DocCollection, SolrCloudManager) */ public void testPolicyMapWriterWithEmptyPreferences() throws IOException { + @SuppressWarnings({"rawtypes"}) List defaultPreferences = Policy.DEFAULT_PREFERENCES .stream().map(preference -> preference.getOriginal()).collect(Collectors.toList()); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy2.java b/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy2.java index 63b7da4b3664..16328889af34 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy2.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy2.java @@ -60,6 +60,7 @@ public TestPolicy2(){ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + @SuppressWarnings({"unchecked", "rawtypes"}) public void testEqualOnNonNode() { List l = (List) loadFromResource("testEqualOnNonNode.json"); String autoScalingjson = "{cluster-policy:[" + @@ -174,10 +175,11 @@ public void testEqualOnNonNode() { } + @SuppressWarnings({"unchecked", "rawtypes"}) static SolrCloudManager createCloudManager(Map m, Map meta) { Map nodeVals = (Map) meta.get("nodeValues"); List replicaVals = (List) meta.get("replicaValues"); - ClusterState clusterState = ClusterState.load(0, m, Collections.emptySet(), null); + ClusterState clusterState = ClusterState.createFromCollectionMap(0, m, Collections.emptySet()); Map coreCount = new LinkedHashMap<>(); Set nodes = new HashSet<>(nodeVals.keySet()); clusterState.getCollectionStates().forEach((s, collectionRef) -> collectionRef.get() @@ -228,6 +230,7 @@ protected Map fetchTagValues(String node, Collection tag @Override public Map>> getReplicaInfo(String node, Collection keys) { + @SuppressWarnings({"unchecked"}) Map>> result = nodeVsCollectionVsShardVsReplicaInfo.computeIfAbsent(node, Utils.NEW_HASHMAP_FUN); if 
(!keys.isEmpty()) { Row.forEachReplica(result, replicaInfo -> { @@ -248,6 +251,7 @@ public Map>> getReplicaInfo(String node, C }; } + @SuppressWarnings({"unchecked"}) public void testAutoScalingHandlerFailure() { Map m = (Map) loadFromResource("testAutoScalingHandlerFailure.json"); @@ -260,6 +264,7 @@ public void testAutoScalingHandlerFailure() { } + @SuppressWarnings({"unchecked", "rawtypes"}) static SolrCloudManager createCloudManagerFromDiagnostics(Map m) { List sortedNodes = (List) getObjectByPath(m, false, "diagnostics/sortedNodes"); Set liveNodes = new HashSet<>(); @@ -290,6 +295,7 @@ public Map>> getReplicaInfo(String node, C } @Override + @SuppressWarnings({"unchecked", "rawtypes"}) public Map getNodeValues(String node, Collection tags) { for (Map n : sortedNodes) if (n.get("node").equals(node)) return n; return Collections.emptyMap(); @@ -305,9 +311,10 @@ public NodeStateProvider getNodeStateProvider() { @Override public ClusterStateProvider getClusterStateProvider() { if (clusterState == null) { + @SuppressWarnings({"rawtypes"}) Map map = (Map) getObjectByPath(m, false, "cluster/collections"); if (map == null) map = new HashMap<>(); - clusterState = ClusterState.load(0, map, liveNodes, "/clusterstate.json"); + clusterState = ClusterState.createFromCollectionMap(0, map, liveNodes); } return new DelegatingClusterStateProvider(null) { @@ -332,7 +339,9 @@ public Set getLiveNodes() { } public void testHostAttribute() { + @SuppressWarnings({"unchecked"}) Map m = (Map) loadFromResource("testHostAttribute.json"); + @SuppressWarnings({"unchecked"}) Map conf = (Map) getObjectByPath(m, false, "diagnostics/config"); Policy policy = new Policy(conf); SolrCloudManager cloudManagerFromDiagnostics = createCloudManagerFromDiagnostics(m); @@ -349,6 +358,7 @@ public void testHostAttribute() { suggestion._get("operation/command/move-replica/targetNode", null))); } } + @SuppressWarnings({"unchecked"}) public void testSysPropSuggestions() { Map m = (Map) loadFromResource("testSysPropSuggestions.json"); @@ -411,8 +421,10 @@ public void testSuggestionsRebalanceOnly() { "{'replica':'<5','shard':'#EACH', nodeset:{'sysprop.zone':['east','west']}}]}"; } + @SuppressWarnings({"unchecked"}) Map m = (Map) loadFromResource("testSuggestionsRebalanceOnly.json"); SolrCloudManager cloudManagerFromDiagnostics = createCloudManagerFromDiagnostics(m); + @SuppressWarnings({"unchecked"}) AutoScalingConfig autoScalingConfig = new AutoScalingConfig((Map) Utils.fromJSONString(conf)); List suggestions = PolicyHelper.getSuggestions(autoScalingConfig, cloudManagerFromDiagnostics); @@ -424,9 +436,11 @@ public void testSuggestionsRebalanceOnly() { } public void testSuggestionsRebalance2() { + @SuppressWarnings({"unchecked"}) Map m = (Map) loadFromResource("testSuggestionsRebalance2.json"); SolrCloudManager cloudManagerFromDiagnostics = createCloudManagerFromDiagnostics(m); + @SuppressWarnings({"unchecked"}) AutoScalingConfig autoScalingConfig = new AutoScalingConfig((Map) getObjectByPath(m, false, "diagnostics/config")); List suggestions = PolicyHelper.getSuggestions(autoScalingConfig, cloudManagerFromDiagnostics); @@ -440,8 +454,10 @@ public void testSuggestionsRebalance2() { } public void testAddMissingReplica() { + @SuppressWarnings({"unchecked"}) Map m = (Map) loadFromResource("testAddMissingReplica.json"); SolrCloudManager cloudManagerFromDiagnostics = createCloudManagerFromDiagnostics(m); + @SuppressWarnings({"unchecked"}) AutoScalingConfig autoScalingConfig = new AutoScalingConfig((Map) getObjectByPath(m, false, 
"diagnostics/config")); List suggestions = PolicyHelper.getSuggestions(autoScalingConfig, cloudManagerFromDiagnostics); @@ -455,10 +471,13 @@ public void testAddMissingReplica() { } public void testCreateCollectionWithEmptyPolicy() { + @SuppressWarnings({"rawtypes"}) Map m = (Map) loadFromResource("testCreateCollectionWithEmptyPolicy.json"); + @SuppressWarnings({"unchecked"}) SolrCloudManager cloudManagerFromDiagnostics = createCloudManagerFromDiagnostics(m); - AutoScalingConfig autoScalingConfig = new AutoScalingConfig(new HashMap()); + AutoScalingConfig autoScalingConfig = new AutoScalingConfig(new HashMap<>()); //POSITIONS : [shard1:1[NRT] @127.0.0.1:49469_solr, shard1:2[NRT] @127.0.0.1:49469_solr] + @SuppressWarnings({"unchecked"}) List positions = PolicyHelper.getReplicaLocations("coll_new", autoScalingConfig, cloudManagerFromDiagnostics, EMPTY_MAP, Collections.singletonList("shard1"), 2, 0, 0, null); @@ -470,9 +489,11 @@ public void testCreateCollectionWithEmptyPolicy() { } public void testUnresolvedSuggestion() { + @SuppressWarnings({"unchecked"}) Map m = (Map) loadFromResource("testUnresolvedSuggestion.json"); SolrCloudManager cloudManagerFromDiagnostics = createCloudManagerFromDiagnostics(m); + @SuppressWarnings({"unchecked"}) List suggestions = PolicyHelper.getSuggestions(new AutoScalingConfig((Map) getObjectByPath(m, false, "diagnostics/config")) , cloudManagerFromDiagnostics); for (Suggester.SuggestionInfo suggestion : suggestions) { @@ -483,6 +504,7 @@ public void testUnresolvedSuggestion() { @Ignore("This takes too long to run. enable it for perf testing") + @SuppressWarnings({"unchecked"}) public void testInfiniteLoop() { Row.cacheStats.clear(); Map m = (Map) loadFromResource("testInfiniteLoop.json"); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/SolrExampleJettyTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/SolrExampleJettyTest.java index ee747e3d2f9b..d48c65fcf525 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/SolrExampleJettyTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/SolrExampleJettyTest.java @@ -91,6 +91,7 @@ public void testArbitraryJsonIndexing() throws Exception { SolrDocument doc = rsp.getResults().get(0); String src = (String) doc.getFieldValue("_src_"); + @SuppressWarnings({"rawtypes"}) Map m = (Map) fromJSONString(src); assertEquals("abc1",m.get("id")); assertEquals("name1",m.get("name")); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServer.java b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServer.java index 321448868311..d8cfb591e875 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServer.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServer.java @@ -45,7 +45,7 @@ protected EmbeddedSolrServer getSolrCore1() { public void testGetCoreContainer() { Assert.assertEquals(cores, ((EmbeddedSolrServer)getSolrCore0()).getCoreContainer()); - Assert.assertEquals(cores, ((EmbeddedSolrServer)getSolrCore1()).getCoreContainer()); + Assert.assertEquals(cores, (getSolrCore1()).getCoreContainer()); } public void testClose() throws IOException { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/BasicHttpSolrClientTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/BasicHttpSolrClientTest.java index ec7773644811..4832af99d6ce 100644 --- 
a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/BasicHttpSolrClientTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/BasicHttpSolrClientTest.java @@ -41,7 +41,6 @@ import org.apache.http.HttpRequest; import org.apache.http.HttpRequestInterceptor; import org.apache.http.HttpResponse; -import org.apache.http.ParseException; import org.apache.http.client.CookieStore; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.HttpGet; @@ -241,7 +240,7 @@ public void testQuery() throws Exception { try (HttpSolrClient client = getHttpSolrClient(jetty.getBaseUrl().toString() + "/debug/foo")) { SolrQuery q = new SolrQuery("foo"); q.setParam("a", "\u1234"); - expectThrows(ParseException.class, () -> client.query(q, METHOD.GET)); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.query(q, METHOD.GET)); //default method assertEquals("get", DebugServlet.lastMethod); @@ -265,7 +264,7 @@ public void testQuery() throws Exception { //POST DebugServlet.clear(); - expectThrows(ParseException.class, () -> client.query(q, METHOD.POST)); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.query(q, METHOD.POST)); assertEquals("post", DebugServlet.lastMethod); assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent")); @@ -281,7 +280,7 @@ public void testQuery() throws Exception { //PUT DebugServlet.clear(); - expectThrows(ParseException.class, () -> client.query(q, METHOD.PUT)); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.query(q, METHOD.PUT)); assertEquals("put", DebugServlet.lastMethod); assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent")); @@ -298,7 +297,7 @@ public void testQuery() throws Exception { //XML/GET client.setParser(new XMLResponseParser()); DebugServlet.clear(); - expectThrows(ParseException.class, () -> client.query(q, METHOD.GET)); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.query(q, METHOD.GET)); assertEquals("get", DebugServlet.lastMethod); assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent")); @@ -314,7 +313,7 @@ public void testQuery() throws Exception { //XML/POST client.setParser(new XMLResponseParser()); DebugServlet.clear(); - expectThrows(ParseException.class, () -> client.query(q, METHOD.POST)); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.query(q, METHOD.POST)); assertEquals("post", DebugServlet.lastMethod); assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent")); @@ -330,7 +329,7 @@ public void testQuery() throws Exception { client.setParser(new XMLResponseParser()); DebugServlet.clear(); - expectThrows(ParseException.class, () -> client.query(q, METHOD.PUT)); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.query(q, METHOD.PUT)); assertEquals("put", DebugServlet.lastMethod); assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent")); @@ -351,7 +350,7 @@ public void testQuery() throws Exception { public void testDelete() throws Exception { DebugServlet.clear(); try (HttpSolrClient client = getHttpSolrClient(jetty.getBaseUrl().toString() + "/debug/foo")) { - expectThrows(ParseException.class, () -> client.deleteById("id")); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.deleteById("id")); //default 
method assertEquals("post", DebugServlet.lastMethod); @@ -370,7 +369,7 @@ public void testDelete() throws Exception { //XML client.setParser(new XMLResponseParser()); - expectThrows(ParseException.class, () -> client.deleteByQuery("*:*")); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.deleteByQuery("*:*")); assertEquals("post", DebugServlet.lastMethod); assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent")); @@ -389,10 +388,10 @@ public void testGetById() throws Exception { DebugServlet.clear(); try (HttpSolrClient client = getHttpSolrClient(jetty.getBaseUrl().toString() + "/debug/foo")) { Collection ids = Collections.singletonList("a"); - expectThrows(ParseException.class, () -> client.getById("a")); - expectThrows(ParseException.class, () -> client.getById(ids, null)); - expectThrows(ParseException.class, () -> client.getById("foo", "a")); - expectThrows(ParseException.class, () -> client.getById("foo", ids, null)); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.getById("a")); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.getById(ids, null)); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.getById("foo", "a")); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.getById("foo", ids, null)); } } @@ -403,7 +402,7 @@ public void testUpdate() throws Exception { UpdateRequest req = new UpdateRequest(); req.add(new SolrInputDocument()); req.setParam("a", "\u1234"); - expectThrows(ParseException.class, () -> client.request(req)); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.request(req)); //default method assertEquals("post", DebugServlet.lastMethod); @@ -424,7 +423,7 @@ public void testUpdate() throws Exception { //XML response and writer client.setParser(new XMLResponseParser()); client.setRequestWriter(new RequestWriter()); - expectThrows(ParseException.class, () -> client.request(req)); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.request(req)); assertEquals("post", DebugServlet.lastMethod); assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent")); @@ -440,7 +439,7 @@ public void testUpdate() throws Exception { client.setParser(new BinaryResponseParser()); client.setRequestWriter(new BinaryRequestWriter()); DebugServlet.clear(); - expectThrows(ParseException.class, () -> client.request(req)); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.request(req)); assertEquals("post", DebugServlet.lastMethod); assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent")); @@ -483,21 +482,21 @@ public void testCompression() throws Exception { try (HttpSolrClient client = getHttpSolrClient(clientUrl)) { // verify request header gets set DebugServlet.clear(); - expectThrows(ParseException.class, () -> client.query(q)); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.query(q)); assertNull(DebugServlet.headers.toString(), DebugServlet.headers.get("Accept-Encoding")); } try (HttpSolrClient client = getHttpSolrClient(clientUrl, null, null, true)) { try { client.query(q); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} assertNotNull(DebugServlet.headers.get("Accept-Encoding")); } try (HttpSolrClient client = getHttpSolrClient(clientUrl, null, null, false)) { try { client.query(q); - 
} catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} } assertNull(DebugServlet.headers.get("Accept-Encoding")); @@ -559,6 +558,7 @@ public void testGetRawStream() throws SolrServerException, IOException{ HttpSolrClient solrClient = getHttpSolrClient(jetty.getBaseUrl().toString() + "/collection1", client, null); QueryRequest req = new QueryRequest(); + @SuppressWarnings({"rawtypes"}) NamedList response = solrClient.request(req); InputStream stream = (InputStream) response.get("stream"); assertNotNull(stream); @@ -675,7 +675,8 @@ private void setReqParamsOf(UpdateRequest req, String... keys) { } } - private void verifyServletState(HttpSolrClient client, SolrRequest request) { + private void verifyServletState(HttpSolrClient client, + @SuppressWarnings({"rawtypes"})SolrRequest request) { // check query String Iterator paramNames = request.getParams().getParameterNamesIterator(); while (paramNames.hasNext()) { @@ -705,7 +706,7 @@ public void testQueryString() throws Exception { client.setQueryParams(setOf("serverOnly")); UpdateRequest req = new UpdateRequest(); setReqParamsOf(req, "serverOnly", "notServer"); - expectThrows(ParseException.class, () -> client.request(req)); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.request(req)); verifyServletState(client, req); // test without server query params @@ -714,7 +715,7 @@ public void testQueryString() throws Exception { UpdateRequest req2 = new UpdateRequest(); req2.setQueryParams(setOf("requestOnly")); setReqParamsOf(req2, "requestOnly", "notRequest"); - expectThrows(ParseException.class, () -> client.request(req2)); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.request(req2)); verifyServletState(client, req2); // test with both request and server query params @@ -723,7 +724,7 @@ public void testQueryString() throws Exception { client.setQueryParams(setOf("serverOnly", "both")); req3.setQueryParams(setOf("requestOnly", "both")); setReqParamsOf(req3, "serverOnly", "requestOnly", "both", "neither"); - expectThrows(ParseException.class, () -> client.request(req3)); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.request(req3)); verifyServletState(client, req3); // test with both request and server query params with single stream @@ -733,7 +734,7 @@ public void testQueryString() throws Exception { client.setQueryParams(setOf("serverOnly", "both")); req4.setQueryParams(setOf("requestOnly", "both")); setReqParamsOf(req4, "serverOnly", "requestOnly", "both", "neither"); - expectThrows(ParseException.class, () -> client.request(req4)); + expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.request(req4)); // NOTE: single stream requests send all the params // as part of the query string. So add "neither" to the request // so it passes the verification step. 
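
The hunks above all make one substitution in BasicHttpSolrClientTest: the expected failure type moves from HttpClient's org.apache.http.ParseException to Solr's own BaseHttpSolrClient.RemoteSolrException, so a response-parsing failure surfaces through SolrJ's exception hierarchy instead of leaking an Apache HttpClient type to callers. A minimal, self-contained sketch of the idiom follows; the class name and base URL are illustrative assumptions, not taken from the patch, which exercises a DebugServlet endpoint instead.

    import java.io.IOException;
    import org.apache.lucene.util.LuceneTestCase;
    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.BaseHttpSolrClient;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;

    public class RemoteSolrExceptionIdiomSketch {
      // Hypothetical endpoint; the real tests point at a debug servlet whose
      // payload the configured ResponseParser cannot handle.
      private static final String BASE_URL = "http://localhost:8983/solr/debug/foo";

      public void queryFailureSurfacesAsRemoteSolrException() throws IOException {
        try (HttpSolrClient client = new HttpSolrClient.Builder(BASE_URL).build()) {
          // Post-patch expectation: the parse failure is wrapped and rethrown as
          // RemoteSolrException instead of org.apache.http.ParseException.
          LuceneTestCase.expectThrows(BaseHttpSolrClient.RemoteSolrException.class,
              () -> client.query(new SolrQuery("foo")));
        }
      }
    }

The same one-line swap repeats across testDelete, testGetById, testUpdate, testCompression and testQueryString above, and again in the Http2SolrClient tests further down.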
@@ -743,6 +744,7 @@ public void testQueryString() throws Exception { } @Test + @SuppressWarnings({"try"}) public void testInvariantParams() throws IOException { try(HttpSolrClient createdClient = new HttpSolrClient.Builder() .withBaseSolrUrl(jetty.getBaseUrl().toString()) @@ -758,6 +760,7 @@ public void testInvariantParams() throws IOException { assertEquals(2, createdClient.getInvariantParams().getParams("fq").length); } + try(HttpSolrClient createdClient = new HttpSolrClient.Builder() .withBaseSolrUrl(jetty.getBaseUrl().toString()) .withKerberosDelegationToken("mydt") diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientBadInputTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientBadInputTest.java index 6206d4d0be44..1c9196c13af9 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientBadInputTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientBadInputTest.java @@ -61,7 +61,9 @@ public void testDeleteByIdReportsInvalidIdLists() throws Exception { } } - private void assertExceptionThrownWithMessageContaining(Class expectedType, List expectedStrings, LuceneTestCase.ThrowingRunnable runnable) { + private void assertExceptionThrownWithMessageContaining(@SuppressWarnings({"rawtypes"})Class expectedType, + List expectedStrings, LuceneTestCase.ThrowingRunnable runnable) { + @SuppressWarnings({"unchecked"}) Throwable thrown = expectThrows(expectedType, runnable); if (expectedStrings != null) { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientRetryTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientRetryTest.java index 52a4b84a15d2..d0266e87ae0c 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientRetryTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientRetryTest.java @@ -60,6 +60,7 @@ public void testRetry() throws Exception { QueryResponse response = solrClient.query(collectionName, params, SolrRequest.METHOD.GET); NamedList namedList = response.getResponse(); System.out.println(namedList); + @SuppressWarnings({"rawtypes"}) NamedList metrics = (NamedList) namedList.get("metrics"); assertEquals(1L, metrics.get(updateRequestCountKey)); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientTest.java index 3b95b110a399..df76cb83739b 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientTest.java @@ -239,6 +239,7 @@ public void testAliasHandling() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testRouting() throws Exception { CollectionAdminRequest.createCollection("routing_collection", "conf", 2, 1).process(cluster.getSolrClient()); cluster.waitForActiveCollection("routing_collection", 2, 2); @@ -253,6 +254,7 @@ public void testRouting() throws Exception { if (getRandomClient().isDirectUpdatesToLeadersOnly()) { checkSingleServer(response); } + @SuppressWarnings({"rawtypes"}) RouteResponse rr = (RouteResponse) response; Map routes = rr.getRoutes(); Iterator> it = routes.entrySet() @@ -468,6 +470,7 @@ private void queryWithShardsPreferenceRules(CloudHttp2SolrClient cloudClient, // Iterate over shards-info and check what cores responded SimpleOrderedMap 
shardsInfoMap = (SimpleOrderedMap)shardsInfo; + @SuppressWarnings({"unchecked"}) Iterator> itr = shardsInfoMap.asMap(100).entrySet().iterator(); List shardAddresses = new ArrayList(); while (itr.hasNext()) { @@ -544,6 +547,7 @@ private void queryReplicaType(CloudHttp2SolrClient cloudClient, // Iterate over shards-info and check what cores responded SimpleOrderedMap shardsInfoMap = (SimpleOrderedMap)shardsInfo; + @SuppressWarnings({"unchecked"}) Iterator> itr = shardsInfoMap.asMap(100).entrySet().iterator(); List shardAddresses = new ArrayList(); while (itr.hasNext()) { @@ -586,6 +590,7 @@ private Long getNumRequests(String baseUrl, String collectionName, String catego } else { name = category + "." + (scope != null ? scope : key) + ".requests"; } + @SuppressWarnings({"unchecked"}) Map map = (Map)resp.findRecursive("solr-mbeans", category, key, "stats"); if (map == null) { return null; @@ -720,6 +725,7 @@ public void stateVersionParamTest() throws Exception { q.setParam(CloudSolrClient.STATE_VERSION, COLLECTION + ":" + (coll.getZNodeVersion() - 1)); //an older version expect error QueryResponse rsp = solrClient.query(q); + @SuppressWarnings({"rawtypes"}) Map m = (Map) rsp.getResponse().get(CloudSolrClient.STATE_VERSION, rsp.getResponse().size()-1); assertNotNull("Expected an extra information from server with the list of invalid collection states", m); assertNotNull(m.get(COLLECTION)); @@ -845,6 +851,7 @@ public void testVersionsAreReturned() throws Exception { response = deleteRequest.commit(getRandomClient(), "versions_collection").getResponse(); Object deletesObject = response.get("deletes"); assertNotNull("There must be a deletes parameter", deletesObject); + @SuppressWarnings({"rawtypes"}) NamedList deletes = (NamedList) deletesObject; assertEquals("There must be 1 version", 1, deletes.size()); } @@ -939,7 +946,9 @@ public void testRetryUpdatesWhenClusterStateIsStale() throws Exception { private static void checkSingleServer(NamedList response) { + @SuppressWarnings({"rawtypes"}) final RouteResponse rr = (RouteResponse) response; + @SuppressWarnings({"unchecked"}) final Map routes = rr.getRoutes(); final Iterator> it = routes.entrySet().iterator(); @@ -1050,6 +1059,7 @@ private void queryWithPreferReplicaTypes(CloudHttp2SolrClient cloudClient, // Iterate over shards-info and check that replicas of correct type responded SimpleOrderedMap shardsInfoMap = (SimpleOrderedMap)shardsInfo; + @SuppressWarnings({"unchecked"}) Iterator> itr = shardsInfoMap.asMap(100).entrySet().iterator(); List shardAddresses = new ArrayList(); while (itr.hasNext()) { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientBadInputTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientBadInputTest.java index 61f8ceb885f8..acace0dc7322 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientBadInputTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientBadInputTest.java @@ -61,7 +61,9 @@ public void testDeleteByIdReportsInvalidIdLists() throws Exception { } } - private void assertExceptionThrownWithMessageContaining(Class expectedType, List expectedStrings, LuceneTestCase.ThrowingRunnable runnable) { + private void assertExceptionThrownWithMessageContaining(@SuppressWarnings({"rawtypes"})Class expectedType, + List expectedStrings, LuceneTestCase.ThrowingRunnable runnable) { + @SuppressWarnings({"unchecked"}) Throwable thrown = expectThrows(expectedType, runnable); if (expectedStrings != null) { diff --git 
a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientBuilderTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientBuilderTest.java index 4051b85f9c71..3be5d83100a7 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientBuilderTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientBuilderTest.java @@ -95,7 +95,7 @@ public void testIsDirectUpdatesToLeadersOnlyDefault() throws IOException { } @Test - // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Sep-2018 + @SuppressWarnings({"try"}) public void test0Timeouts() throws IOException { try(CloudSolrClient createdClient = new Builder(Collections.singletonList(ANY_ZK_HOST), Optional.empty()) .withSocketTimeout(0) diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientCacheTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientCacheTest.java index 92c5c629b047..1a671a8e9f69 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientCacheTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientCacheTest.java @@ -49,6 +49,7 @@ public static void beforeClass() { assumeWorkingMockito(); } + @SuppressWarnings({"unchecked"}) public void testCaching() throws Exception { String collName = "gettingstarted"; Set livenodes = new HashSet<>(); @@ -74,7 +75,9 @@ public DocCollection get() { return colls.get(c); } } + @SuppressWarnings({"rawtypes"}) Map responses = new HashMap<>(); + @SuppressWarnings({"rawtypes"}) NamedList okResponse = new NamedList(); okResponse.add("responseHeader", new NamedList<>(Collections.singletonMap("status", 0))); @@ -84,8 +87,7 @@ public DocCollection get() { .withLBHttpSolrClient(mockLbclient) .build()) { livenodes.addAll(ImmutableSet.of("192.168.1.108:7574_solr", "192.168.1.108:8983_solr")); - ClusterState cs = ClusterState.load(1, coll1State.getBytes(UTF_8), - Collections.emptySet(), "/collections/gettingstarted/state.json"); + ClusterState cs = ClusterState.createFromJson(1, coll1State.getBytes(UTF_8), Collections.emptySet()); refs.put(collName, new Ref(collName)); colls.put(collName, cs.getCollectionOrNull(collName)); responses.put("request", o -> { @@ -105,11 +107,14 @@ public DocCollection get() { } - private LBHttpSolrClient getMockLbHttpSolrClient(Map responses) throws Exception { + @SuppressWarnings({"unchecked"}) + private LBHttpSolrClient getMockLbHttpSolrClient( + @SuppressWarnings({"rawtypes"})Map responses) throws Exception { LBHttpSolrClient mockLbclient = mock(LBHttpSolrClient.class); when(mockLbclient.request(any(LBSolrClient.Req.class))).then(invocationOnMock -> { LBHttpSolrClient.Req req = invocationOnMock.getArgument(0); + @SuppressWarnings({"rawtypes"}) Function f = responses.get("request"); if (f == null) return null; Object res = f.apply(null); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientRetryTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientRetryTest.java index 900ae7160f63..9dc1524432c2 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientRetryTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientRetryTest.java @@ -57,6 +57,7 @@ public void testRetry() throws Exception { QueryResponse response = solrClient.query(collectionName, params, SolrRequest.METHOD.GET); NamedList namedList = response.getResponse(); 
System.out.println(namedList); + @SuppressWarnings({"rawtypes"}) NamedList metrics = (NamedList) namedList.get("metrics"); assertEquals(1L, metrics.get(updateRequestCountKey)); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java index d6aec6222a08..fb3635893007 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java @@ -456,6 +456,7 @@ private void queryWithShardsPreferenceRules(CloudSolrClient cloudClient, // Iterate over shards-info and check what cores responded SimpleOrderedMap shardsInfoMap = (SimpleOrderedMap)shardsInfo; + @SuppressWarnings({"unchecked"}) Iterator> itr = shardsInfoMap.asMap(100).entrySet().iterator(); List shardAddresses = new ArrayList(); while (itr.hasNext()) { @@ -530,6 +531,7 @@ private void queryReplicaType(CloudSolrClient cloudClient, // Iterate over shards-info and check what cores responded SimpleOrderedMap shardsInfoMap = (SimpleOrderedMap)shardsInfo; + @SuppressWarnings({"unchecked"}) Iterator> itr = shardsInfoMap.asMap(100).entrySet().iterator(); List shardAddresses = new ArrayList(); while (itr.hasNext()) { @@ -572,6 +574,7 @@ private Long getNumRequests(String baseUrl, String collectionName, String catego } else { name = category + "." + (scope != null ? scope : key) + ".requests"; } + @SuppressWarnings({"unchecked"}) Map map = (Map)resp.findRecursive("solr-mbeans", category, key, "stats"); if (map == null) { return null; @@ -706,6 +709,7 @@ public void stateVersionParamTest() throws Exception { q.setParam(CloudSolrClient.STATE_VERSION, COLLECTION + ":" + (coll.getZNodeVersion() - 1)); //an older version expect error QueryResponse rsp = solrClient.query(q); + @SuppressWarnings({"rawtypes"}) Map m = (Map) rsp.getResponse().get(CloudSolrClient.STATE_VERSION, rsp.getResponse().size()-1); assertNotNull("Expected an extra information from server with the list of invalid collection states", m); assertNotNull(m.get(COLLECTION)); @@ -825,6 +829,7 @@ public void testVersionsAreReturned() throws Exception { response = deleteRequest.commit(getRandomClient(), "versions_collection").getResponse(); Object deletesObject = response.get("deletes"); assertNotNull("There must be a deletes parameter", deletesObject); + @SuppressWarnings({"rawtypes"}) NamedList deletes = (NamedList) deletesObject; assertEquals("There must be 1 version", 1, deletes.size()); } @@ -1022,6 +1027,7 @@ private void queryWithPreferReplicaTypes(CloudSolrClient cloudClient, // Iterate over shards-info and check that replicas of correct type responded SimpleOrderedMap shardsInfoMap = (SimpleOrderedMap)shardsInfo; + @SuppressWarnings({"unchecked"}) Iterator> itr = shardsInfoMap.asMap(100).entrySet().iterator(); List shardAddresses = new ArrayList(); while (itr.hasNext()) { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClientBadInputTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClientBadInputTest.java index 7b271ca833e8..6c2728b3e92f 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClientBadInputTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClientBadInputTest.java @@ -86,7 +86,9 @@ public void testDeleteByIdReportsInvalidIdLists() throws Exception { } } - private void 
assertExceptionThrownWithMessageContaining(Class expectedType, List expectedStrings, LuceneTestCase.ThrowingRunnable runnable) { + private void assertExceptionThrownWithMessageContaining(@SuppressWarnings({"rawtypes"})Class expectedType, + List expectedStrings, LuceneTestCase.ThrowingRunnable runnable) { + @SuppressWarnings({"unchecked"}) Throwable thrown = expectThrows(expectedType, runnable); if (expectedStrings != null) { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClientBadInputTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClientBadInputTest.java index 2bc7eab1c59e..9db0e4d0f643 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClientBadInputTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClientBadInputTest.java @@ -79,7 +79,9 @@ public void testDeleteByIdReportsInvalidIdLists() throws Exception { } } - private void assertExceptionThrownWithMessageContaining(Class expectedType, List expectedStrings, LuceneTestCase.ThrowingRunnable runnable) { + private void assertExceptionThrownWithMessageContaining(@SuppressWarnings({"rawtypes"})Class expectedType, + List expectedStrings, LuceneTestCase.ThrowingRunnable runnable) { + @SuppressWarnings({"unchecked"}) Throwable thrown = expectThrows(expectedType, runnable); if (expectedStrings != null) { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClientBuilderTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClientBuilderTest.java index 282b88da189f..00801986b967 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClientBuilderTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClientBuilderTest.java @@ -38,7 +38,7 @@ public void testRejectsMissingBaseSolrUrl() { } @Test - // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Sep-2018 + @SuppressWarnings({"try"}) public void testMissingQueueSize() { try (ConcurrentUpdateSolrClient client = new Builder("someurl").build()){ // Do nothing as we just need to test that the only mandatory parameter for building the client diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/Http2SolrClientCompatibilityTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/Http2SolrClientCompatibilityTest.java index 81f64b65dd2f..16f687527bfe 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/Http2SolrClientCompatibilityTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/Http2SolrClientCompatibilityTest.java @@ -17,7 +17,6 @@ package org.apache.solr.client.solrj.impl; -import org.apache.http.ParseException; import org.apache.solr.SolrJettyTestBase; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrQuery; @@ -60,7 +59,7 @@ public void testConnectToOldNodesUsingHttp1() throws Exception { assertTrue(client.getHttpClient().getTransport() instanceof HttpClientTransportOverHTTP); try { client.query(new SolrQuery("*:*"), SolrRequest.METHOD.GET); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} } finally { afterSolrJettyTestBase(); } @@ -80,7 +79,7 @@ public void testConnectToNewNodesUsingHttp1() throws Exception { assertTrue(client.getHttpClient().getTransport() instanceof HttpClientTransportOverHTTP); try { client.query(new 
SolrQuery("*:*"), SolrRequest.METHOD.GET); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} } finally { afterSolrJettyTestBase(); } @@ -102,7 +101,7 @@ public void testConnectToOldNodesUsingHttp2() throws Exception { try { client.query(new SolrQuery("*:*"), SolrRequest.METHOD.GET); fail("Jetty client with HTTP2 transport should not be able to connect to HTTP1 only nodes"); - } catch (ParseException ignored) { + } catch (BaseHttpSolrClient.RemoteSolrException ignored) { fail("Jetty client with HTTP2 transport should not be able to connect to HTTP1 only nodes"); } catch (SolrServerException e) { // expected diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/Http2SolrClientTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/Http2SolrClientTest.java index 6462f2c7dcb2..8b6b339c80ce 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/Http2SolrClientTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/Http2SolrClientTest.java @@ -32,7 +32,6 @@ import java.util.Set; import java.util.TreeSet; -import org.apache.http.ParseException; import org.apache.solr.SolrJettyTestBase; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrRequest; @@ -189,7 +188,7 @@ public void test0IdleTimeout() throws Exception { try(Http2SolrClient client = getHttp2SolrClient(jetty.getBaseUrl().toString() + "/debug/foo", DEFAULT_CONNECTION_TIMEOUT, 0)) { try { client.query(q, SolrRequest.METHOD.GET); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} } } @@ -227,7 +226,7 @@ public void testQuery() throws Exception { q.setParam("a", "\u1234"); try { client.query(q, SolrRequest.METHOD.GET); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} //default method assertEquals("get", DebugServlet.lastMethod); @@ -251,7 +250,7 @@ public void testQuery() throws Exception { DebugServlet.clear(); try { client.query(q, SolrRequest.METHOD.POST); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} assertEquals("post", DebugServlet.lastMethod); assertEquals(EXPECTED_USER_AGENT, DebugServlet.headers.get("user-agent")); @@ -268,7 +267,7 @@ public void testQuery() throws Exception { DebugServlet.clear(); try { client.query(q, SolrRequest.METHOD.PUT); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} assertEquals("put", DebugServlet.lastMethod); assertEquals(EXPECTED_USER_AGENT, DebugServlet.headers.get("user-agent")); @@ -286,7 +285,7 @@ public void testQuery() throws Exception { DebugServlet.clear(); try { client.query(q, SolrRequest.METHOD.GET); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} assertEquals("get", DebugServlet.lastMethod); assertEquals(EXPECTED_USER_AGENT, DebugServlet.headers.get("user-agent")); @@ -303,7 +302,7 @@ public void testQuery() throws Exception { DebugServlet.clear(); try { client.query(q, SolrRequest.METHOD.POST); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} assertEquals("post", DebugServlet.lastMethod); assertEquals(EXPECTED_USER_AGENT, DebugServlet.headers.get("user-agent")); @@ -320,7 +319,7 @@ public void testQuery() throws Exception { DebugServlet.clear(); try { client.query(q, SolrRequest.METHOD.PUT); - } catch (ParseException ignored) {} + } catch 
(BaseHttpSolrClient.RemoteSolrException ignored) {} assertEquals("put", DebugServlet.lastMethod); assertEquals(EXPECTED_USER_AGENT, DebugServlet.headers.get("user-agent")); @@ -342,7 +341,7 @@ public void testDelete() throws Exception { try (Http2SolrClient client = getHttp2SolrClient(jetty.getBaseUrl().toString() + "/debug/foo")) { try { client.deleteById("id"); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} //default method assertEquals("post", DebugServlet.lastMethod); @@ -361,7 +360,7 @@ public void testDelete() throws Exception { client.setParser(new XMLResponseParser()); try { client.deleteByQuery("*:*"); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} assertEquals("post", DebugServlet.lastMethod); assertEquals(EXPECTED_USER_AGENT, DebugServlet.headers.get("user-agent")); @@ -381,19 +380,19 @@ public void testGetById() throws Exception { Collection ids = Collections.singletonList("a"); try { client.getById("a"); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} try { client.getById(ids, null); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} try { client.getById("foo", "a"); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} try { client.getById("foo", ids, null); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} } } @@ -406,7 +405,7 @@ public void testUpdate() throws Exception { req.setParam("a", "\u1234"); try { client.request(req); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} //default method assertEquals("post", DebugServlet.lastMethod); @@ -429,7 +428,7 @@ public void testUpdate() throws Exception { client.setRequestWriter(new RequestWriter()); try { client.request(req); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} assertEquals("post", DebugServlet.lastMethod); assertEquals(EXPECTED_USER_AGENT, DebugServlet.headers.get("user-agent")); @@ -447,7 +446,7 @@ public void testUpdate() throws Exception { DebugServlet.clear(); try { client.request(req); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} assertEquals("post", DebugServlet.lastMethod); assertEquals(EXPECTED_USER_AGENT, DebugServlet.headers.get("user-agent")); @@ -525,7 +524,8 @@ private void setReqParamsOf(UpdateRequest req, String... 
keys) { } } - private void verifyServletState(Http2SolrClient client, SolrRequest request) { + private void verifyServletState(Http2SolrClient client, + @SuppressWarnings({"rawtypes"})SolrRequest request) { // check query String Iterator paramNames = request.getParams().getParameterNamesIterator(); while (paramNames.hasNext()) { @@ -557,7 +557,7 @@ public void testQueryString() throws Exception { setReqParamsOf(req, "serverOnly", "notServer"); try { client.request(req); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} verifyServletState(client, req); // test without server query params @@ -568,7 +568,7 @@ public void testQueryString() throws Exception { setReqParamsOf(req, "requestOnly", "notRequest"); try { client.request(req); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} verifyServletState(client, req); // test with both request and server query params @@ -579,7 +579,7 @@ public void testQueryString() throws Exception { setReqParamsOf(req, "serverOnly", "requestOnly", "both", "neither"); try { client.request(req); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} verifyServletState(client, req); // test with both request and server query params with single stream @@ -591,7 +591,7 @@ public void testQueryString() throws Exception { setReqParamsOf(req, "serverOnly", "requestOnly", "both", "neither"); try { client.request(req); - } catch (ParseException ignored) {} + } catch (BaseHttpSolrClient.RemoteSolrException ignored) {} // NOTE: single stream requests send all the params // as part of the query string. So add "neither" to the request // so it passes the verification step. diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpSolrClientBadInputTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpSolrClientBadInputTest.java index 29535c0c9ff9..4b1eccc0d4af 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpSolrClientBadInputTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpSolrClientBadInputTest.java @@ -45,7 +45,9 @@ public static void beforeTest() throws Exception { createAndStartJetty(legacyExampleCollection1SolrHome(), jettyConfig); } - private void assertExceptionThrownWithMessageContaining(Class expectedType, List expectedStrings, ThrowingRunnable runnable) { + private void assertExceptionThrownWithMessageContaining(@SuppressWarnings({"rawtypes"})Class expectedType, + List expectedStrings, ThrowingRunnable runnable) { + @SuppressWarnings({"unchecked"}) Throwable thrown = expectThrows(expectedType, runnable); if (expectedStrings != null) { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttpSolrClientBadInputTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttpSolrClientBadInputTest.java index 9875b1654d3d..6ce46f0c8960 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttpSolrClientBadInputTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttpSolrClientBadInputTest.java @@ -77,7 +77,9 @@ public void testDeleteByIdReportsInvalidIdLists() throws Exception { } } - private void assertExceptionThrownWithMessageContaining(Class expectedType, List expectedStrings, LuceneTestCase.ThrowingRunnable runnable) { + private void assertExceptionThrownWithMessageContaining(@SuppressWarnings({"rawtypes"})Class expectedType, + List expectedStrings, LuceneTestCase.ThrowingRunnable runnable) 
{ + @SuppressWarnings({"unchecked"}) Throwable thrown = expectThrows(expectedType, runnable); if (expectedStrings != null) { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java index 459626e27f2d..160b1085283e 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java @@ -85,11 +85,11 @@ public class TestLang extends SolrTestCase { "getSupportPoints", "pairSort", "log10", "plist", "recip", "pivot", "ltrim", "rtrim", "export", "zplot", "natural", "repeat", "movingMAD", "hashRollup", "noop", "var", "stddev", "recNum", "isNull", "notNull", "matches", "projectToBorder", "double", "long", "parseCSV", "parseTSV", "dateTime", - "split", "upper", "trim", "lower", "trunc", "cosine", "dbscan"}; + "split", "upper", "trim", "lower", "trunc", "cosine", "dbscan", "per", "std"}; @Test public void testLang() { - List functions = new ArrayList(); + List functions = new ArrayList<>(); for(String f : allFunctions) { functions.add(f); } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphExpressionTest.java index d98f88631884..f5f757db5f7d 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphExpressionTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphExpressionTest.java @@ -59,6 +59,7 @@ import org.apache.solr.cloud.SolrCloudTestCase; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.util.NamedList; +import org.apache.solr.util.BaseTestHarness; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @@ -99,6 +100,7 @@ public void cleanIndex() throws Exception { @Test // commented 4-Sep-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018 + @SuppressWarnings({"unchecked"}) public void testShortestPathStream() throws Exception { new UpdateRequest() @@ -130,6 +132,7 @@ public void testShortestPathStream() throws Exception { .withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress()) .withFunctionName("shortestPath", ShortestPathStream.class); + @SuppressWarnings({"rawtypes"}) Map params = new HashMap(); params.put("fq", "predicate_s:knows"); @@ -143,7 +146,7 @@ public void testShortestPathStream() throws Exception { "maxDepth=\"6\")"); stream.setStreamContext(context); - paths = new HashSet(); + paths = new HashSet<>(); tuples = getTuples(stream); assertTrue(tuples.size() == 2); @@ -169,7 +172,7 @@ public void testShortestPathStream() throws Exception { "maxDepth=\"6\")"); stream.setStreamContext(context); - paths = new HashSet(); + paths = new HashSet<>(); tuples = getTuples(stream); assertTrue(tuples.size() == 2); @@ -194,7 +197,6 @@ public void testShortestPathStream() throws Exception { "maxDepth=\"6\")"); stream.setStreamContext(context); - paths = new HashSet(); tuples = getTuples(stream); assertTrue(tuples.size() == 0); @@ -230,7 +232,7 @@ public void testShortestPathStream() throws Exception { stream.setStreamContext(context); - paths = new HashSet(); + paths = new HashSet<>(); tuples = getTuples(stream); assertTrue(tuples.size() == 1); @@ -887,7 +889,7 @@ public void testGraphHandler() throws Exception { InputStreamReader reader = new InputStreamReader(stream, StandardCharsets.UTF_8); String xml = readString(reader); //Validate the nodes 
- String error = h.validateXPath(xml, + String error = BaseTestHarness.validateXPath(xml, "//graph/node[1][@id ='jim']", "//graph/node[2][@id ='max']", "//graph/node[3][@id ='sam']"); @@ -895,7 +897,7 @@ public void testGraphHandler() throws Exception { throw new Exception(error); } //Validate the edges - error = h.validateXPath(xml, + error = BaseTestHarness.validateXPath(xml, "//graph/edge[1][@source ='bill']", "//graph/edge[1][@target ='jim']", "//graph/edge[2][@source ='bill']", @@ -930,7 +932,7 @@ private String readString(InputStreamReader reader) throws Exception{ protected List getTuples(TupleStream tupleStream) throws IOException { tupleStream.open(); - List tuples = new ArrayList(); + List tuples = new ArrayList<>(); for(Tuple t = tupleStream.read(); !t.EOF; t = tupleStream.read()) { tuples.add(t); } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphTest.java index 1edc0e952ef1..038bd0ea005d 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphTest.java @@ -114,7 +114,7 @@ public void testShortestPathStream() throws Exception { stream.setStreamContext(context); - paths = new HashSet(); + paths = new HashSet<>(); tuples = getTuples(stream); assertTrue(tuples.size() == 2); @@ -142,7 +142,7 @@ public void testShortestPathStream() throws Exception { 6); stream.setStreamContext(context); - paths = new HashSet(); + paths = new HashSet<>(); tuples = getTuples(stream); assertTrue(tuples.size() == 2); @@ -170,7 +170,6 @@ public void testShortestPathStream() throws Exception { 6); stream.setStreamContext(context); - paths = new HashSet(); tuples = getTuples(stream); assertTrue(tuples.size() == 0); @@ -191,7 +190,6 @@ public void testShortestPathStream() throws Exception { 2); stream.setStreamContext(context); - paths = new HashSet(); tuples = getTuples(stream); assertTrue(tuples.size() == 0); @@ -213,7 +211,7 @@ public void testShortestPathStream() throws Exception { 6); stream.setStreamContext(context); - paths = new HashSet(); + paths = new HashSet<>(); tuples = getTuples(stream); assertTrue(tuples.size() == 1); @@ -228,7 +226,7 @@ public void testShortestPathStream() throws Exception { protected List getTuples(TupleStream tupleStream) throws IOException { tupleStream.open(); - List tuples = new ArrayList(); + List tuples = new ArrayList<>(); for(;;) { Tuple t = tupleStream.read(); if(t.EOF) { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java index 9ae7aa992df5..066b47e2cd49 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java @@ -417,6 +417,7 @@ public void testJDBCPropertiesParameters() throws Exception { @Ignore("Fix error checking") @Test + @SuppressWarnings({"try"}) public void testErrorPropagation() throws Exception { //Test error propagation Properties props = new Properties(); @@ -434,6 +435,7 @@ public void testErrorPropagation() throws Exception { } @Test + @SuppressWarnings({"try"}) public void testSQLExceptionThrownWhenQueryAndConnUseDiffCollections() throws Exception { String badCollection = COLLECTIONORALIAS + "bad"; String connectionString = "jdbc:solr://" + zkHost + "?collection=" + badCollection; diff --git 
a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/CloudAuthStreamTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/CloudAuthStreamTest.java index a452465fa402..e8aef51421c8 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/CloudAuthStreamTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/CloudAuthStreamTest.java @@ -75,6 +75,7 @@ public class CloudAuthStreamTest extends SolrCloudTestCase { * * @see SolrRequest#setBasicAuthCredentials */ + @SuppressWarnings({"rawtypes"}) private static T setBasicAuthCredentials(T req, String user) { assert null != user; req.setBasicAuthCredentials(user, user); @@ -522,7 +523,7 @@ public void testDaemonUpdateStreamInsufficientCredentials() throws Exception { final List tuples = getTuples(daemonCheck); assertEquals(1, tuples.size()); // our daemon; if (log.isInfoEnabled()) { - log.info("Current daemon status: {}", tuples.get(0).fields); + log.info("Current daemon status: {}", tuples.get(0).getFields()); } assertEquals(daemonId + " should have never had a successful iteration", Long.valueOf(0L), tuples.get(0).getLong("iterations")); @@ -808,7 +809,9 @@ protected static List getTuples(final TupleStream tupleStream) throws IOE log.trace("TupleStream: {}", tupleStream); tupleStream.open(); for (Tuple t = tupleStream.read(); !t.EOF; t = tupleStream.read()) { - log.trace("Tuple: {}", t.fields); + if (log.isTraceEnabled()) { + log.trace("Tuple: {}", t.getFields()); + } tuples.add(t); } } finally { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/JDBCStreamTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/JDBCStreamTest.java index 2fbe1011e4ef..8b74a66b579b 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/JDBCStreamTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/JDBCStreamTest.java @@ -642,7 +642,7 @@ protected boolean assertOrderOf(List tuples, String fieldName, String... protected boolean assertFields(List tuples, String ... fields) throws Exception{ for(Tuple tuple : tuples){ for(String field : fields){ - if(!tuple.fields.containsKey(field)){ + if(!tuple.getFields().containsKey(field)){ throw new Exception(String.format(Locale.ROOT, "Expected field '%s' not found", field)); } } @@ -653,7 +653,7 @@ protected boolean assertFields(List tuples, String ... fields) throws Exc protected boolean assertNotFields(List tuples, String ... 
fields) throws Exception{ for(Tuple tuple : tuples){ for(String field : fields){ - if(tuple.fields.containsKey(field)){ + if(tuple.getFields().containsKey(field)){ throw new Exception(String.format(Locale.ROOT, "Unexpected field '%s' found", field)); } } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java index 25de9e1b194c..edef2698f769 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java @@ -26,12 +26,15 @@ import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase.Slow; +import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.io.SolrClientCache; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.cloud.AbstractDistribZkTestBase; import org.apache.solr.cloud.SolrCloudTestCase; +import org.apache.solr.common.cloud.ClusterState; +import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.params.ModifiableSolrParams; import org.junit.Before; import org.junit.BeforeClass; @@ -77,7 +80,7 @@ public void cleanIndex() throws Exception { .commit(cluster.getSolrClient(), COLLECTIONORALIAS); } - @Test + @Test public void testAnalyzeEvaluator() throws Exception { UpdateRequest updateRequest = new UpdateRequest(); @@ -134,6 +137,7 @@ public void testAnalyzeEvaluator() throws Exception { solrStream.setStreamContext(context); tuples = getTuples(solrStream); assertEquals(tuples.size(), 1); + @SuppressWarnings({"rawtypes"}) List terms = (List)tuples.get(0).get("return-value"); assertTrue(terms.get(0).equals("hello")); assertTrue(terms.get(1).equals("world")); @@ -198,6 +202,7 @@ public void testAnalyzeEvaluator() throws Exception { solrStream.setStreamContext(context); tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"rawtypes"}) List l = (List)tuples.get(0).get("test1_t"); assertTrue(l.get(0).equals("l")); assertTrue(l.get(1).equals("b")); @@ -285,6 +290,7 @@ public void testTruncArray() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertEquals(tuples.size(), 1); + @SuppressWarnings({"unchecked"}) List l1 = (List)tuples.get(0).get("field2"); assertEquals(l1.get(0), "aaa"); assertEquals(l1.get(1), "bbb"); @@ -304,13 +310,16 @@ public void testUpperLowerArray() throws Exception { StreamContext context = new StreamContext(); solrStream.setStreamContext(context); + @SuppressWarnings({"unchecked"}) List tuples = getTuples(solrStream); assertEquals(tuples.size(), 1); + @SuppressWarnings({"unchecked"}) List l1 = (List)tuples.get(0).get("field3"); assertEquals(l1.get(0), "A"); assertEquals(l1.get(1), "B"); assertEquals(l1.get(2), "C"); + @SuppressWarnings({"unchecked"}) List l2 = (List)tuples.get(0).get("field4"); assertEquals(l2.get(0), "x"); assertEquals(l2.get(1), "y"); @@ -332,6 +341,7 @@ public void testSplitTrim() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertEquals(tuples.size(), 1); + @SuppressWarnings({"unchecked"}) List l1 = (List)tuples.get(0).get("field2"); assertEquals(l1.get(0), "a"); assertEquals(l1.get(1), "b"); @@ -436,6 +446,7 @@ public void testMemsetTimeSeries() 
throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) Map> mem = (Map)tuples.get(0).get("return-value"); List array = mem.get("a"); assertEquals(array.get(0).intValue(), 100); @@ -474,6 +485,7 @@ public void testLatlonFunctions() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List>locVectors = (List>)tuples.get(0).get("b"); int v=1; for(List row : locVectors) { @@ -553,6 +565,7 @@ public void testConvexHull() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List> points = (List>)tuples.get(0).get("e"); assertTrue(points.size() == 6); List point1 = points.get(0); @@ -595,11 +608,13 @@ public void testConvexHull() throws Exception { double boundarySize = tuples.get(0).getDouble("g"); assertEquals(boundarySize, 122.73784789223708, 0.0); + @SuppressWarnings({"unchecked"}) List baryCenter = (List)tuples.get(0).get("h"); assertEquals(baryCenter.size(), 2); assertEquals(baryCenter.get(0).doubleValue(), 101.3021125450865, 0.0); assertEquals(baryCenter.get(1).doubleValue(), 100.07343616615786, 0.0); + @SuppressWarnings({"unchecked"}) List> borderPoints = (List>)tuples.get(0).get("i"); assertEquals(borderPoints.get(0).get(0).doubleValue(), 100.31316833934775, 0); assertEquals(borderPoints.get(0).get(1).doubleValue(), 115.6639686234851, 0); @@ -629,6 +644,7 @@ public void testEnclosingDisk() throws Exception { List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List center = (List)tuples.get(0).get("e"); assertEquals(center.get(0).doubleValue(), 97.40659699625388, 0.0); assertEquals(center.get(1).doubleValue(), 101.57826559647323, 0.0); @@ -636,6 +652,7 @@ public void testEnclosingDisk() throws Exception { double radius =tuples.get(0).getDouble("f"); assertEquals(radius, 22.814029299535, 0.0); + @SuppressWarnings({"unchecked"}) List> supportPoints = (List>)tuples.get(0).get("g"); List support1 = supportPoints.get(0); assertEquals(support1.get(0).doubleValue(), 95.71563821370013, 0.0); @@ -830,6 +847,7 @@ public void testDistance() throws Exception { Number f = (Number)tuples.get(0).get("f"); assertEquals(f.doubleValue(), 2.0, 0.0); + @SuppressWarnings({"unchecked"}) List> h = (List>)tuples.get(0).get("h"); assertEquals(h.size(), 3); assertEquals(h.get(0).size(), 3); @@ -855,6 +873,7 @@ public void testDistance() throws Exception { Number k = (Number)tuples.get(0).get("k"); assertEquals(k.doubleValue(), 4.0, 0.0); + @SuppressWarnings({"unchecked"}) List> m = (List>)tuples.get(0).get("m"); assertEquals(m.size(), 3); assertEquals(m.get(0).size(), 3); @@ -880,6 +899,7 @@ public void testDistance() throws Exception { Number p = (Number)tuples.get(0).get("p"); assertEquals(p.doubleValue(), 0.544877, 0.0001); + @SuppressWarnings({"unchecked"}) List> r = (List>)tuples.get(0).get("r"); assertEquals(r.size(), 3); assertEquals(r.get(0).size(), 3); @@ -906,6 +926,7 @@ public void testDistance() throws Exception { Number u = (Number)tuples.get(0).get("u"); assertEquals(u.doubleValue(), 10.0, 0); + @SuppressWarnings({"unchecked"}) List> x = (List>)tuples.get(0).get("x"); assertEquals(x.size(), 3); assertEquals(x.get(0).size(), 3); @@ -967,6 +988,7 @@ public void testReverse() throws Exception { solrStream.setStreamContext(context); List 
tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List reverse = (List)tuples.get(0).get("reverse"); assertTrue(reverse.size() == 4); assertTrue(reverse.get(0).doubleValue() == 400D); @@ -1017,6 +1039,7 @@ public void testCopyOf() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List copy1 = (List)tuples.get(0).get("copy1"); assertTrue(copy1.size() == 4); assertTrue(copy1.get(0).doubleValue() == 100D); @@ -1024,6 +1047,7 @@ public void testCopyOf() throws Exception { assertTrue(copy1.get(2).doubleValue() == 300D); assertTrue(copy1.get(3).doubleValue() == 400D); + @SuppressWarnings({"unchecked"}) List copy2 = (List)tuples.get(0).get("copy2"); assertTrue(copy2.size() == 4); assertTrue(copy2.get(0).doubleValue() == 100D); @@ -1031,6 +1055,7 @@ public void testCopyOf() throws Exception { assertTrue(copy2.get(2).doubleValue() == 300D); assertTrue(copy2.get(3).doubleValue() == 400D); + @SuppressWarnings({"unchecked"}) List copy3 = (List)tuples.get(0).get("copy3"); assertTrue(copy3.size() == 2); assertTrue(copy3.get(0).doubleValue() == 100D); @@ -1079,11 +1104,13 @@ public void testCopyOfRange() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List copy1 = (List)tuples.get(0).get("copy"); assertTrue(copy1.size() == 2); assertTrue(copy1.get(0).doubleValue() == 500D); assertTrue(copy1.get(1).doubleValue() == 300D); + @SuppressWarnings({"unchecked"}) List copy2 = (List)tuples.get(0).get("copy2"); assertTrue(copy2.size() == 2); assertTrue(copy2.get(0).doubleValue() == 300D); @@ -1156,6 +1183,7 @@ public void testPercentile() throws Exception { tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); tuple = tuples.get(0); + @SuppressWarnings({"unchecked"}) List percentiles = (List)tuple.get("return-value"); assertEquals(percentiles.get(0).doubleValue(), 2.4, 0.001); assertEquals(percentiles.get(1).doubleValue(), 6.0, 0.001); @@ -1176,6 +1204,7 @@ public void testPrimes() throws Exception { List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); Tuple tuple = tuples.get(0); + @SuppressWarnings({"unchecked"}) List asort = (List)tuple.get("return-value"); assertEquals(asort.size(), 10); assertEquals(asort.get(0).intValue(), 2); @@ -1222,6 +1251,7 @@ public void testAscend() throws Exception { List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); Tuple tuple = tuples.get(0); + @SuppressWarnings({"unchecked"}) List asort = (List)tuple.get("return-value"); assertEquals(asort.size(), 6); assertEquals(asort.get(0).doubleValue(), 0, 0.0); @@ -1274,6 +1304,7 @@ public void testRankTransform() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List reverse = (List)tuples.get(0).get("reverse"); assertTrue(reverse.size() == 4); assertTrue(reverse.get(0).doubleValue() == 400D); @@ -1281,6 +1312,7 @@ public void testRankTransform() throws Exception { assertTrue(reverse.get(2).doubleValue() == 500D); assertTrue(reverse.get(3).doubleValue() == 100D); + @SuppressWarnings({"unchecked"}) List ranked = (List)tuples.get(0).get("ranked"); assertTrue(ranked.size() == 4); assertTrue(ranked.get(0).doubleValue() == 1D); @@ -1290,6 +1322,7 @@ public void testRankTransform() throws Exception { } @Test + 
@SuppressWarnings({"unchecked"}) public void testArray() throws Exception { String cexpr = "array(1, 2, 3, 300, 2, 500)"; ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); @@ -1342,6 +1375,7 @@ public void testPairSort() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List> out = (List>)tuples.get(0).get("c"); assertEquals(out.size(), 2); List row1 = out.get(0); @@ -1372,6 +1406,7 @@ public void testOnes() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("return-value"); assertEquals(out.size(), 6); assertEquals(out.get(0).intValue(), 1); @@ -1394,6 +1429,7 @@ public void testNatural() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("return-value"); assertEquals(out.size(), 6); assertEquals(out.get(0).intValue(), 0); @@ -1417,6 +1453,7 @@ public void testRepeat() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("return-value"); assertEquals(out.size(), 6); assertEquals(out.get(0).doubleValue(), 6.5, 0); @@ -1440,6 +1477,7 @@ public void testLtrim() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("return-value"); assertEquals(out.size(), 4); assertEquals(out.get(0).intValue(), 3); @@ -1460,6 +1498,7 @@ public void testRtrim() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("return-value"); assertEquals(out.size(), 4); assertEquals(out.get(0).intValue(), 1); @@ -1481,6 +1520,7 @@ public void testZeros() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("return-value"); assertEquals(out.size(), 6); assertEquals(out.get(0).intValue(), 0); @@ -1514,6 +1554,7 @@ public void testMatrix() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List> out = (List>)tuples.get(0).get("a"); List array1 = out.get(0); @@ -1528,6 +1569,7 @@ public void testMatrix() throws Exception { assertEquals(array2.get(1).doubleValue(), 5.0, 0.0); assertEquals(array2.get(2).doubleValue(), 4.0, 0.0); + @SuppressWarnings({"unchecked"}) List row = (List)tuples.get(0).get("b"); assertEquals(row.size(), 3); @@ -1535,17 +1577,20 @@ public void testMatrix() throws Exception { assertEquals(array2.get(1).doubleValue(), 5.0, 0.0); assertEquals(array2.get(2).doubleValue(), 4.0, 0.0); + @SuppressWarnings({"unchecked"}) List col = (List)tuples.get(0).get("c"); assertEquals(col.size(), 2); assertEquals(col.get(0).doubleValue(), 3.0, 0.0); assertEquals(col.get(1).doubleValue(), 4.0, 0.0); + @SuppressWarnings({"unchecked"}) List colLabels = (List)tuples.get(0).get("d"); assertEquals(colLabels.size(), 3); assertEquals(colLabels.get(0), 
"col1"); assertEquals(colLabels.get(1), "col2"); assertEquals(colLabels.get(2), "col3"); + @SuppressWarnings({"unchecked"}) List> features = (List>)tuples.get(0).get("e"); assertEquals(features.size(), 2); assertEquals(features.get(0).size(), 1); @@ -1561,6 +1606,7 @@ public void testMatrix() throws Exception { @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void testZplot() throws Exception { String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+COLLECTIONORALIAS; @@ -1860,6 +1906,7 @@ public void testZplot() throws Exception { @Test + @SuppressWarnings({"unchecked"}) public void testMatrixMath() throws Exception { String cexpr = "let(echo=true, a=matrix(array(1.5, 2.5, 3.5), array(4.5,5.5,6.5)), " + "b=grandSum(a), " + @@ -1983,6 +2030,7 @@ public void testTranspose() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List> out = (List>)tuples.get(0).get("b"); assertEquals(out.size(), 3); List array1 = out.get(0); @@ -2013,6 +2061,7 @@ public void testUnitize() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List> out = (List>)tuples.get(0).get("a"); assertEquals(out.size(), 2); List array1 = out.get(0); @@ -2027,6 +2076,7 @@ public void testUnitize() throws Exception { assertEquals(array2.get(1).doubleValue(), 0.5698028822981898, 0.0); assertEquals(array2.get(2).doubleValue(), 0.6837634587578276, 0.0); + @SuppressWarnings({"unchecked"}) List array3 = (List)tuples.get(0).get("b"); assertEquals(array3.size(), 3); assertEquals(array3.get(0).doubleValue(), 0.4558423058385518, 0.0); @@ -2049,6 +2099,7 @@ public void testNormalizeSum() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List> out = (List>)tuples.get(0).get("a"); assertEquals(out.size(), 2); List array1 = out.get(0); @@ -2063,12 +2114,14 @@ public void testNormalizeSum() throws Exception { assertEquals(array2.get(1).doubleValue(), 0.3333333333333333, 0.0001); assertEquals(array2.get(2).doubleValue(), 0.4, 0.0001); + @SuppressWarnings({"unchecked"}) List array3 = (List)tuples.get(0).get("b"); assertEquals(array3.size(), 3); assertEquals(array3.get(0).doubleValue(), 0.16666666666666666, 0.0001); assertEquals(array3.get(1).doubleValue(), 0.3333333333333333, 0.0001); assertEquals(array3.get(2).doubleValue(), 0.5, 0.0001); + @SuppressWarnings({"unchecked"}) List array4 = (List)tuples.get(0).get("c"); assertEquals(array4.size(), 3); assertEquals(array4.get(0).doubleValue(), 16.666666666666666, 0.0001); @@ -2088,6 +2141,7 @@ public void testStandardize() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List> out = (List>)tuples.get(0).get("a"); assertEquals(out.size(), 2); List array1 = out.get(0); @@ -2102,12 +2156,14 @@ public void testStandardize() throws Exception { assertEquals(array2.get(1).doubleValue(), 0, 0.0); assertEquals(array2.get(2).doubleValue(), 1, 0.0); + @SuppressWarnings({"unchecked"}) List array3 = (List)tuples.get(0).get("b"); assertEquals(array3.size(), 3); assertEquals(array3.get(0).doubleValue(), -1, 0.0); assertEquals(array3.get(1).doubleValue(), 0, 0.0); assertEquals(array3.get(2).doubleValue(), 1, 0.0); + 
@SuppressWarnings({"unchecked"}) List array4 = (List)tuples.get(0).get("c"); assertEquals(array4.size(), 3); assertEquals(array4.get(0).doubleValue(), -1, 0.0); @@ -2153,6 +2209,7 @@ public void testAddAll() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("return-value"); assertTrue(out.size() == 9); assertTrue(out.get(0).intValue() == 1); @@ -2223,10 +2280,14 @@ private void sampleTest(ModifiableSolrParams paramsLoc, String url) throws IOExc solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List) tuples.get(0).get("sample"); + @SuppressWarnings({"rawtypes"}) Map ks = (Map) tuples.get(0).get("ks"); + @SuppressWarnings({"rawtypes"}) Map ks2 = (Map) tuples.get(0).get("ks2"); + @SuppressWarnings({"rawtypes"}) Map ks3 = (Map) tuples.get(0).get("ks3"); assertTrue(out.size() == 250); @@ -2433,6 +2494,7 @@ public void testLog10() throws Exception { List tuples = getTuples(solrStream); assertEquals(tuples.size(), 1); Tuple tuple = tuples.get(0); + @SuppressWarnings({"unchecked"}) List logs = (List)tuple.get("b"); assertEquals(logs.size(), 3); assertEquals(logs.get(0).doubleValue(), 1, 0.0); @@ -2457,6 +2519,7 @@ public void testRecip() throws Exception { List tuples = getTuples(solrStream); assertEquals(tuples.size(), 1); Tuple tuple = tuples.get(0); + @SuppressWarnings({"unchecked"}) List logs = (List)tuple.get("b"); assertEquals(logs.size(), 3); assertEquals(logs.get(0).doubleValue(), .1, 0.0); @@ -2469,6 +2532,7 @@ public void testRecip() throws Exception { @Test + @SuppressWarnings({"unchecked"}) public void testPow() throws Exception { String cexpr = "let(echo=true, a=array(10, 20, 30), b=pow(a, 2), c=pow(2, a), d=pow(10, 3), e=pow(a, array(1, 2, 3)))"; ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); @@ -2505,6 +2569,7 @@ public void testPow() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testTermVectors() throws Exception { // Test termVectors with only documents and default termVector settings String cexpr = "let(echo=true," + @@ -2826,6 +2891,7 @@ public void testPivot() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertEquals(tuples.size(), 1); + @SuppressWarnings({"unchecked"}) List> matrix = (List>)tuples.get(0).get("b"); List row1 = matrix.get(0); assertEquals(row1.get(0).doubleValue(), 2.0,0); @@ -2840,10 +2906,12 @@ public void testPivot() throws Exception { assertEquals(row3.get(1).doubleValue(), 0,0); assertEquals(row3.get(2).doubleValue(), 8.0,0); + @SuppressWarnings({"unchecked"}) List rowLabels = (List)tuples.get(0).get("c"); assertEquals(rowLabels.get(0), "x1"); assertEquals(rowLabels.get(1), "x2"); assertEquals(rowLabels.get(2), "x3"); + @SuppressWarnings({"unchecked"}) List columnLabels = (List)tuples.get(0).get("d"); assertEquals(columnLabels.get(0), "f1"); assertEquals(columnLabels.get(1), "f2"); @@ -2870,6 +2938,7 @@ public void testEbeSubtract() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("c"); assertEquals(out.size(), 6); assertEquals(out.get(0).doubleValue(), 1.0, 0.0); @@ -2879,6 +2948,7 @@ public void testEbeSubtract() throws Exception { assertEquals(out.get(4).doubleValue(), 5.0, 
0.0); assertEquals(out.get(5).doubleValue(), 6.0, 0.0); + @SuppressWarnings({"unchecked"}) List> mout = (List>)tuples.get(0).get("h"); assertEquals(mout.size(), 2); List row1 = mout.get(0); @@ -2901,6 +2971,7 @@ public void testEbeSubtract() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testMatrixMult() throws Exception { String cexpr = "let(echo=true," + " a=array(1,2,3)," + @@ -3003,10 +3074,15 @@ public void testKmeans() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List> cluster1 = (List>)tuples.get(0).get("g"); + @SuppressWarnings({"unchecked"}) List> cluster2 = (List>)tuples.get(0).get("h"); + @SuppressWarnings({"unchecked"}) List> centroids = (List>)tuples.get(0).get("i"); + @SuppressWarnings({"unchecked"}) List labels1 = (List)tuples.get(0).get("j"); + @SuppressWarnings({"unchecked"}) List labels2 = (List)tuples.get(0).get("k"); assertEquals(cluster1.size(), 2); @@ -3166,10 +3242,15 @@ public void testMultiKmeans() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List> cluster1 = (List>)tuples.get(0).get("g"); + @SuppressWarnings({"unchecked"}) List> cluster2 = (List>)tuples.get(0).get("h"); + @SuppressWarnings({"unchecked"}) List> centroids = (List>)tuples.get(0).get("i"); + @SuppressWarnings({"unchecked"}) List labels1 = (List)tuples.get(0).get("j"); + @SuppressWarnings({"unchecked"}) List labels2 = (List)tuples.get(0).get("k"); assertEquals(cluster1.size(), 2); @@ -3246,12 +3327,18 @@ public void testFuzzyKmeans() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List> cluster1 = (List>)tuples.get(0).get("g"); + @SuppressWarnings({"unchecked"}) List> cluster2 = (List>)tuples.get(0).get("h"); + @SuppressWarnings({"unchecked"}) List> centroids = (List>)tuples.get(0).get("i"); + @SuppressWarnings({"unchecked"}) List> membership = (List>)tuples.get(0).get("l"); + @SuppressWarnings({"unchecked"}) List labels1 = (List)tuples.get(0).get("j"); + @SuppressWarnings({"unchecked"}) List labels2 = (List)tuples.get(0).get("k"); assertEquals(cluster1.size(), 2); @@ -3337,6 +3424,7 @@ public void testEbeMultiply() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("return-value"); assertTrue(out.size() == 6); assertTrue(out.get(0).intValue() == 2); @@ -3368,14 +3456,15 @@ public void testOscillate() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List wave = (List)tuples.get(0).get("a"); assertEquals(wave.size(), 128); - Map desc = (Map)tuples.get(0).get("b"); Number min = (Number)tuples.get(0).get("c"); Number max = (Number)tuples.get(0).get("d"); assertEquals(min.doubleValue(), -9.9, .1); assertEquals(max.doubleValue(), 9.9, .1); + @SuppressWarnings({"unchecked"}) List wave1 = (List)tuples.get(0).get("e"); assertEquals(wave1.size(), 128); @@ -3387,6 +3476,7 @@ public void testOscillate() throws Exception { assertEquals(freq.doubleValue(), .3, .1); assertEquals(pha.doubleValue(), 2.9, .1); + @SuppressWarnings({"unchecked"}) List der = (List)tuples.get(0).get("i"); 
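Nearly every MathExpressionTest hunk above is this same one-line addition; a minimal sketch of the idiom, with the generic types assumed (this raw diff drops angle brackets) and the field name hypothetical:

// Values stored in a Tuple come back as untyped Objects, so the cast is
// inherently unchecked; annotating the local declaration confines the
// suppression to this single statement instead of the whole test method.
@SuppressWarnings({"unchecked"})
List<Number> out = (List<Number>) tuples.get(0).get("return-value");

Scoping the suppression this narrowly is what lets the rest of each test method keep full compiler checking.
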
assertEquals(der.size(), 128); assertEquals(der.get(0).doubleValue(), -0.7177479876419472, 0); @@ -3413,6 +3503,7 @@ public void testEbeAdd() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("c"); assertEquals(out.size(), 6); assertEquals(out.get(0).doubleValue(), 3.0, 0.0); @@ -3422,6 +3513,7 @@ public void testEbeAdd() throws Exception { assertEquals(out.get(4).doubleValue(), 15.0, 0.0); assertEquals(out.get(5).doubleValue(), 18.0, 0.0); + @SuppressWarnings({"unchecked"}) List> mout = (List>)tuples.get(0).get("h"); assertEquals(mout.size(), 2); List row1 = mout.get(0); @@ -3467,6 +3559,7 @@ public void testSetAndGetValue() throws Exception { assertEquals(mean.doubleValue(), 3.3800151591412964, 0.0); Number mean1 = (Number)tuples.get(0).get("d"); assertEquals(mean1.doubleValue(), 4.3800151591412964, 0.0); + @SuppressWarnings({"unchecked"}) List vals = (List)tuples.get(0).get("f"); assertEquals(vals.size(), 3); assertEquals(vals.get(0).doubleValue(), 8.11, 0); @@ -3487,6 +3580,7 @@ public void testEbeDivide() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("return-value"); assertTrue(out.size() == 6); assertTrue(out.get(0).intValue() == 2); @@ -3549,6 +3643,7 @@ public void testFFT() throws Exception { List tuples = getTuples(solrStream); assertEquals(tuples.size(), 1); + @SuppressWarnings({"unchecked"}) List> fft = (List>)tuples.get(0).get("a"); assertEquals(fft.size(), 2); List reals = fft.get(0); @@ -3574,6 +3669,7 @@ public void testFFT() throws Exception { assertEquals(imaginary.get(i).doubleValue(), 0.0, 0.0); } + @SuppressWarnings({"unchecked"}) List ifft = (List)tuples.get(0).get("b"); assertEquals(ifft.get(0).doubleValue(), 1, 0.0); assertEquals(ifft.get(1).doubleValue(), 4, 0.0); @@ -3644,6 +3740,7 @@ public void testPoissonDistribution() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"rawtypes"}) Map map = (Map)tuples.get(0).get("d"); Number mean = (Number)map.get("mean"); Number var = (Number)map.get("var"); @@ -3678,17 +3775,23 @@ public void testGeometricDistribution() throws Exception { List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked", "rawtypes"}) List listg = (List)tuples.get(0).get("g"); + @SuppressWarnings({"rawtypes"}) Map mapg = listg.get(0); double pctg = (double) mapg.get("pct"); assertEquals(pctg, .2, .02); + @SuppressWarnings({"unchecked", "rawtypes"}) List listh = (List)tuples.get(0).get("h"); + @SuppressWarnings({"rawtypes"}) Map maph = listh.get(0); double pcth = (double)maph.get("pct"); assertEquals(pcth, .5, .02); + @SuppressWarnings({"unchecked", "rawtypes"}) List listi = (List)tuples.get(0).get("i"); + @SuppressWarnings({"rawtypes"}) Map mapi = listi.get(0); double pcti = (double)mapi.get("pct"); assertEquals(pcti, .8, .02); @@ -3734,6 +3837,7 @@ public void testUniformIntegerDistribution() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"rawtypes"}) Map map = (Map)tuples.get(0).get("d"); Number N = (Number)map.get("N"); assertEquals(N.intValue(), 10000); @@ -3755,6 +3859,7 @@ public void testZipFDistribution() throws 
Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List counts = (List)tuples.get(0).get("c"); assertTrue(counts.size() == 10); @@ -3796,6 +3901,7 @@ public void testValueAt() throws Exception { @Test + @SuppressWarnings({"unchecked"}) public void testBetaDistribution() throws Exception { String cexpr = "let(a=sample(betaDistribution(1, 5), 50000), b=hist(a, 11), c=col(b, N))"; ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); @@ -3859,6 +3965,7 @@ public void testEnumeratedDistribution() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"rawtypes"}) Map map = (Map)tuples.get(0).get("d"); Number N = (Number)map.get("N"); assertEquals(N.intValue(), 10000); @@ -3881,6 +3988,7 @@ public void testEnumeratedDistribution() throws Exception { solrStream.setStreamContext(context); tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List freqs = (List)tuples.get(0).get("y"); assertEquals(freqs.get(0).doubleValue(), .40, .03); assertEquals(freqs.get(1).doubleValue(), .30, .03); @@ -3922,13 +4030,31 @@ public void testVarianceAndStandardDeviation() throws Exception { assertTrue(stddev.doubleValue() == 0); } + // NOTE: cache evaluators work only locally, on + // the same node where the replica that executes + // the stream is located @Test - public void testCache() throws Exception { + @SuppressWarnings({"unchecked"}) + public void testCache() throws Exception { String cexpr = "putCache(\"space1\", \"key1\", dotProduct(array(2,4,6,8,10,12),array(1,2,3,4,5,6)))"; ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); paramsLoc.set("expr", cexpr); paramsLoc.set("qt", "/stream"); - String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+COLLECTIONORALIAS; + // find a node with a replica + ClusterState clusterState = cluster.getSolrClient().getClusterStateProvider().getClusterState(); + String collection = useAlias ?
COLLECTIONORALIAS + "_collection" : COLLECTIONORALIAS; + DocCollection coll = clusterState.getCollection(collection); + String node = coll.getReplicas().iterator().next().getNodeName(); + String url = null; + for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { + if (jetty.getNodeName().equals(node)) { + url = jetty.getBaseUrl().toString()+"/"+COLLECTIONORALIAS; + break; + } + } + if (url == null) { + fail("unable to find a node with replica"); + } TupleStream solrStream = new SolrStream(url, paramsLoc); StreamContext context = new StreamContext(); solrStream.setStreamContext(context); @@ -4017,6 +4143,7 @@ public void testExponentialMovingAverage() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("return-value"); assertTrue(out.size() == 21); assertEquals((double) out.get(0), 22.22, 0.009); @@ -4042,6 +4169,44 @@ public void testExponentialMovingAverage() throws Exception { assertEquals((double)out.get(20), 22.92, 0.009); } + + @Test + public void testTimeDifferencingMatrix() throws Exception { + String cexpr = "let(echo=\"c, d\",\n" + + " a=matrix(array(1,2,3,4,5),array(7.5,9,11,15.5,50.2)),\n" + + " b=setColumnLabels(a, array(\"a\",\"b\",\"c\",\"d\",\"e\")),\n" + + " c=diff(b, 2),\n" + + " d=getColumnLabels(c))"; + ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); + paramsLoc.set("expr", cexpr); + paramsLoc.set("qt", "/stream"); + String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+COLLECTIONORALIAS; + TupleStream solrStream = new SolrStream(url, paramsLoc); + StreamContext context = new StreamContext(); + solrStream.setStreamContext(context); + List tuples = getTuples(solrStream); + assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) + List> matrix = (List>)tuples.get(0).get("c"); + @SuppressWarnings({"unchecked"}) + List columnsLabels = (List)tuples.get(0).get("d"); + assertEquals(columnsLabels.size(), 3); + assertEquals(columnsLabels.get(0), "c"); + assertEquals(columnsLabels.get(1), "d"); + assertEquals(columnsLabels.get(2), "e"); + assertEquals(matrix.size(), 2); + List row1 = matrix.get(0); + List row2 = matrix.get(1); + assertEquals(row1.size(), 3); + assertEquals(row1.get(0).doubleValue(), 2.0, 0); + assertEquals(row1.get(1).doubleValue(), 2.0, 0); + assertEquals(row1.get(2).doubleValue(), 2.0, 0); + assertEquals(row2.size(), 3 ); + assertEquals(row2.get(0).doubleValue(), 3.5, 0); + assertEquals(row2.get(1).doubleValue(), 6.5, 0); + assertEquals(row2.get(2).doubleValue(), 39.2, 0); + } + @Test public void testTimeDifferencingDefaultLag() throws Exception { String cexpr = "diff(array(1709.0, 1621.0, 1973.0, 1812.0, 1975.0, 1862.0, 1940.0, 2013.0, 1596.0, 1725.0, 1676.0, 1814.0, 1615.0, 1557.0, 1891.0, 1956.0, 1885.0, 1623.0, 1903.0, 1997.0))"; @@ -4054,6 +4219,7 @@ public void testTimeDifferencingDefaultLag() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("return-value"); assertTrue(out.size() == 19); assertEquals(out.get(0).doubleValue(),-88.0, 0.01); @@ -4089,6 +4255,7 @@ public void testTimeDifferencingDefinedLag() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("return-value"); 
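The new testTimeDifferencingMatrix above pins down the lag semantics that the defined-lag and default-lag tests around it also exercise; a small worked sketch (hypothetical helper, not a Solr API):

// diff(x, lag) as exercised by these tests: out[i] = x[i + lag] - x[i],
// so an input of length n yields n - lag values (the default-lag test
// turns 20 inputs into 19 diffs).
static double[] lagDiff(double[] x, int lag) {
  double[] out = new double[x.length - lag];
  for (int i = 0; i < out.length; i++) {
    out[i] = x[i + lag] - x[i];
  }
  return out;
}
// lagDiff({1, 2, 3, 4, 5}, 2)          -> {2.0, 2.0, 2.0}
// lagDiff({7.5, 9, 11, 15.5, 50.2}, 2) -> {3.5, 6.5, 39.2}  (row 2 of the matrix test)
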
assertTrue(out.size() == 8); assertEquals(out.get(0).doubleValue(), -94.0, 0.01); @@ -4113,6 +4280,7 @@ public void testNestedDoubleTimeDifference() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("return-value"); assertTrue(out.size() == 7); assertEquals(out.get(0).doubleValue(), 30.0, 0.01); @@ -4125,6 +4293,7 @@ public void testNestedDoubleTimeDifference() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testPolyfit() throws Exception { String cexpr = "let(echo=true," + " a=array(0,1,2,3,4,5,6,7)," + @@ -4181,18 +4350,21 @@ public void testTtest() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"rawtypes"}) Map testResult = (Map)tuples.get(0).get("ttest"); Number tstat = (Number)testResult.get("t-statistic"); Number pval = (Number)testResult.get("p-value"); assertEquals(tstat.doubleValue(), 2.3666107120397575, .0001); assertEquals(pval.doubleValue(), 0.029680704317867967, .0001); + @SuppressWarnings({"rawtypes"}) Map testResult2 = (Map)tuples.get(0).get("onesamplettest"); Number tstat2 = (Number)testResult2.get("t-statistic"); Number pval2 = (Number)testResult2.get("p-value"); assertEquals(tstat2.doubleValue(), 0, .0001); assertEquals(pval2.doubleValue(), 1, .0001); + @SuppressWarnings({"rawtypes"}) Map testResult3 = (Map)tuples.get(0).get("pairedttest"); Number tstat3 = (Number)testResult3.get("t-statistic"); Number pval3 = (Number)testResult3.get("p-value"); @@ -4216,6 +4388,7 @@ public void testChiSquareDataSet() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"rawtypes"}) Map testResult = (Map)tuples.get(0).get("chisquare"); Number tstat = (Number)testResult.get("chisquare-statistic"); Number pval = (Number)testResult.get("p-value"); @@ -4239,6 +4412,7 @@ public void testGtestDataSet() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"rawtypes"}) Map testResult = (Map)tuples.get(0).get("gtest"); Number gstat = (Number)testResult.get("G-statistic"); Number pval = (Number)testResult.get("p-value"); @@ -4271,6 +4445,7 @@ public void testMultiVariateNormalDistribution() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List> cov = (List>)tuples.get(0).get("h"); assertEquals(cov.size(), 2); List row1 = cov.get(0); @@ -4288,6 +4463,7 @@ public void testMultiVariateNormalDistribution() throws Exception { assertEquals(c, 56.66666666666667, 7); assertEquals(d, 723.8095238095239, 50); + @SuppressWarnings({"unchecked"}) List sample = (List)tuples.get(0).get("i"); assertEquals(sample.size(), 2); Number sample1 = sample.get(0); @@ -4300,6 +4476,7 @@ public void testMultiVariateNormalDistribution() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testKnn() throws Exception { String cexpr = "let(echo=true," + " a=setRowLabels(matrix(array(1,1,1,0,0,0),"+ @@ -4343,6 +4520,7 @@ public void testKnn() throws Exception { assertEquals(row2.get(4).doubleValue(), 1.0, 0.0); assertEquals(row2.get(5).doubleValue(), 1.0, 0.0); + @SuppressWarnings({"rawtypes"}) Map atts = (Map)tuples.get(0).get("e"); List dists = 
(List)atts.get("distances"); assertEquals(dists.size(), 2); @@ -4385,6 +4563,7 @@ public void testIntegrate() throws Exception { assertEquals(integral.doubleValue(), 20, 0.0); integral = (Number)tuples.get(0).get("e"); assertEquals(integral.doubleValue(), 29, 0.0); + @SuppressWarnings({"unchecked"}) List integrals = (List)tuples.get(0).get("f"); assertEquals(integrals.size(), 50); assertEquals(integrals.get(49).intValue(), 49); @@ -4406,6 +4585,7 @@ public void testLoess() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("fit"); assertTrue(out.size() == 8); assertEquals(out.get(0).doubleValue(), 0.0, 0.0); @@ -4417,6 +4597,7 @@ public void testLoess() throws Exception { assertEquals(out.get(6).doubleValue(), 6.0, 0.0); assertEquals(out.get(7).doubleValue(), 7.0, 0.0); + @SuppressWarnings({"unchecked"}) List out1 = (List)tuples.get(0).get("der"); assertTrue(out1.size() == 8); assertEquals(out1.get(0).doubleValue(), 1.0, 0.0); @@ -4447,6 +4628,7 @@ public void testSpline() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("fit"); assertTrue(out.size() == 8); assertEquals(out.get(0).doubleValue(), 1.0, 0.0001); @@ -4458,6 +4640,7 @@ public void testSpline() throws Exception { assertEquals(out.get(6).doubleValue(), 1.0, 0.0001); assertEquals(out.get(7).doubleValue(), 9.0, 0.0001); + @SuppressWarnings({"unchecked"}) List out1 = (List)tuples.get(0).get("der"); assertTrue(out1.size() == 8); @@ -4503,6 +4686,7 @@ public void testBicubicSpline() throws Exception { assertEquals(p2.doubleValue(), 536.8916383774491, 0.0); Number p3 = (Number)tuples.get(0).get("p3"); assertEquals(p3.doubleValue(), 659.921875, 0.0); + @SuppressWarnings({"unchecked"}) List p4 = (List)tuples.get(0).get("p4"); assertEquals(p4.get(0).doubleValue(), 449.7837701612903, 0.0); assertEquals(p4.get(1).doubleValue(), 536.8916383774491, 0.0); @@ -4526,6 +4710,7 @@ public void testAkima() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("fit"); assertTrue(out.size() == 8); assertEquals(out.get(0).doubleValue(), 1.0, 0.0001); @@ -4537,6 +4722,7 @@ public void testAkima() throws Exception { assertEquals(out.get(6).doubleValue(), 1.0, 0.0001); assertEquals(out.get(7).doubleValue(), 9.0, 0.0001); + @SuppressWarnings({"unchecked"}) List out1 = (List)tuples.get(0).get("der"); assertTrue(out1.size() == 8); assertEquals(out1.get(0).doubleValue(), 93.5, 0.0001); @@ -4568,8 +4754,10 @@ public void testOutliers() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked", "rawtypes"}) List out = (List)tuples.get(0).get("e"); assertEquals(out.size(), 2); + @SuppressWarnings({"rawtypes"}) Map high = out.get(0); assertEquals(((String)high.get("id")), "1"); @@ -4577,20 +4765,24 @@ public void testOutliers() throws Exception { assertEquals(((Number)high.get("highOutlierValue_d")).doubleValue(), 110.0, 0.0); + @SuppressWarnings({"rawtypes"}) Map low = out.get(1); assertEquals(((String)low.get("id")), "2"); assertEquals(((Number)low.get("cumulativeProbablity_d")).doubleValue(), 0.022750131948179167, 0.0 ); 
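The probability constants asserted in testOutliers here are not arbitrary: 0.9772498680518208 and 0.022750131948179167 are the standard normal CDF at z = +2 and z = -2, so the high and low outliers (110 and 90) sit exactly two standard deviations from the mean of the sampled distribution. A quick cross-check against commons-math3, which these streaming math evaluators build on:

import org.apache.commons.math3.distribution.NormalDistribution;

NormalDistribution stdNormal = new NormalDistribution(0, 1);
// Phi(+2) and Phi(-2), matching the asserted cumulativeProbablity_d values
assert Math.abs(stdNormal.cumulativeProbability(2.0) - 0.9772498680518208) < 1e-12;
assert Math.abs(stdNormal.cumulativeProbability(-2.0) - 0.022750131948179167) < 1e-12;
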
assertEquals(((Number)low.get("lowOutlierValue_d")).doubleValue(), 90, 0.0); + @SuppressWarnings({"unchecked", "rawtypes"}) List out1 = (List)tuples.get(0).get("f"); assertEquals(out1.size(), 2); + @SuppressWarnings({"rawtypes"}) Map high1 = out1.get(0); assert(high1.get("id") == null); assertEquals(((Number)high1.get("cumulativeProbablity_d")).doubleValue(), 0.9772498680518208, 0.0 ); assertEquals(((Number)high1.get("highOutlierValue_d")).doubleValue(), 110.0, 0.0); + @SuppressWarnings({"rawtypes"}) Map low1 = out1.get(1); assert(low1.get("id") == null); assertEquals(((Number)low1.get("cumulativeProbablity_d")).doubleValue(), 0.022750131948179167, 0.0 ); @@ -4616,6 +4808,7 @@ public void testLerp() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("fit"); assertTrue(out.size() == 8); assertEquals(out.get(0).doubleValue(), 1.0, 0.0001); @@ -4627,6 +4820,7 @@ public void testLerp() throws Exception { assertEquals(out.get(6).doubleValue(), 1.0, 0.0001); assertEquals(out.get(7).doubleValue(), 9.0, 0.0001); + @SuppressWarnings({"unchecked"}) List out1 = (List)tuples.get(0).get("der"); assertTrue(out1.size() == 8); assertEquals(out1.get(0).doubleValue(), 69.0, 0.0001); @@ -4652,6 +4846,7 @@ public void testHarmonicFit() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("s"); assertTrue(out.size() == 100); for(Number n : out) { @@ -4694,12 +4889,14 @@ public void testOlsRegress() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"rawtypes"}) Map regression = (Map)tuples.get(0).get("f"); Number rsquared = (Number)regression.get("RSquared"); assertEquals(rsquared.doubleValue(), 0.9667887860584002, .000001); + @SuppressWarnings({"unchecked"}) List regressionParameters = (List)regression.get("regressionParameters"); assertEquals(regressionParameters.get(0).doubleValue(), 7.676028542255028, .0001); @@ -4707,6 +4904,7 @@ public void testOlsRegress() throws Exception { assertEquals(regressionParameters.get(2).doubleValue(), 7.621051256504592, .0001); assertEquals(regressionParameters.get(3).doubleValue(), 0.8284680662898674, .0001); + @SuppressWarnings({"unchecked"}) List predictions = (List)tuples.get(0).get("g"); assertEquals(predictions.get(0).doubleValue(), 81.56082305847914, .0001); @@ -4722,6 +4920,7 @@ public void testOlsRegress() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testKnnRegress() throws Exception { String cexpr = "let(echo=true, a=array(8.5, 12.89999962, 5.199999809, 10.69999981, 3.099999905, 3.5, 9.199999809, 9, 15.10000038, 10.19999981), " + "b=array(5.099999905, 5.800000191, 2.099999905, 8.399998665, 2.900000095, 1.200000048, 3.700000048, 7.599999905, 7.699999809, 4.5)," + @@ -4920,6 +5119,7 @@ public void testDoubleArray() throws Exception { List tuples = getTuples(solrStream); assertEquals(tuples.size(), 1); + @SuppressWarnings({"unchecked"}) List doubles = (List)tuples.get(0).get("doubles"); assertEquals(doubles.get(0), 1.1, 0); assertEquals(doubles.get(1), 1.3, 0); @@ -4942,6 +5142,7 @@ public void testGaussfit() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + 
@SuppressWarnings({"unchecked"}) List predictions = (List)tuples.get(0).get("g"); assertEquals(predictions.size(), 25); assertEquals(predictions.get(0).doubleValue(), 1.5217511259930976, 0); @@ -4986,6 +5187,7 @@ public void testPlot() throws Exception { assertTrue(tuples.size() == 1); String plot = tuples.get(0).getString("plot"); assertTrue(plot.equals("scatter")); + @SuppressWarnings({"unchecked"}) List> data = (List>)tuples.get(0).get("data"); assertTrue(data.size() == 3); List pair1 = data.get(0); @@ -5011,6 +5213,7 @@ public void testMovingAverage() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("return-value"); assertTrue(out.size()==4); assertEquals((double) out.get(0), 2.5, .0); @@ -5031,13 +5234,14 @@ public void testMovingMAD() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("return-value"); assertTrue(out.size()==4); System.out.println("MAD:"+out); - assertEquals((double) out.get(0).doubleValue(), 1, .0); - assertEquals((double) out.get(1).doubleValue(), 1, .0); - assertEquals((double) out.get(2).doubleValue(), 1, .0); - assertEquals((double) out.get(3).doubleValue(), 1.59375, .0); + assertEquals(out.get(0).doubleValue(), 1, .0); + assertEquals(out.get(1).doubleValue(), 1, .0); + assertEquals(out.get(2).doubleValue(), 1, .0); + assertEquals(out.get(3).doubleValue(), 1.59375, .0); } @Test @@ -5070,6 +5274,7 @@ public void testMovingMedian() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("return-value"); assertTrue(out.size() == 3); assertEquals(out.get(0).doubleValue(), 6.0, .0); @@ -5105,6 +5310,7 @@ public void testMonteCarlo() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("c"); assertTrue(out.size()==10); assertEquals(out.get(0).doubleValue(), 30.0, .0); @@ -5136,6 +5342,7 @@ public void testMonteCarloWithVariables() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List out = (List)tuples.get(0).get("c"); assertTrue(out.size()==10); assertEquals(out.get(0).doubleValue(), 40.0, .0); @@ -5170,9 +5377,13 @@ public void testWeibullDistribution() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"rawtypes"}) Map a = (Map)tuples.get(0).get("a"); + @SuppressWarnings({"rawtypes"}) Map b = (Map)tuples.get(0).get("b"); + @SuppressWarnings({"rawtypes"}) Map c = (Map)tuples.get(0).get("c"); + @SuppressWarnings({"rawtypes"}) Map d = (Map)tuples.get(0).get("d"); Number sa = (Number)a.get("skewness"); @@ -5214,6 +5425,7 @@ public void testGammaDistribution() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertEquals(tuples.size(), 1); + @SuppressWarnings({"unchecked"}) List b = (List)tuples.get(0).get("b"); assertEquals(10, b.size()); Number c = (Number)tuples.get(0).get("c"); @@ -5241,8 +5453,11 @@ public void testLogNormalDistribution() throws 
Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"rawtypes"}) Map a = (Map)tuples.get(0).get("a"); + @SuppressWarnings({"rawtypes"}) Map b = (Map)tuples.get(0).get("b"); + @SuppressWarnings({"rawtypes"}) Map c = (Map)tuples.get(0).get("c"); Number sa = (Number)a.get("skewness"); @@ -5275,7 +5490,9 @@ public void testTriangularDistribution() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"rawtypes"}) Map a = (Map)tuples.get(0).get("a"); + @SuppressWarnings({"rawtypes"}) Map b = (Map)tuples.get(0).get("b"); Number sa = (Number)a.get("skewness"); @@ -5302,6 +5519,7 @@ public void testCovMatrix() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List> cm = (List>)tuples.get(0).get("f"); assertEquals(cm.size(), 3); List row1 = cm.get(0); @@ -5324,6 +5542,7 @@ public void testCovMatrix() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testCorrMatrix() throws Exception { String cexpr = "let(echo=true," + "a=array(1,2,3), " + @@ -5453,6 +5672,7 @@ public void testPrecision() throws Exception { List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List nums = (List)tuples.get(0).get("a"); assertTrue(nums.size() == 3); assertEquals(nums.get(0).doubleValue(), 1.4445, 0.0); @@ -5476,6 +5696,7 @@ public void testPrecisionMatrix() throws Exception { List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List> rows = (List>)tuples.get(0).get("b"); assertTrue(rows.size() == 2); List row1 = rows.get(0); @@ -5490,6 +5711,7 @@ public void testPrecisionMatrix() throws Exception { } @Test + @SuppressWarnings({"unchecked"}) public void testMinMaxScale() throws Exception { String cexpr = "let(echo=true, a=minMaxScale(matrix(array(1,2,3,4,5), array(10,20,30,40,50))), " + "b=minMaxScale(matrix(array(1,2,3,4,5), array(10,20,30,40,50)), 0, 100)," + @@ -5642,6 +5864,7 @@ public void testScale() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List reverse = (List)tuples.get(0).get("reverse"); assertTrue(reverse.size() == 4); assertTrue(reverse.get(0).doubleValue() == 400D); @@ -5649,6 +5872,7 @@ public void testScale() throws Exception { assertTrue(reverse.get(2).doubleValue() == 500D); assertTrue(reverse.get(3).doubleValue() == 100D); + @SuppressWarnings({"unchecked"}) List ranked = (List)tuples.get(0).get("scaled"); assertTrue(ranked.size() == 4); assertTrue(ranked.get(0).doubleValue() == 200D); @@ -5699,6 +5923,7 @@ public void testConvolution() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked"}) List convolution = (List)(tuples.get(0)).get("conv"); assertTrue(convolution.size() == 7); assertTrue(convolution.get(0).equals(20000D)); @@ -5748,6 +5973,7 @@ public void testRegressAndPredict() throws Exception { List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); Tuple tuple = tuples.get(0); + @SuppressWarnings({"rawtypes"}) Map regression = (Map)tuple.get("regress"); double slope = (double)regression.get("slope"); double intercept= (double) 
regression.get("intercept"); @@ -5757,6 +5983,7 @@ public void testRegressAndPredict() throws Exception { assertTrue(rSquare == 1.0D); double prediction = tuple.getDouble("p"); assertTrue(prediction == 600.0D); + @SuppressWarnings({"unchecked"}) List predictions = (List)tuple.get("pl"); assertList(predictions, 200D, 400D, 600D, 200D, 400D, 800D, 1200D); } @@ -5942,6 +6169,7 @@ public void testLength() throws Exception { List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); Tuple tuple = tuples.get(0); + @SuppressWarnings({"rawtypes"}) Map regression = (Map)tuple.get("regress"); double slope = (double)regression.get("slope"); double intercept= (double) regression.get("intercept"); @@ -5977,7 +6205,7 @@ public void testConvertEvaluator() throws Exception { List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); double d = (double)tuples.get(0).get("kilometers"); - assertTrue(d == (double)(10*1.61)); + assertTrue(d == (10*1.61)); expr = "select(search("+COLLECTIONORALIAS+", q=\"*:*\", sort=\"miles_i asc\", fl=\"miles_i\"), convert(miles, kilometers, miles_i) as kilometers)"; @@ -5991,9 +6219,9 @@ public void testConvertEvaluator() throws Exception { tuples = getTuples(solrStream); assertTrue(tuples.size() == 2); d = (double)tuples.get(0).get("kilometers"); - assertTrue(d == (double)(50*1.61)); + assertTrue(d == (50*1.61)); d = (double)tuples.get(1).get("kilometers"); - assertTrue(d == (double)(70*1.61)); + assertTrue(d == (70*1.61)); expr = "parallel("+COLLECTIONORALIAS+", workers=2, sort=\"miles_i asc\", select(search("+COLLECTIONORALIAS+", q=\"*:*\", partitionKeys=miles_i, sort=\"miles_i asc\", fl=\"miles_i\", qt=\"/export\"), convert(miles, kilometers, miles_i) as kilometers))"; paramsLoc = new ModifiableSolrParams(); @@ -6005,9 +6233,9 @@ public void testConvertEvaluator() throws Exception { tuples = getTuples(solrStream); assertTrue(tuples.size() == 2); d = (double)tuples.get(0).get("kilometers"); - assertTrue(d == (double)(50*1.61)); + assertTrue(d == (50*1.61)); d = (double)tuples.get(1).get("kilometers"); - assertTrue(d == (double)(70*1.61)); + assertTrue(d == (70*1.61)); expr = "select(stats("+COLLECTIONORALIAS+", q=\"*:*\", sum(miles_i)), convert(miles, kilometers, sum(miles_i)) as kilometers)"; paramsLoc = new ModifiableSolrParams(); @@ -6019,7 +6247,7 @@ public void testConvertEvaluator() throws Exception { tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); d = (double)tuples.get(0).get("kilometers"); - assertTrue(d == (double)(120*1.61)); + assertTrue(d == (120*1.61)); } protected List getTuples(TupleStream tupleStream) throws IOException { @@ -6057,7 +6285,7 @@ public boolean assertString(Tuple tuple, String fieldName, String expected) thro return true; } - private boolean assertList(List list, Object... vals) throws Exception { + private boolean assertList(@SuppressWarnings({"rawtypes"})List list, Object... 
vals) throws Exception { if(list.size() != vals.length) { throw new Exception("Lists are not the same size:"+list.size() +" : "+vals.length); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/RecordCountStream.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/RecordCountStream.java index fb93d86eae24..f1974bd56067 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/RecordCountStream.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/RecordCountStream.java @@ -103,7 +103,7 @@ public void open() throws IOException { } public List children() { - List l = new ArrayList(); + List l = new ArrayList<>(); l.add(stream); return l; } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/SelectWithEvaluatorsTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/SelectWithEvaluatorsTest.java index cf86691e5a0f..add4331b6c4b 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/SelectWithEvaluatorsTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/SelectWithEvaluatorsTest.java @@ -154,6 +154,7 @@ protected boolean assertMapOrder(List tuples, int... ids) throws Exceptio int i = 0; for(int val : ids) { Tuple t = tuples.get(i); + @SuppressWarnings({"rawtypes"}) List tip = t.getMaps("group"); int id = (int)tip.get(0).get("id"); if(id != val) { @@ -167,7 +168,7 @@ protected boolean assertMapOrder(List tuples, int... ids) throws Exceptio protected boolean assertFields(List tuples, String ... fields) throws Exception{ for(Tuple tuple : tuples){ for(String field : fields){ - if(!tuple.fields.containsKey(field)){ + if(!tuple.getFields().containsKey(field)){ throw new Exception(String.format(Locale.ROOT, "Expected field '%s' not found", field)); } } @@ -177,7 +178,7 @@ protected boolean assertFields(List tuples, String ... fields) throws Exc protected boolean assertNotFields(List tuples, String ... fields) throws Exception{ for(Tuple tuple : tuples){ for(String field : fields){ - if(tuple.fields.containsKey(field)){ + if(tuple.getFields().containsKey(field)){ throw new Exception(String.format(Locale.ROOT, "Unexpected field '%s' found", field)); } } @@ -229,13 +230,14 @@ public boolean assertString(Tuple tuple, String fieldName, String expected) thro return true; } - protected boolean assertMaps(List maps, int... ids) throws Exception { + protected boolean assertMaps(@SuppressWarnings({"rawtypes"})List maps, int... ids) throws Exception { if(maps.size() != ids.length) { throw new Exception("Expected id count != actual map count:"+ids.length+":"+maps.size()); } int i=0; for(int val : ids) { + @SuppressWarnings({"rawtypes"}) Map t = maps.get(i); String tip = (String)t.get("id"); if(!tip.equals(Integer.toString(val))) { @@ -246,7 +248,7 @@ protected boolean assertMaps(List maps, int... ids) throws Exception { return true; } - private boolean assertList(List list, Object... vals) throws Exception { + private boolean assertList(@SuppressWarnings({"rawtypes"})List list, Object... 
vals) throws Exception { if(list.size() != vals.length) { throw new Exception("Lists are not the same size:"+list.size() +" : "+vals.length); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java index 52aa3780aa59..6c88ffee9c16 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java @@ -61,6 +61,8 @@ import org.apache.solr.cloud.AbstractDistribZkTestBase; import org.apache.solr.cloud.SolrCloudTestCase; import org.apache.solr.common.SolrDocument; +import org.apache.solr.common.cloud.ClusterState; +import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.junit.Assume; @@ -491,6 +493,7 @@ public void testReducerStream() throws Exception { TupleStream stream; List tuples; Tuple t0, t1, t2; + @SuppressWarnings({"rawtypes"}) List maps0, maps1, maps2; StreamContext streamContext = new StreamContext(); SolrClientCache solrClientCache = new SolrClientCache(); @@ -1562,14 +1565,17 @@ public void testParallelReducerStream() throws Exception { assert (tuples.size() == 3); Tuple t0 = tuples.get(0); + @SuppressWarnings({"rawtypes"}) List maps0 = t0.getMaps("group"); assertMaps(maps0, 0, 1, 2, 9); Tuple t1 = tuples.get(1); + @SuppressWarnings({"rawtypes"}) List maps1 = t1.getMaps("group"); assertMaps(maps1, 3, 5, 7, 8); Tuple t2 = tuples.get(2); + @SuppressWarnings({"rawtypes"}) List maps2 = t2.getMaps("group"); assertMaps(maps2, 4, 6); @@ -2294,7 +2300,7 @@ public void testOuterHashJoinStreamWithKnownConflict() throws Exception { tuples = getTuples(stream); assertEquals(1, tuples.size()); - assertFalse(tuples.get(0).fields.containsKey("extra_s")); + assertFalse(tuples.get(0).getFields().containsKey("extra_s")); } finally { solrClientCache.close(); @@ -3654,7 +3660,20 @@ public void testClassifyStream() throws Exception { updateRequest.add(id, String.valueOf(1), "text_s", "a b e e f"); updateRequest.commit(cluster.getSolrClient(), "uknownCollection"); - String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/" + COLLECTIONORALIAS; + // find a node with a replica + ClusterState clusterState = cluster.getSolrClient().getClusterStateProvider().getClusterState(); + DocCollection coll = clusterState.getCollection(COLLECTIONORALIAS); + String node = coll.getReplicas().iterator().next().getNodeName(); + String url = null; + for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { + if (jetty.getNodeName().equals(node)) { + url = jetty.getBaseUrl().toString()+"/"+COLLECTIONORALIAS; + break; + } + } + if (url == null) { + fail("unable to find a node with replica"); + } TupleStream updateTrainModelStream; ModifiableSolrParams paramsLoc; @@ -3783,6 +3802,7 @@ public void testLetStream() throws Exception { List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); Tuple tuple1 = tuples.get(0); + @SuppressWarnings({"unchecked", "rawtypes"}) List results = (List)tuple1.get("results"); assertTrue(results.size() == 2); assertTrue(results.get(0).get("id").equals("hello1")); @@ -4452,7 +4472,7 @@ protected boolean assertOrderOf(List tuples, String fieldName, int... ids protected boolean assertFields(List tuples, String ... 
fields) throws Exception{ for(Tuple tuple : tuples){ for(String field : fields){ - if(!tuple.fields.containsKey(field)){ + if(!tuple.getFields().containsKey(field)){ throw new Exception(String.format(Locale.ROOT, "Expected field '%s' not found", field)); } } @@ -4462,7 +4482,7 @@ protected boolean assertFields(List tuples, String ... fields) throws Exc protected boolean assertNotFields(List tuples, String ... fields) throws Exception{ for(Tuple tuple : tuples){ for(String field : fields){ - if(tuple.fields.containsKey(field)){ + if(tuple.getFields().containsKey(field)){ throw new Exception(String.format(Locale.ROOT, "Unexpected field '%s' found", field)); } } @@ -4505,13 +4525,14 @@ public boolean assertString(Tuple tuple, String fieldName, String expected) thro return true; } - protected boolean assertMaps(List maps, int... ids) throws Exception { + protected boolean assertMaps(@SuppressWarnings({"rawtypes"})List maps, int... ids) throws Exception { if(maps.size() != ids.length) { throw new Exception("Expected id count != actual map count:"+ids.length+":"+maps.size()); } int i=0; for(int val : ids) { + @SuppressWarnings({"rawtypes"}) Map t = maps.get(i); String tip = (String)t.get("id"); if(!tip.equals(Integer.toString(val))) { @@ -4522,7 +4543,7 @@ protected boolean assertMaps(List maps, int... ids) throws Exception { return true; } - private boolean assertList(List list, Object... vals) throws Exception { + private boolean assertList(@SuppressWarnings({"rawtypes"})List list, Object... vals) throws Exception { if(list.size() != vals.length) { throw new Exception("Lists are not the same size:"+list.size() +" : "+vals.length); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java index bc8b40d59229..99e3cfb9f986 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java @@ -48,6 +48,8 @@ import org.apache.solr.client.solrj.io.stream.metrics.MaxMetric; import org.apache.solr.client.solrj.io.stream.metrics.MeanMetric; import org.apache.solr.client.solrj.io.stream.metrics.MinMetric; +import org.apache.solr.client.solrj.io.stream.metrics.PercentileMetric; +import org.apache.solr.client.solrj.io.stream.metrics.StdMetric; import org.apache.solr.client.solrj.io.stream.metrics.SumMetric; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.UpdateRequest; @@ -193,7 +195,7 @@ public void testCloudSolrStream() throws Exception { List shardUrls = TupleStream.getShards(cluster.getZkServer().getZkAddress(), COLLECTIONORALIAS, streamContext); - Map> shardsMap = new HashMap(); + Map> shardsMap = new HashMap<>(); shardsMap.put("myCollection", shardUrls); StreamContext context = new StreamContext(); context.put("shards", shardsMap); @@ -672,7 +674,8 @@ public void testRandomStream() throws Exception { solrStream = new SolrStream(jetty.getBaseUrl().toString() + "/collection1", sParams); tuples4 = getTuples(solrStream); assert(tuples4.size() == 500); - Map fields = tuples4.get(0).fields; + @SuppressWarnings({"rawtypes"}) + Map fields = tuples4.get(0).getFields(); assert(fields.containsKey("id")); assert(fields.containsKey("a_f")); assert(fields.containsKey("a_i")); @@ -762,7 +765,9 @@ public void testStatsStream() throws Exception { .withFunctionName("min", MinMetric.class) 
.withFunctionName("max", MaxMetric.class) .withFunctionName("avg", MeanMetric.class) - .withFunctionName("count", CountMetric.class); + .withFunctionName("count", CountMetric.class) + .withFunctionName("std", StdMetric.class) + .withFunctionName("per", PercentileMetric.class); StreamExpression expression; TupleStream stream; @@ -771,7 +776,7 @@ public void testStatsStream() throws Exception { SolrClientCache cache = new SolrClientCache(); try { streamContext.setSolrClientCache(cache); - String expr = "stats(" + COLLECTIONORALIAS + ", q=*:*, sum(a_i), sum(a_f), min(a_i), min(a_f), max(a_i), max(a_f), avg(a_i), avg(a_f), count(*))"; + String expr = "stats(" + COLLECTIONORALIAS + ", q=*:*, sum(a_i), sum(a_f), min(a_i), min(a_f), max(a_i), max(a_f), avg(a_i), avg(a_f), std(a_i), std(a_f), per(a_i, 50), per(a_f, 50), count(*))"; expression = StreamExpressionParser.parse(expr); stream = factory.constructStream(expression); stream.setStreamContext(streamContext); @@ -792,6 +797,10 @@ public void testStatsStream() throws Exception { Double maxf = tuple.getDouble("max(a_f)"); Double avgi = tuple.getDouble("avg(a_i)"); Double avgf = tuple.getDouble("avg(a_f)"); + Double stdi = tuple.getDouble("std(a_i)"); + Double stdf = tuple.getDouble("std(a_f)"); + Double peri = tuple.getDouble("per(a_i,50)"); + Double perf = tuple.getDouble("per(a_f,50)"); Double count = tuple.getDouble("count(*)"); assertTrue(sumi.longValue() == 70); @@ -802,11 +811,16 @@ public void testStatsStream() throws Exception { assertTrue(maxf.doubleValue() == 10.0D); assertTrue(avgi.doubleValue() == 7.0D); assertTrue(avgf.doubleValue() == 5.5D); + assertTrue(stdi.doubleValue() == 5.477225575051661D); + assertTrue(stdf.doubleValue() == 3.0276503540974917D); + assertTrue(peri.doubleValue() == 7.0D); + assertTrue(perf.doubleValue() == 5.5D); assertTrue(count.doubleValue() == 10); + //Test without query - expr = "stats(" + COLLECTIONORALIAS + ", sum(a_i), sum(a_f), min(a_i), min(a_f), max(a_i), max(a_f), avg(a_i), avg(a_f), count(*))"; + expr = "stats(" + COLLECTIONORALIAS + ", sum(a_i), sum(a_f), min(a_i), min(a_f), max(a_i), max(a_f), avg(a_i), avg(a_f), std(a_i), std(a_f), per(a_i, 50), per(a_f, 50), count(*))"; expression = StreamExpressionParser.parse(expr); stream = factory.constructStream(expression); stream.setStreamContext(streamContext); @@ -827,6 +841,10 @@ public void testStatsStream() throws Exception { maxf = tuple.getDouble("max(a_f)"); avgi = tuple.getDouble("avg(a_i)"); avgf = tuple.getDouble("avg(a_f)"); + stdi = tuple.getDouble("std(a_i)"); + stdf = tuple.getDouble("std(a_f)"); + peri = tuple.getDouble("per(a_i,50)"); + perf = tuple.getDouble("per(a_f,50)"); count = tuple.getDouble("count(*)"); assertTrue(sumi.longValue() == 70); @@ -837,13 +855,16 @@ public void testStatsStream() throws Exception { assertTrue(maxf.doubleValue() == 10.0D); assertTrue(avgi.doubleValue() == 7.0D); assertTrue(avgf.doubleValue() == 5.5D); + assertTrue(stdi.doubleValue() == 5.477225575051661D); + assertTrue(stdf.doubleValue() == 3.0276503540974917D); + assertTrue(peri.doubleValue() == 7.0D); + assertTrue(perf.doubleValue() == 5.5D); assertTrue(count.doubleValue() == 10); - //Test with shards parameter List shardUrls = TupleStream.getShards(cluster.getZkServer().getZkAddress(), COLLECTIONORALIAS, streamContext); - expr = "stats(myCollection, q=*:*, sum(a_i), sum(a_f), min(a_i), min(a_f), max(a_i), max(a_f), avg(a_i), avg(a_f), count(*))"; - Map> shardsMap = new HashMap(); + expr = "stats(myCollection, q=*:*, sum(a_i), sum(a_f), min(a_i), 
min(a_f), max(a_i), max(a_f), avg(a_i), avg(a_f), std(a_i), std(a_f), per(a_i, 50), per(a_f, 50), count(*))"; + Map> shardsMap = new HashMap<>(); shardsMap.put("myCollection", shardUrls); StreamContext context = new StreamContext(); context.put("shards", shardsMap); @@ -867,6 +888,10 @@ public void testStatsStream() throws Exception { maxf = tuple.getDouble("max(a_f)"); avgi = tuple.getDouble("avg(a_i)"); avgf = tuple.getDouble("avg(a_f)"); + stdi = tuple.getDouble("std(a_i)"); + stdf = tuple.getDouble("std(a_f)"); + peri = tuple.getDouble("per(a_i,50)"); + perf = tuple.getDouble("per(a_f,50)"); count = tuple.getDouble("count(*)"); assertTrue(sumi.longValue() == 70); @@ -877,6 +902,10 @@ public void testStatsStream() throws Exception { assertTrue(maxf.doubleValue() == 10.0D); assertTrue(avgi.doubleValue() == 7.0D); assertTrue(avgf.doubleValue() == 5.5D); + assertTrue(stdi.doubleValue() == 5.477225575051661D); + assertTrue(stdf.doubleValue() == 3.0276503540974917D); + assertTrue(peri.doubleValue() == 7.0D); + assertTrue(perf.doubleValue() == 5.5D); assertTrue(count.doubleValue() == 10); //Execersise the /stream hander @@ -1135,6 +1164,8 @@ public void testFacetStream() throws Exception { .withFunctionName("min", MinMetric.class) .withFunctionName("max", MaxMetric.class) .withFunctionName("avg", MeanMetric.class) + .withFunctionName("std", StdMetric.class) + .withFunctionName("per", PercentileMetric.class) .withFunctionName("count", CountMetric.class); // Basic test @@ -1150,6 +1181,8 @@ public void testFacetStream() throws Exception { + "min(a_i), min(a_f), " + "max(a_i), max(a_f), " + "avg(a_i), avg(a_f), " + + "std(a_i), std(a_f)," + + "per(a_i, 50), per(a_f, 50)," + "count(*)" + ")"; @@ -1158,7 +1191,6 @@ public void testFacetStream() throws Exception { assert(tuples.size() == 3); - //Test Long and Double Sums Tuple tuple = tuples.get(0); String bucket = tuple.getString("a_s"); @@ -1170,6 +1202,12 @@ public void testFacetStream() throws Exception { Double maxf = tuple.getDouble("max(a_f)"); Double avgi = tuple.getDouble("avg(a_i)"); Double avgf = tuple.getDouble("avg(a_f)"); + Double stdi = tuple.getDouble("std(a_i)"); + Double stdf = tuple.getDouble("std(a_f)"); + Double peri = tuple.getDouble("per(a_i,50)"); + Double perf = tuple.getDouble("per(a_f,50)"); + + Double count = tuple.getDouble("count(*)"); assertTrue(bucket.equals("hello4")); @@ -1182,6 +1220,11 @@ public void testFacetStream() throws Exception { assertTrue(avgi.doubleValue() == 7.5D); assertTrue(avgf.doubleValue() == 5.5D); assertTrue(count.doubleValue() == 2); + assertTrue(stdi.doubleValue() == 4.949747468305833D); + assertTrue(stdf.doubleValue() == 2.1213203435596424D); + assertTrue(peri.doubleValue() == 7.5D); + assertTrue(perf.doubleValue() == 5.5D); + tuple = tuples.get(1); bucket = tuple.getString("a_s"); @@ -1194,6 +1237,11 @@ public void testFacetStream() throws Exception { avgi = tuple.getDouble("avg(a_i)"); avgf = tuple.getDouble("avg(a_f)"); count = tuple.getDouble("count(*)"); + stdi = tuple.getDouble("std(a_i)"); + stdf = tuple.getDouble("std(a_f)"); + peri = tuple.getDouble("per(a_i,50)"); + perf = tuple.getDouble("per(a_f,50)"); + assertTrue(bucket.equals("hello0")); assertTrue(sumi.doubleValue() == 17.0D); @@ -1205,6 +1253,11 @@ public void testFacetStream() throws Exception { assertTrue(avgi.doubleValue() == 4.25D); assertTrue(avgf.doubleValue() == 4.5D); assertTrue(count.doubleValue() == 4); + assertTrue(stdi.doubleValue() == 6.551081335677848D); + assertTrue(stdf.doubleValue() == 4.041451884327381D); + 
assertTrue(peri.doubleValue() == 1.5D); + assertTrue(perf.doubleValue() == 3.5D); + tuple = tuples.get(2); bucket = tuple.getString("a_s"); @@ -1217,6 +1270,11 @@ public void testFacetStream() throws Exception { avgi = tuple.getDouble("avg(a_i)"); avgf = tuple.getDouble("avg(a_f)"); count = tuple.getDouble("count(*)"); + stdi = tuple.getDouble("std(a_i)"); + stdf = tuple.getDouble("std(a_f)"); + peri = tuple.getDouble("per(a_i,50)"); + perf = tuple.getDouble("per(a_f,50)"); + assertTrue(bucket.equals("hello3")); assertTrue(sumi.doubleValue() == 38.0D); @@ -1228,6 +1290,10 @@ public void testFacetStream() throws Exception { assertTrue(avgi.doubleValue() == 9.5D); assertTrue(avgf.doubleValue() == 6.5D); assertTrue(count.doubleValue() == 4); + assertTrue(stdi.doubleValue() == 4.509249752822894D); + assertTrue(stdf.doubleValue() == 2.6457513110645907D); + assertTrue(peri.doubleValue() == 11.0D); + assertTrue(perf.doubleValue() == 7.0D); //Reverse the Sort. @@ -1244,6 +1310,8 @@ public void testFacetStream() throws Exception { + "min(a_i), min(a_f), " + "max(a_i), max(a_f), " + "avg(a_i), avg(a_f), " + + "std(a_i), std(a_f)," + + "per(a_i, 50), per(a_f, 50)," + "count(*)" + ")"; @@ -1264,6 +1332,10 @@ public void testFacetStream() throws Exception { avgi = tuple.getDouble("avg(a_i)"); avgf = tuple.getDouble("avg(a_f)"); count = tuple.getDouble("count(*)"); + stdi = tuple.getDouble("std(a_i)"); + stdf = tuple.getDouble("std(a_f)"); + peri = tuple.getDouble("per(a_i,50)"); + perf = tuple.getDouble("per(a_f,50)"); assertTrue(bucket.equals("hello3")); assertTrue(sumi.doubleValue() == 38.0D); @@ -1275,6 +1347,11 @@ public void testFacetStream() throws Exception { assertTrue(avgi.doubleValue() == 9.5D); assertTrue(avgf.doubleValue() == 6.5D); assertTrue(count.doubleValue() == 4); + assertTrue(stdi.doubleValue() == 4.509249752822894D); + assertTrue(stdf.doubleValue() == 2.6457513110645907D); + assertTrue(peri.doubleValue() == 11.0D); + assertTrue(perf.doubleValue() == 7.0D); + tuple = tuples.get(1); bucket = tuple.getString("a_s"); @@ -1287,6 +1364,10 @@ public void testFacetStream() throws Exception { avgi = tuple.getDouble("avg(a_i)"); avgf = tuple.getDouble("avg(a_f)"); count = tuple.getDouble("count(*)"); + stdi = tuple.getDouble("std(a_i)"); + stdf = tuple.getDouble("std(a_f)"); + peri = tuple.getDouble("per(a_i,50)"); + perf = tuple.getDouble("per(a_f,50)"); assertTrue(bucket.equals("hello0")); assertTrue(sumi.doubleValue() == 17.0D); @@ -1298,6 +1379,10 @@ public void testFacetStream() throws Exception { assertTrue(avgi.doubleValue() == 4.25D); assertTrue(avgf.doubleValue() == 4.5D); assertTrue(count.doubleValue() == 4); + assertTrue(stdi.doubleValue() == 6.551081335677848D); + assertTrue(stdf.doubleValue() == 4.041451884327381D); + assertTrue(peri.doubleValue() == 1.5D); + assertTrue(perf.doubleValue() == 3.5D); tuple = tuples.get(2); bucket = tuple.getString("a_s"); @@ -1310,6 +1395,10 @@ public void testFacetStream() throws Exception { avgi = tuple.getDouble("avg(a_i)"); avgf = tuple.getDouble("avg(a_f)"); count = tuple.getDouble("count(*)"); + stdi = tuple.getDouble("std(a_i)"); + stdf = tuple.getDouble("std(a_f)"); + peri = tuple.getDouble("per(a_i,50)"); + perf = tuple.getDouble("per(a_f,50)"); assertTrue(bucket.equals("hello4")); assertTrue(sumi.longValue() == 15); @@ -1321,6 +1410,10 @@
assertTrue(avgi.doubleValue() == 7.5D); assertTrue(avgf.doubleValue() == 5.5D); assertTrue(count.doubleValue() == 2); + assertTrue(stdi.doubleValue() == 4.949747468305833D); + assertTrue(stdf.doubleValue() == 2.1213203435596424D); + assertTrue(peri.doubleValue() == 7.5D); + assertTrue(perf.doubleValue() == 5.5D); clause = "facet(" @@ -1477,6 +1570,8 @@ public void testFacetStream() throws Exception { + "min(a_i), min(a_f), " + "max(a_i), max(a_f), " + "avg(a_i), avg(a_f), " + + "std(a_i), std(a_f)," + + "per(a_i, 50), per(a_f, 50)," + "count(*)" + ")"; @@ -1497,7 +1592,10 @@ public void testFacetStream() throws Exception { avgi = tuple.getDouble("avg(a_i)"); avgf = tuple.getDouble("avg(a_f)"); count = tuple.getDouble("count(*)"); - + stdi = tuple.getDouble("std(a_i)"); + stdf = tuple.getDouble("std(a_f)"); + peri = tuple.getDouble("per(a_i,50)"); + perf = tuple.getDouble("per(a_f,50)"); assertTrue(bucket.equals("hello4")); assertTrue(sumi.longValue() == 15); @@ -1509,7 +1607,10 @@ public void testFacetStream() throws Exception { assertTrue(avgi.doubleValue() == 7.5D); assertTrue(avgf.doubleValue() == 5.5D); assertTrue(count.doubleValue() == 2); - + assertTrue(stdi.doubleValue() == 4.949747468305833D); + assertTrue(stdf.doubleValue() == 2.1213203435596424D); + assertTrue(peri.doubleValue() == 7.5D); + assertTrue(perf.doubleValue() == 5.5D); tuple = tuples.get(1); bucket = tuple.getString("a_s"); @@ -1522,6 +1623,10 @@ public void testFacetStream() throws Exception { avgi = tuple.getDouble("avg(a_i)"); avgf = tuple.getDouble("avg(a_f)"); count = tuple.getDouble("count(*)"); + stdi = tuple.getDouble("std(a_i)"); + stdf = tuple.getDouble("std(a_f)"); + peri = tuple.getDouble("per(a_i,50)"); + perf = tuple.getDouble("per(a_f,50)"); assertTrue(bucket.equals("hello3")); assertTrue(sumi.doubleValue() == 38.0D); @@ -1533,6 +1638,10 @@ public void testFacetStream() throws Exception { assertTrue(avgi.doubleValue() == 9.5D); assertTrue(avgf.doubleValue() == 6.5D); assertTrue(count.doubleValue() == 4); + assertTrue(stdi.doubleValue() == 4.509249752822894D); + assertTrue(stdf.doubleValue() == 2.6457513110645907D); + assertTrue(peri.doubleValue() == 11.0D); + assertTrue(perf.doubleValue() == 7.0D); tuple = tuples.get(2); @@ -1546,6 +1655,10 @@ public void testFacetStream() throws Exception { avgi = tuple.getDouble("avg(a_i)"); avgf = tuple.getDouble("avg(a_f)"); count = tuple.getDouble("count(*)"); + stdi = tuple.getDouble("std(a_i)"); + stdf = tuple.getDouble("std(a_f)"); + peri = tuple.getDouble("per(a_i,50)"); + perf = tuple.getDouble("per(a_f,50)"); assertTrue(bucket.equals("hello0")); assertTrue(sumi.doubleValue() == 17.0D); @@ -1557,6 +1670,11 @@ public void testFacetStream() throws Exception { assertTrue(avgi.doubleValue() == 4.25D); assertTrue(avgf.doubleValue() == 4.5D); assertTrue(count.doubleValue() == 4); + assertTrue(stdi.doubleValue() == 6.551081335677848D); + assertTrue(stdf.doubleValue() == 4.041451884327381D); + assertTrue(peri.doubleValue() == 1.5D); + assertTrue(perf.doubleValue() == 3.5D); + //Test index sort @@ -1810,14 +1928,16 @@ public void testSubFacetStream() throws Exception { .withFunctionName("min", MinMetric.class) .withFunctionName("max", MaxMetric.class) .withFunctionName("avg", MeanMetric.class) - .withFunctionName("count", CountMetric.class); + .withFunctionName("count", CountMetric.class) + .withFunctionName("std", StdMetric.class) + .withFunctionName("per", PercentileMetric.class); // Basic test clause = "facet(" + "collection1, " + "q=\"*:*\", " + "buckets=\"level1_s, 
level2_s\", " - + "bucketSorts=\"sum(a_i) desc, sum(a_i) desc)\", " + + "bucketSorts=\"sum(a_i) desc, sum(a_i) desc\", " + "bucketSizeLimit=100, " + "sum(a_i), count(*)" + ")"; @@ -1897,7 +2017,7 @@ public void testSubFacetStream() throws Exception { + "collection1, " + "q=\"*:*\", " + "buckets=\"level1_s, level2_s\", " - + "bucketSorts=\"level1_s desc, level2_s desc)\", " + + "bucketSorts=\"level1_s desc, level2_s desc\", " + "bucketSizeLimit=100, " + "sum(a_i), count(*)" + ")"; @@ -1972,6 +2092,89 @@ public void testSubFacetStream() throws Exception { assertTrue(bucket2.equals("a")); assertTrue(sumi.longValue() == 2); assertTrue(count.doubleValue() == 2); + + //Add sorts for percentile + + clause = "facet(" + + "collection1, " + + "q=\"*:*\", " + + "buckets=\"level1_s, level2_s\", " + + "bucketSorts=\"per(a_i, 50) desc, std(a_i) desc\", " + + "bucketSizeLimit=100, " + + "std(a_i), per(a_i,50)" + + ")"; + + stream = factory.constructStream(clause); + tuples = getTuples(stream); + + assert(tuples.size() == 6); + + tuple = tuples.get(0); + bucket1 = tuple.getString("level1_s"); + bucket2 = tuple.getString("level2_s"); + double stdi = tuple.getDouble("std(a_i)"); + double peri = tuple.getDouble("per(a_i,50)"); + + assertTrue(bucket1.equals("hello3")); + assertTrue(bucket2.equals("b")); + assertTrue(stdi == 1.5275252316519468D); + assertTrue(peri == 12.0D); + + tuple = tuples.get(1); + bucket1 = tuple.getString("level1_s"); + bucket2 = tuple.getString("level2_s"); + stdi = tuple.getDouble("std(a_i)"); + peri = tuple.getDouble("per(a_i,50)"); + + assertTrue(bucket1.equals("hello4")); + assertTrue(bucket2.equals("b")); + assertTrue(stdi == 0.0D); + assertTrue(peri == 11.0); + + tuple = tuples.get(2); + bucket1 = tuple.getString("level1_s"); + bucket2 = tuple.getString("level2_s"); + stdi = tuple.getDouble("std(a_i)"); + peri = tuple.getDouble("per(a_i,50)"); + + assertTrue(bucket1.equals("hello0")); + assertTrue(bucket2.equals("b")); + assertTrue(stdi == 9.192388155425117D); + assertTrue(peri == 7.5D); + + tuple = tuples.get(3); + bucket1 = tuple.getString("level1_s"); + bucket2 = tuple.getString("level2_s"); + stdi = tuple.getDouble("std(a_i)"); + peri = tuple.getDouble("per(a_i,50)"); + + assertTrue(bucket1.equals("hello4")); + assertTrue(bucket2.equals("a")); + assertTrue(stdi == 0.0D); + assertTrue(peri == 4.0D); + + tuple = tuples.get(4); + bucket1 = tuple.getString("level1_s"); + bucket2 = tuple.getString("level2_s"); + stdi = tuple.getDouble("std(a_i)"); + peri = tuple.getDouble("per(a_i,50)"); + + assertTrue(bucket1.equals("hello3")); + assertTrue(bucket2.equals("a")); + assertTrue(stdi == 0.0D); + assertTrue(peri == 3.0D); + + tuple = tuples.get(5); + bucket1 = tuple.getString("level1_s"); + bucket2 = tuple.getString("level2_s"); + stdi = tuple.getDouble("std(a_i)"); + peri = tuple.getDouble("per(a_i,50)"); + + assertTrue(bucket1.equals("hello0")); + assertTrue(bucket2.equals("a")); + assertTrue(stdi == 1.4142135623730951D); + assertTrue(peri == 1.0D); + } @Test @@ -2447,7 +2650,7 @@ public void testTimeSeriesStream() throws Exception { "end=\"2017-12-01T01:00:00.000Z\", " + "gap=\"+1YEAR\", " + "field=\"test_dt\", " + - "count(*), sum(price_f), max(price_f), min(price_f))"; + "count(*), sum(price_f), max(price_f), min(price_f), avg(price_f), std(price_f), per(price_f, 50))"; ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); paramsLoc.set("expr", expr); paramsLoc.set("qt", "/stream"); @@ -2465,38 +2668,52 @@ public void testTimeSeriesStream() throws Exception { 
assertTrue(tuples.get(0).getDouble("sum(price_f)").equals(10000D)); assertTrue(tuples.get(0).getDouble("max(price_f)").equals(100D)); assertTrue(tuples.get(0).getDouble("min(price_f)").equals(100D)); + assertTrue(tuples.get(0).getDouble("avg(price_f)").equals(100D)); + assertTrue(tuples.get(0).getDouble("std(price_f)").equals(0D)); + assertTrue(tuples.get(0).getDouble("per(price_f,50)").equals(100D)); assertTrue(tuples.get(1).get("test_dt").equals("2014-01-01T01:00:00Z")); assertTrue(tuples.get(1).getLong("count(*)").equals(50L)); assertTrue(tuples.get(1).getDouble("sum(price_f)").equals(25000D)); assertTrue(tuples.get(1).getDouble("max(price_f)").equals(500D)); assertTrue(tuples.get(1).getDouble("min(price_f)").equals(500D)); + assertTrue(tuples.get(1).getDouble("avg(price_f)").equals(500D)); + assertTrue(tuples.get(1).getDouble("std(price_f)").equals(0D)); + assertTrue(tuples.get(1).getDouble("per(price_f,50)").equals(500D)); assertTrue(tuples.get(2).get("test_dt").equals("2015-01-01T01:00:00Z")); assertTrue(tuples.get(2).getLong("count(*)").equals(50L)); assertTrue(tuples.get(2).getDouble("sum(price_f)").equals(15000D)); assertTrue(tuples.get(2).getDouble("max(price_f)").equals(300D)); assertTrue(tuples.get(2).getDouble("min(price_f)").equals(300D)); + assertTrue(tuples.get(2).getDouble("avg(price_f)").equals(300D)); + assertTrue(tuples.get(2).getDouble("std(price_f)").equals(0D)); + assertTrue(tuples.get(2).getDouble("per(price_f,50)").equals(300D)); assertTrue(tuples.get(3).get("test_dt").equals("2016-01-01T01:00:00Z")); assertTrue(tuples.get(3).getLong("count(*)").equals(50L)); assertTrue(tuples.get(3).getDouble("sum(price_f)").equals(20000D)); assertTrue(tuples.get(3).getDouble("max(price_f)").equals(400D)); assertTrue(tuples.get(3).getDouble("min(price_f)").equals(400D)); + assertTrue(tuples.get(3).getDouble("avg(price_f)").equals(400D)); + assertTrue(tuples.get(3).getDouble("std(price_f)").equals(0D)); + assertTrue(tuples.get(3).getDouble("per(price_f,50)").equals(400D)); assertTrue(tuples.get(4).get("test_dt").equals("2017-01-01T01:00:00Z")); assertEquals((long)tuples.get(4).getLong("count(*)"), 0L); assertEquals(tuples.get(4).getDouble("sum(price_f)"), 0D, 0); assertEquals(tuples.get(4).getDouble("max(price_f)"),0D, 0); assertEquals(tuples.get(4).getDouble("min(price_f)"), 0D, 0); - + assertTrue(tuples.get(4).getDouble("avg(price_f)").equals(0D)); + assertTrue(tuples.get(4).getDouble("std(price_f)").equals(0D)); + assertTrue(tuples.get(4).getDouble("per(price_f,50)").equals(0D)); expr = "timeseries("+COLLECTIONORALIAS+", q=\"*:*\", start=\"2013-01-01T01:00:00.000Z\", " + "end=\"2016-12-01T01:00:00.000Z\", " + "gap=\"+1YEAR\", " + "field=\"test_dt\", " + "format=\"yyyy\", " + - "count(*), sum(price_f), max(price_f), min(price_f))"; + "count(*), sum(price_f), max(price_f), min(price_f), avg(price_f), std(price_f), per(price_f, 50))"; paramsLoc = new ModifiableSolrParams(); paramsLoc.set("expr", expr); paramsLoc.set("qt", "/stream"); @@ -2512,31 +2729,45 @@ public void testTimeSeriesStream() throws Exception { assertTrue(tuples.get(0).getDouble("sum(price_f)").equals(10000D)); assertTrue(tuples.get(0).getDouble("max(price_f)").equals(100D)); assertTrue(tuples.get(0).getDouble("min(price_f)").equals(100D)); + assertTrue(tuples.get(0).getDouble("avg(price_f)").equals(100D)); + assertTrue(tuples.get(0).getDouble("std(price_f)").equals(0D)); + assertTrue(tuples.get(0).getDouble("per(price_f,50)").equals(100D)); assertTrue(tuples.get(1).get("test_dt").equals("2014")); 
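The new avg/std/per assertions follow directly from the fixture visible in the pre-existing assertions: each yearly bucket holds 50 documents with one constant price_f (max equals min), so the mean and the 50th percentile equal that price and the standard deviation is zero. The arithmetic, checked in plain Java rather than through the Solr metric classes (Solr's std implementation may use a different variance denominator, though for constant data every convention yields 0):

import java.util.Arrays;

public class ConstantBucketStats {
  public static void main(String[] args) {
    double[] prices = new double[50];
    Arrays.fill(prices, 500.0); // e.g. the 2014 bucket: 50 docs at 500

    double mean = Arrays.stream(prices).average().orElse(0); // 500.0
    // Sample standard deviation with an n - 1 denominator.
    double ss = Arrays.stream(prices).map(p -> (p - mean) * (p - mean)).sum();
    double std = Math.sqrt(ss / (prices.length - 1));        // 0.0
    double median = prices[prices.length / 2];               // 500.0 (all equal)

    System.out.println(mean + " " + std + " " + median);
  }
}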
assertTrue(tuples.get(1).getLong("count(*)").equals(50L)); assertTrue(tuples.get(1).getDouble("sum(price_f)").equals(25000D)); assertTrue(tuples.get(1).getDouble("max(price_f)").equals(500D)); assertTrue(tuples.get(1).getDouble("min(price_f)").equals(500D)); + assertTrue(tuples.get(1).getDouble("avg(price_f)").equals(500D)); + assertTrue(tuples.get(1).getDouble("std(price_f)").equals(0D)); + assertTrue(tuples.get(1).getDouble("per(price_f,50)").equals(500D)); + assertTrue(tuples.get(2).get("test_dt").equals("2015")); assertTrue(tuples.get(2).getLong("count(*)").equals(50L)); assertTrue(tuples.get(2).getDouble("sum(price_f)").equals(15000D)); assertTrue(tuples.get(2).getDouble("max(price_f)").equals(300D)); assertTrue(tuples.get(2).getDouble("min(price_f)").equals(300D)); + assertTrue(tuples.get(2).getDouble("avg(price_f)").equals(300D)); + assertTrue(tuples.get(2).getDouble("std(price_f)").equals(0D)); + assertTrue(tuples.get(2).getDouble("per(price_f,50)").equals(300D)); assertTrue(tuples.get(3).get("test_dt").equals("2016")); assertTrue(tuples.get(3).getLong("count(*)").equals(50L)); assertTrue(tuples.get(3).getDouble("sum(price_f)").equals(20000D)); assertTrue(tuples.get(3).getDouble("max(price_f)").equals(400D)); assertTrue(tuples.get(3).getDouble("min(price_f)").equals(400D)); + assertTrue(tuples.get(3).getDouble("avg(price_f)").equals(400D)); + assertTrue(tuples.get(3).getDouble("std(price_f)").equals(0D)); + assertTrue(tuples.get(3).getDouble("per(price_f,50)").equals(400D)); + expr = "timeseries("+COLLECTIONORALIAS+", q=\"*:*\", start=\"2013-01-01T01:00:00.000Z\", " + "end=\"2016-12-01T01:00:00.000Z\", " + "gap=\"+1YEAR\", " + "field=\"test_dt\", " + "format=\"yyyy-MM\", " + - "count(*), sum(price_f), max(price_f), min(price_f))"; + "count(*), sum(price_f), max(price_f), min(price_f), avg(price_f), std(price_f), per(price_f, 50))"; paramsLoc = new ModifiableSolrParams(); paramsLoc.set("expr", expr); paramsLoc.set("qt", "/stream"); @@ -2552,24 +2783,36 @@ public void testTimeSeriesStream() throws Exception { assertTrue(tuples.get(0).getDouble("sum(price_f)").equals(10000D)); assertTrue(tuples.get(0).getDouble("max(price_f)").equals(100D)); assertTrue(tuples.get(0).getDouble("min(price_f)").equals(100D)); + assertTrue(tuples.get(0).getDouble("avg(price_f)").equals(100D)); + assertTrue(tuples.get(0).getDouble("std(price_f)").equals(0D)); + assertTrue(tuples.get(0).getDouble("per(price_f,50)").equals(100D)); assertTrue(tuples.get(1).get("test_dt").equals("2014-01")); assertTrue(tuples.get(1).getLong("count(*)").equals(50L)); assertTrue(tuples.get(1).getDouble("sum(price_f)").equals(25000D)); assertTrue(tuples.get(1).getDouble("max(price_f)").equals(500D)); assertTrue(tuples.get(1).getDouble("min(price_f)").equals(500D)); + assertTrue(tuples.get(1).getDouble("avg(price_f)").equals(500D)); + assertTrue(tuples.get(1).getDouble("std(price_f)").equals(0D)); + assertTrue(tuples.get(1).getDouble("per(price_f,50)").equals(500D)); assertTrue(tuples.get(2).get("test_dt").equals("2015-01")); assertTrue(tuples.get(2).getLong("count(*)").equals(50L)); assertTrue(tuples.get(2).getDouble("sum(price_f)").equals(15000D)); assertTrue(tuples.get(2).getDouble("max(price_f)").equals(300D)); assertTrue(tuples.get(2).getDouble("min(price_f)").equals(300D)); + assertTrue(tuples.get(2).getDouble("avg(price_f)").equals(300D)); + assertTrue(tuples.get(2).getDouble("std(price_f)").equals(0D)); + assertTrue(tuples.get(2).getDouble("per(price_f,50)").equals(300D)); 
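For these std and per assertions to run at all, the expression factory has to know the new metric classes under the function names used in the expressions, which is what the added .withFunctionName("std", StdMetric.class) and .withFunctionName("per", PercentileMetric.class) registrations earlier in the file provide. A sketch of that flow reduced to its essentials; the ZK address, collection, and field names below are placeholders:

import org.apache.solr.client.solrj.io.stream.StatsStream;
import org.apache.solr.client.solrj.io.stream.TupleStream;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParser;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.client.solrj.io.stream.metrics.PercentileMetric;
import org.apache.solr.client.solrj.io.stream.metrics.StdMetric;

public class MetricRegistrationSketch {
  public static void main(String[] args) throws Exception {
    StreamFactory factory = new StreamFactory()
        .withCollectionZkHost("collection1", "localhost:9983") // placeholder ZK host
        .withFunctionName("stats", StatsStream.class)
        .withFunctionName("std", StdMetric.class)
        .withFunctionName("per", PercentileMetric.class);

    // An unregistered function name would fail during construction.
    StreamExpression expression = StreamExpressionParser.parse(
        "stats(collection1, q=*:*, std(a_i), per(a_i, 50))");
    TupleStream stream = factory.constructStream(expression);
    // A real run would set a StreamContext, then open() and read() the stream.
  }
}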
assertTrue(tuples.get(3).get("test_dt").equals("2016-01")); assertTrue(tuples.get(3).getLong("count(*)").equals(50L)); assertTrue(tuples.get(3).getDouble("sum(price_f)").equals(20000D)); assertTrue(tuples.get(3).getDouble("max(price_f)").equals(400D)); assertTrue(tuples.get(3).getDouble("min(price_f)").equals(400D)); + assertTrue(tuples.get(3).getDouble("avg(price_f)").equals(400D)); + assertTrue(tuples.get(3).getDouble("std(price_f)").equals(0D)); + assertTrue(tuples.get(3).getDouble("per(price_f,50)").equals(400D)); expr = "timeseries("+COLLECTIONORALIAS+", q=\"*:*\", start=\"2012-01-01T01:00:00.000Z\", " + @@ -2577,7 +2820,7 @@ public void testTimeSeriesStream() throws Exception { "gap=\"+1YEAR\", " + "field=\"test_dt\", " + "format=\"yyyy-MM\", " + - "count(*), sum(price_f), max(price_f), min(price_f))"; + "count(*), sum(price_f), max(price_f), min(price_f), avg(price_f), std(price_f), per(price_f, 50))"; paramsLoc = new ModifiableSolrParams(); paramsLoc.set("expr", expr); paramsLoc.set("qt", "/stream"); @@ -2592,30 +2835,45 @@ public void testTimeSeriesStream() throws Exception { assertTrue(tuples.get(0).getDouble("sum(price_f)") == 0); assertTrue(tuples.get(0).getDouble("max(price_f)") == 0); assertTrue(tuples.get(0).getDouble("min(price_f)") == 0); + assertTrue(tuples.get(0).getDouble("avg(price_f)").equals(0D)); + assertTrue(tuples.get(0).getDouble("std(price_f)").equals(0D)); + assertTrue(tuples.get(0).getDouble("per(price_f,50)").equals(0D)); assertTrue(tuples.get(1).get("test_dt").equals("2013-01")); assertTrue(tuples.get(1).getLong("count(*)").equals(100L)); assertTrue(tuples.get(1).getDouble("sum(price_f)").equals(10000D)); assertTrue(tuples.get(1).getDouble("max(price_f)").equals(100D)); assertTrue(tuples.get(1).getDouble("min(price_f)").equals(100D)); + assertTrue(tuples.get(1).getDouble("avg(price_f)").equals(100D)); + assertTrue(tuples.get(1).getDouble("std(price_f)").equals(0D)); + assertTrue(tuples.get(1).getDouble("per(price_f,50)").equals(100D)); assertTrue(tuples.get(2).get("test_dt").equals("2014-01")); assertTrue(tuples.get(2).getLong("count(*)").equals(50L)); assertTrue(tuples.get(2).getDouble("sum(price_f)").equals(25000D)); assertTrue(tuples.get(2).getDouble("max(price_f)").equals(500D)); assertTrue(tuples.get(2).getDouble("min(price_f)").equals(500D)); + assertTrue(tuples.get(2).getDouble("avg(price_f)").equals(500D)); + assertTrue(tuples.get(2).getDouble("std(price_f)").equals(0D)); + assertTrue(tuples.get(2).getDouble("per(price_f,50)").equals(500D)); assertTrue(tuples.get(3).get("test_dt").equals("2015-01")); assertTrue(tuples.get(3).getLong("count(*)").equals(50L)); assertTrue(tuples.get(3).getDouble("sum(price_f)").equals(15000D)); assertTrue(tuples.get(3).getDouble("max(price_f)").equals(300D)); assertTrue(tuples.get(3).getDouble("min(price_f)").equals(300D)); + assertTrue(tuples.get(3).getDouble("avg(price_f)").equals(300D)); + assertTrue(tuples.get(3).getDouble("std(price_f)").equals(0D)); + assertTrue(tuples.get(3).getDouble("per(price_f,50)").equals(300D)); assertTrue(tuples.get(4).get("test_dt").equals("2016-01")); assertTrue(tuples.get(4).getLong("count(*)").equals(50L)); assertTrue(tuples.get(4).getDouble("sum(price_f)").equals(20000D)); assertTrue(tuples.get(4).getDouble("max(price_f)").equals(400D)); assertTrue(tuples.get(4).getDouble("min(price_f)").equals(400D)); + assertTrue(tuples.get(4).getDouble("avg(price_f)").equals(400D)); + assertTrue(tuples.get(4).getDouble("std(price_f)").equals(0D)); + 
assertTrue(tuples.get(4).getDouble("per(price_f,50)").equals(400D)); } @Test @@ -2640,6 +2898,7 @@ public void testTupleStream() throws Exception { solrStream.setStreamContext(context); List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); + @SuppressWarnings({"unchecked", "rawtypes"}) List results = (List)tuples.get(0).get("results"); assertTrue(results.get(0).get("id").equals("hello1")); assertTrue(results.get(0).get("test_t").equals("l b c d c")); @@ -2771,7 +3030,7 @@ public void testBasicTextLogitStream() throws Exception { tuples = getTuples(stream); assertEquals(100, tuples.size()); Tuple lastModel = tuples.get(0); - ClassificationEvaluation evaluation = ClassificationEvaluation.create(lastModel.fields); + ClassificationEvaluation evaluation = ClassificationEvaluation.create(lastModel.getFields()); assertTrue(evaluation.getF1() >= 1.0); assertEquals(Math.log(5000.0 / (2500 + 1)), lastModel.getDoubles("idfs_ds").get(0), 0.0001); // make sure the tuples is retrieved in correct order @@ -3018,7 +3277,7 @@ public void testSignificantTermsStream() throws Exception { //Test with shards parameter List shardUrls = TupleStream.getShards(cluster.getZkServer().getZkAddress(), COLLECTIONORALIAS, streamContext); - Map> shardsMap = new HashMap(); + Map> shardsMap = new HashMap<>(); shardsMap.put("myCollection", shardUrls); StreamContext context = new StreamContext(); context.put("shards", shardsMap); @@ -3406,13 +3665,14 @@ public boolean assertDouble(Tuple tuple, String fieldName, double d) throws Exce return true; } - protected boolean assertMaps(List maps, int... ids) throws Exception { + protected boolean assertMaps(@SuppressWarnings({"rawtypes"})List maps, int... ids) throws Exception { if(maps.size() != ids.length) { throw new Exception("Expected id count != actual map count:"+ids.length+":"+maps.size()); } int i=0; for(int val : ids) { + @SuppressWarnings({"rawtypes"}) Map t = maps.get(i); String tip = (String)t.get("id"); if(!tip.equals(Integer.toString(val))) { @@ -3426,7 +3686,7 @@ protected boolean assertMaps(List maps, int... ids) throws Exception { private void assertTopicRun(TupleStream stream, String... idArray) throws Exception { long version = -1; int count = 0; - List ids = new ArrayList(); + List ids = new ArrayList<>(); for(String id : idArray) { ids.add(id); } @@ -3462,7 +3722,7 @@ private void assertTopicRun(TupleStream stream, String... idArray) throws Except private void assertTopicSubject(TupleStream stream, String... 
textArray) throws Exception { long version = -1; int count = 0; - List texts = new ArrayList(); + List texts = new ArrayList<>(); for(String text : textArray) { texts.add(text); } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java index 5769a9ff68ab..0c3c6093e7bb 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java @@ -422,14 +422,17 @@ public void testReducerStream() throws Exception { assertEquals(3, tuples.size()); Tuple t0 = tuples.get(0); + @SuppressWarnings({"rawtypes"}) List maps0 = t0.getMaps("group"); assertMaps(maps0, 0, 2, 1, 9); Tuple t1 = tuples.get(1); + @SuppressWarnings({"rawtypes"}) List maps1 = t1.getMaps("group"); assertMaps(maps1, 3, 5, 7, 8); Tuple t2 = tuples.get(2); + @SuppressWarnings({"rawtypes"}) List maps2 = t2.getMaps("group"); assertMaps(maps2, 4, 6); @@ -532,14 +535,17 @@ public void testParallelReducerStream() throws Exception { assertEquals(3, tuples.size()); Tuple t0 = tuples.get(0); + @SuppressWarnings({"rawtypes"}) List maps0 = t0.getMaps("group"); assertMaps(maps0, 9, 1, 2, 0); Tuple t1 = tuples.get(1); + @SuppressWarnings({"rawtypes"}) List maps1 = t1.getMaps("group"); assertMaps(maps1, 8, 7, 5, 3); Tuple t2 = tuples.get(2); + @SuppressWarnings({"rawtypes"}) List maps2 = t2.getMaps("group"); assertMaps(maps2, 6, 4); @@ -2323,6 +2329,7 @@ public void streamTests() throws Exception { * streaming expression to only consider data found on the local node. */ @Test + @SuppressWarnings({"unchecked"}) public void streamLocalTests() throws Exception { new UpdateRequest() @@ -2611,7 +2618,7 @@ public void testTupleStreamSorting(StreamContext streamContext, SolrParams solrP protected List getTuples(TupleStream tupleStream) throws IOException { tupleStream.open(); - List tuples = new ArrayList(); + List tuples = new ArrayList<>(); for(;;) { Tuple t = tupleStream.read(); if(t.EOF) { @@ -2647,9 +2654,11 @@ protected boolean assertOrder(List tuples, int... ids) throws Exception { } protected boolean assertGroupOrder(Tuple tuple, int... ids) throws Exception { + @SuppressWarnings({"rawtypes"}) List group = (List)tuple.get("tuples"); int i=0; for(int val : ids) { + @SuppressWarnings({"rawtypes"}) Map t = (Map)group.get(i); Long tip = (Long)t.get("id"); if(tip.intValue() != val) { @@ -2660,13 +2669,14 @@ protected boolean assertGroupOrder(Tuple tuple, int... ids) throws Exception { return true; } - protected boolean assertMaps(List maps, int... ids) throws Exception { + protected boolean assertMaps(@SuppressWarnings({"rawtypes"})List maps, int... 
ids) throws Exception { if(maps.size() != ids.length) { throw new Exception("Expected id count != actual map count:"+ids.length+":"+maps.size()); } int i=0; for(int val : ids) { + @SuppressWarnings({"rawtypes"}) Map t = maps.get(i); String tip = (String)t.get("id"); if(!tip.equals(Integer.toString(val))) { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AscEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AscEvaluatorTest.java index 8029712a4b1f..fb0a35a88052 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AscEvaluatorTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AscEvaluatorTest.java @@ -44,6 +44,7 @@ public AscEvaluatorTest() { } @Test + @SuppressWarnings({"unchecked"}) public void integerSortTest() throws Exception{ StreamEvaluator evaluator = factory.constructEvaluator("asc(a)"); Object result; @@ -57,6 +58,7 @@ public void integerSortTest() throws Exception{ } @Test + @SuppressWarnings({"unchecked"}) public void doubleSortTest() throws Exception{ StreamEvaluator evaluator = factory.constructEvaluator("asc(a)"); Object result; @@ -70,6 +72,7 @@ public void doubleSortTest() throws Exception{ } @Test + @SuppressWarnings({"unchecked"}) public void doubleWithIntegersSortTest() throws Exception{ StreamEvaluator evaluator = factory.constructEvaluator("asc(a)"); Object result; @@ -83,6 +86,7 @@ public void doubleWithIntegersSortTest() throws Exception{ } @Test + @SuppressWarnings({"unchecked"}) public void stringSortTest() throws Exception{ StreamEvaluator evaluator = factory.constructEvaluator("asc(a)"); Object result; @@ -98,7 +102,9 @@ public void stringSortTest() throws Exception{ private void checkOrder(List expected, List actual){ Assert.assertEquals(expected.size(), actual.size()); for(int idx = 0; idx < expected.size(); ++idx){ + @SuppressWarnings({"unchecked"}) Comparable expectedValue = (Comparable)expected.get(idx); + @SuppressWarnings({"unchecked"}) Comparable actualValue = (Comparable)actual.get(idx); Assert.assertEquals(0, expectedValue.compareTo(actualValue)); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ConversionEvaluatorsTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ConversionEvaluatorsTest.java index 2194b8f2a948..4997902f82eb 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ConversionEvaluatorsTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ConversionEvaluatorsTest.java @@ -42,6 +42,7 @@ public class ConversionEvaluatorsTest { StreamFactory factory; Map values; + @SuppressWarnings({"unchecked"}) public ConversionEvaluatorsTest() { super(); @@ -69,6 +70,7 @@ public void testInvalidExpression() throws Exception { evaluator = factory.constructEvaluator("convert(inches, yards, 3)"); StreamContext streamContext = new StreamContext(); evaluator.setStreamContext(streamContext); + @SuppressWarnings({"rawtypes"}) Tuple tuple = new Tuple(new HashMap()); evaluator.evaluate(tuple); assertTrue(false); @@ -79,41 +81,41 @@ public void testInvalidExpression() throws Exception { @Test public void testInches() throws Exception { - testFunction("convert(inches, centimeters, 2)", (double)(2*2.54)); - testFunction("convert(inches, meters, 2)", (double)(2*0.0254)); - testFunction("convert(inches, millimeters, 2)", (double)(2*25.40)); + testFunction("convert(inches, centimeters, 2)", (2*2.54)); + testFunction("convert(inches, meters, 2)", (2*0.0254)); 
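The dropped (double) casts in these conversion tests were redundant: in an expression such as 2 * 2.54, binary numeric promotion widens the int operand to double before the multiplication, so the result is already a double and the behavior is unchanged. A quick demonstration, using values from the test above:

public class PromotionDemo {
  public static void main(String[] args) {
    // int * double promotes the int operand; no explicit cast is needed.
    double product = 2 * 2.54;
    System.out.println(product == (double) (2 * 2.54));   // true, both 5.08
    System.out.println(((Object) (2 * 2.54)).getClass()); // class java.lang.Double
  }
}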
+ testFunction("convert(inches, millimeters, 2)", (2*25.40)); } @Test public void testYards() throws Exception { - testFunction("convert(yards, meters, 2)", (double)(2*.91)); - testFunction("convert(yards, kilometers, 2)", (double)(2*.00091)); + testFunction("convert(yards, meters, 2)", (2*.91)); + testFunction("convert(yards, kilometers, 2)", (2*.00091)); } @Test public void testMiles() throws Exception { - testFunction("convert(miles, kilometers, 2)", (double)(2*1.61)); + testFunction("convert(miles, kilometers, 2)", (2*1.61)); } @Test public void testMillimeters() throws Exception { - testFunction("convert(millimeters, inches, 2)", (double)(2*.039)); + testFunction("convert(millimeters, inches, 2)", (2*.039)); } @Test public void testCentimeters() throws Exception { - testFunction("convert(centimeters, inches, 2)", (double)(2*.39)); + testFunction("convert(centimeters, inches, 2)", (2*.39)); } @Test public void testMeters() throws Exception { - testFunction("convert(meters, feet, 2)", (double)(2*3.28)); + testFunction("convert(meters, feet, 2)", (2*3.28)); } @Test public void testKiloMeters() throws Exception { - testFunction("convert(kilometers, feet, 2)", (double)(2*3280.8)); - testFunction("convert(kilometers, miles, 2)", (double)(2*.62)); + testFunction("convert(kilometers, feet, 2)", (2*3280.8)); + testFunction("convert(kilometers, miles, 2)", (2*.62)); } public void testFunction(String expression, Number expected) throws Exception { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ReverseEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ReverseEvaluatorTest.java index 66f45a78f21b..e70a86595e96 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ReverseEvaluatorTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ReverseEvaluatorTest.java @@ -47,6 +47,7 @@ public void test() throws IOException { values.clear(); values.put("l1", l1); + @SuppressWarnings({"rawtypes"}) List result = ((List)factory.constructEvaluator("reverse(l1)").evaluate(new Tuple(values))); Assert.assertEquals(4.5, result.get(0)); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/TemporalEvaluatorsTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/TemporalEvaluatorsTest.java index fbf99ab0d8ef..38b912cc4b43 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/TemporalEvaluatorsTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/TemporalEvaluatorsTest.java @@ -63,6 +63,7 @@ public class TemporalEvaluatorsTest { StreamFactory factory; Map values; + @SuppressWarnings({"unchecked"}) public TemporalEvaluatorsTest() { super(); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/ops/ConcatOperationTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/ops/ConcatOperationTest.java index bbc6755ccaa0..d6d9093aea19 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/ops/ConcatOperationTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/ops/ConcatOperationTest.java @@ -37,6 +37,7 @@ public class ConcatOperationTest extends SolrTestCase { StreamFactory factory; Map values; + @SuppressWarnings({"unchecked"}) public ConcatOperationTest() { super(); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/ops/OperationsTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/ops/OperationsTest.java index 
91129b7019eb..22a0bc1f4c76 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/ops/OperationsTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/ops/OperationsTest.java @@ -37,6 +37,7 @@ public class OperationsTest extends SolrTestCase { StreamFactory factory; Map values; + @SuppressWarnings({"unchecked"}) public OperationsTest() { super(); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java index 98cc023418e2..fcb10d6c75fb 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java @@ -63,9 +63,11 @@ private static void assertValidSchemaResponse(SolrResponseBase schemaResponse) { private static void assertFailedSchemaResponse(ThrowingRunnable runnable, String expectedErrorMessage) { BaseHttpSolrClient.RemoteExecutionException e = expectThrows(BaseHttpSolrClient.RemoteExecutionException.class, runnable); + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap errorMap = (SimpleOrderedMap)e.getMetaData().get("error"); assertEquals("org.apache.solr.api.ApiBag$ExceptionWithErrObject", ((NamedList)errorMap.get("metadata")).get("error-class")); + @SuppressWarnings({"rawtypes"}) List details = (List)errorMap.get("details"); assertTrue(((List)((Map)details.get(0)).get("errorMessages")).get(0).toString().contains(expectedErrorMessage)); } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestConfigSetAdminRequest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestConfigSetAdminRequest.java index 345a16563c68..5c78159481b4 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestConfigSetAdminRequest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestConfigSetAdminRequest.java @@ -28,6 +28,7 @@ public class TestConfigSetAdminRequest extends SolrTestCaseJ4 { @Test public void testNoAction() { + @SuppressWarnings({"rawtypes"}) ConfigSetAdminRequest request = new MyConfigSetAdminRequest(); verifyException(request, "action"); } @@ -46,7 +47,7 @@ public void testDelete() { verifyException(delete, "ConfigSet"); } - private void verifyException(ConfigSetAdminRequest request, String errorContains) { + private void verifyException(@SuppressWarnings({"rawtypes"})ConfigSetAdminRequest request, String errorContains) { Exception e = expectThrows(Exception.class, request::getParams); assertTrue("Expected exception message to contain: " + errorContains, e.getMessage().contains(errorContains)); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java index 44247a700310..9038f0b7647e 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java @@ -90,8 +90,8 @@ public void testConfigSet() throws Exception { SolrClient client = getSolrAdmin(); File testDir = createTempDir(LuceneTestCase.getTestClass().getSimpleName()).toFile(); - File newCoreInstanceDir = new File(testDir, "newcore"); + cores.getAllowPaths().add(testDir.toPath()); // Allow the test dir CoreAdminRequest.Create req = new CoreAdminRequest.Create(); req.setCoreName("corewithconfigset"); @@ -115,6 +115,8 @@ public void testCustomUlogDir() throws Exception { File dataDir = createTempDir("data").toFile(); File 
newCoreInstanceDir = createTempDir("instance").toFile(); + cores.getAllowPaths().add(dataDir.toPath()); // Allow the test dir + cores.getAllowPaths().add(newCoreInstanceDir.toPath()); // Allow the test dir File instanceDir = new File(cores.getSolrHome()); FileUtils.copyDirectory(instanceDir, new File(newCoreInstanceDir, diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestUpdateRequestCodec.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestUpdateRequestCodec.java index e5f2cce1c9f5..a44a668a94ec 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestUpdateRequestCodec.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestUpdateRequestCodec.java @@ -157,7 +157,9 @@ public void testIteratable() throws IOException { } //this format accepts a 1:1 mapping of the json format and javabin format + @SuppressWarnings({"unchecked"}) public void testStreamableInputDocFormat() throws IOException { + @SuppressWarnings({"rawtypes"}) Map m = Utils.makeMap("id","1","desc" ,"The desc 1"); m.put(CHILDDOC, (MapWriter) ew -> { ew.put("id","1.1"); @@ -175,13 +177,14 @@ public void testStreamableInputDocFormat() throws IOException { ew.put("des", "The desc 2"); }; + @SuppressWarnings({"rawtypes"}) List l = new ArrayList(); l.add(m); l.add(m2); ByteArrayOutputStream baos = new ByteArrayOutputStream(); new JavaBinCodec().marshal(l.iterator(), baos); - List l2 = new ArrayList(); + List l2 = new ArrayList<>(); new JavaBinUpdateRequestCodec().unmarshal(new ByteArrayInputStream(baos.toByteArray()), (document, req, commitWithin, override) -> l2.add(document)); @@ -261,6 +264,7 @@ public void testBackCompat4_5() throws IOException { } + @SuppressWarnings({"unchecked", "rawtypes"}) private void compareDocs(String m, SolrInputDocument expectedDoc, SolrInputDocument actualDoc) { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestV2Request.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestV2Request.java index b05b889b9778..c663e9955f6b 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestV2Request.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestV2Request.java @@ -53,6 +53,7 @@ public void testApiPathAvailability() throws Exception { .forceV2(true) .withMethod(SolrRequest.METHOD.GET).build() .process(cluster.getSolrClient()); + @SuppressWarnings({"rawtypes"}) List l = (List) rsp._get("nodes",null); assertNotNull(l); assertFalse(l.isEmpty()); @@ -119,9 +120,10 @@ private void doTest(SolrClient client) throws IOException, SolrServerException { assertSuccess(client, new V2Request.Builder("/c/test").withMethod(SolrRequest.METHOD.DELETE).build()); NamedList res = client.request(new V2Request.Builder("/c").build()); - List collections = (List) res.get("collections"); + // TODO: this is not guaranteed now - beast test if you try to fix + //List collections = (List) res.get("collections"); // assertFalse( collections.contains("test")); try{ NamedList res1 = client.request(new V2Request.Builder("/collections") diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/DomainMapTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/DomainMapTest.java index 5b46514e98e1..da8b89d271aa 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/DomainMapTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/DomainMapTest.java @@ -40,6 +40,7 @@ public void testRejectsInvalidFilters() { public void 
testStoresFilterWithCorrectKey() { final DomainMap domain = new DomainMap() .withFilter("name:Solr"); + @SuppressWarnings({"unchecked"}) final List filterList = (List) domain.get("filter"); assertTrue("Expected filter list to contain provided filter", filterList.contains("name:Solr")); @@ -50,6 +51,7 @@ public void testStoresMultipleFilters() { final DomainMap domain = new DomainMap() .withFilter("name:Solr") .withFilter("cat:search"); + @SuppressWarnings({"unchecked"}) final List filterList = (List) domain.get("filter"); assertTrue("Expected filter list to contain 1st provided filter", filterList.contains("name:Solr")); @@ -69,6 +71,7 @@ public void testRejectsInvalidQueries() { public void testStoresQueryWithCorrectKey() { final DomainMap domain = new DomainMap() .withQuery("name:Solr"); + @SuppressWarnings({"unchecked"}) final List queryList = (List) domain.get("query"); assertTrue("Expected query list to contain provided query", queryList.contains("name:Solr")); @@ -79,6 +82,7 @@ public void testStoresMultipleQueries() { final DomainMap domain = new DomainMap() .withQuery("name:Solr") .withQuery("cat:search"); + @SuppressWarnings({"unchecked"}) final List queryList = (List) domain.get("query"); assertTrue("Expected query list to contain 1st provided query", queryList.contains("name:Solr")); @@ -98,6 +102,7 @@ public void testRejectsInvalidTagsToExclude() { public void testStoresTagsToExcludeWithCorrectKey() { final DomainMap domain = new DomainMap() .withTagsToExclude("BRAND"); + @SuppressWarnings({"unchecked"}) final List exclusionList = (List) domain.get("excludeTags"); assertTrue("Expected tag-exclusion list to contain provided tag", exclusionList.contains("BRAND")); @@ -108,6 +113,7 @@ public void testStoresMultipleTagExclusionStrings() { final DomainMap domain = new DomainMap() .withTagsToExclude("BRAND") .withTagsToExclude("COLOR"); + @SuppressWarnings({"unchecked"}) final List exclusionList = (List) domain.get("excludeTags"); assertTrue("Expected tag-exclusion list to contain provided 1st tag", exclusionList.contains("BRAND")); @@ -170,6 +176,7 @@ public void testStoresJoinValuesWithCorrectKey() { .setJoinTransformation("any-from-field", "any-to-field"); assertTrue(domain.containsKey("join")); + @SuppressWarnings({"unchecked"}) final Map joinParams = (Map) domain.get("join"); assertEquals("any-from-field", joinParams.get("from")); assertEquals("any-to-field", joinParams.get("to")); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/AnlysisResponseBaseTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/AnlysisResponseBaseTest.java index da7f7ed506cd..803769f54ad0 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/response/AnlysisResponseBaseTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/AnlysisResponseBaseTest.java @@ -38,6 +38,7 @@ public class AnlysisResponseBaseTest extends SolrTestCase { @Test public void testBuildTokenInfo() throws Exception { + @SuppressWarnings({"rawtypes"}) NamedList tokenNL = new NamedList(); tokenNL.add("text", "JUMPING"); tokenNL.add("type", "word"); @@ -76,6 +77,7 @@ public void testBuildTokenInfo() throws Exception { public void testBuildPhases() throws Exception { final AnalysisResponseBase.TokenInfo tokenInfo = new AnalysisResponseBase.TokenInfo("text", null, "type", 0, 3, 1, false); + @SuppressWarnings({"rawtypes"}) NamedList nl = new NamedList(); nl.add("Tokenizer", buildFakeTokenInfoList(6)); nl.add("Filter1", buildFakeTokenInfoList(5)); @@ -84,7 +86,7 @@ public void 
testBuildPhases() throws Exception { AnalysisResponseBase response = new AnalysisResponseBase() { @Override - protected TokenInfo buildTokenInfo(NamedList tokenNL) { + protected TokenInfo buildTokenInfo(@SuppressWarnings({"rawtypes"})NamedList tokenNL) { return tokenInfo; } }; @@ -104,6 +106,7 @@ protected TokenInfo buildTokenInfo(NamedList tokenNL) { */ @Test public void testCharFilterBuildPhases() throws Exception { + @SuppressWarnings({"rawtypes"}) NamedList nl = new NamedList(); nl.add("CharFilter1", "CharFilterOutput"); //not list of tokens AnalysisResponseBase response = new AnalysisResponseBase(); @@ -113,6 +116,7 @@ public void testCharFilterBuildPhases() throws Exception { //================================================ Helper Methods ================================================== + @SuppressWarnings({"rawtypes"}) private List buildFakeTokenInfoList(int numberOfTokens) { List list = new ArrayList<>(numberOfTokens); for (int i = 0; i < numberOfTokens; i++) { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/DocumentAnalysisResponseTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/DocumentAnalysisResponseTest.java index 1c970a24902c..546cd0389f03 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/response/DocumentAnalysisResponseTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/DocumentAnalysisResponseTest.java @@ -35,6 +35,7 @@ public class DocumentAnalysisResponseTest extends SolrTestCase { * Tests the {@link DocumentAnalysisResponse#setResponse(org.apache.solr.common.util.NamedList)} method */ @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void testSetResponse() throws Exception { // the parsing of the analysis phases is already tested in the AnalysisResponseBaseTest. 
So we can just fake @@ -96,6 +97,7 @@ protected List buildPhases(NamedList phaseNL) { //================================================ Helper Methods ================================================== + @SuppressWarnings({"unchecked", "rawtypes"}) private NamedList buildResponse() { NamedList response = new NamedList(); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/FieldAnalysisResponseTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/FieldAnalysisResponseTest.java index 6ce7e4b24014..b609dfb5d5e0 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/response/FieldAnalysisResponseTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/FieldAnalysisResponseTest.java @@ -45,6 +45,7 @@ public void testSetResponse() throws Exception { AnalysisResponseBase.AnalysisPhase expectedPhase = new AnalysisResponseBase.AnalysisPhase("Tokenizer"); phases.add(expectedPhase); + @SuppressWarnings({"rawtypes"}) NamedList responseNL = buildResponse(); FieldAnalysisResponse response = new FieldAnalysisResponse() { @Override @@ -79,6 +80,7 @@ protected List buildPhases(NamedList phaseNL) { //================================================ Helper Methods ================================================== + @SuppressWarnings({"rawtypes"}) private NamedList buildResponse() { NamedList response = new NamedList(); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/NoOpResponseParserTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/NoOpResponseParserTest.java index 1aa80ad3bef2..cc1e8ade0cd5 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/response/NoOpResponseParserTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/NoOpResponseParserTest.java @@ -87,7 +87,9 @@ public void testQueryParse() throws Exception { private void assertResponse(String responseString) throws IOException { ResponseParser xmlResponseParser = new XMLResponseParser(); + @SuppressWarnings({"rawtypes"}) NamedList expectedResponse = xmlResponseParser.processResponse(IOUtils.toInputStream(responseString, "UTF-8"), "UTF-8"); + @SuppressWarnings({"unchecked"}) List documentList = (List) expectedResponse.getAll("response").get(0); assertEquals(1, documentList.size()); SolrDocument solrDocument = documentList.get(0); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/QueryResponseTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/QueryResponseTest.java index af9da2605824..174b24a90dc5 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/response/QueryResponseTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/QueryResponseTest.java @@ -42,6 +42,7 @@ * @since solr 1.3 */ @Limit(bytes=20000) +@SuppressWarnings({"rawtypes"}) public class QueryResponseTest extends SolrTestCase { @Test // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Sep-2018 diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestDelegationTokenResponse.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestDelegationTokenResponse.java index b5508df8b09a..54b8e6cc32ce 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestDelegationTokenResponse.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestDelegationTokenResponse.java @@ -34,7 +34,7 @@ public class TestDelegationTokenResponse extends SolrTestCase { - private void 
delegationTokenResponse(DelegationTokenRequest request, + private void delegationTokenResponse(@SuppressWarnings({"rawtypes"})DelegationTokenRequest request, DelegationTokenResponse response, String responseBody) throws Exception { ResponseParser parser = request.getResponseParser(); response.setResponse(parser.processResponse( diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/routing/RequestReplicaListTransformerGeneratorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/routing/RequestReplicaListTransformerGeneratorTest.java index 5b64fde54ba4..c0ebad379d6f 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/routing/RequestReplicaListTransformerGeneratorTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/routing/RequestReplicaListTransformerGeneratorTest.java @@ -88,6 +88,19 @@ public void replicaTypeAndReplicaBase() { ) ); + // Add a PULL replica so that there's a tie for "last place" + replicas.add( + new Replica( + "node5", + map( + ZkStateReader.BASE_URL_PROP, "http://host2_2:8983/solr", + ZkStateReader.NODE_NAME_PROP, "node5", + ZkStateReader.CORE_NAME_PROP, "collection1", + ZkStateReader.REPLICA_TYPE, "PULL" + ), "c1","s1" + ) + ); + // replicaType and replicaBase combined rule param String rulesParam = ShardParams.SHARDS_PREFERENCE_REPLICA_TYPE + ":NRT," + ShardParams.SHARDS_PREFERENCE_REPLICA_TYPE + ":TLOG," + @@ -101,6 +114,7 @@ public void replicaTypeAndReplicaBase() { assertEquals("node2", replicas.get(1).getNodeName()); assertEquals("node4", replicas.get(2).getNodeName()); assertEquals("node3", replicas.get(3).getNodeName()); + assertEquals("node5", replicas.get(4).getNodeName()); params.set("routingPreference", "1"); rlt = generator.getReplicaListTransformer(params); @@ -108,7 +122,8 @@ public void replicaTypeAndReplicaBase() { assertEquals("node1", replicas.get(0).getNodeName()); assertEquals("node4", replicas.get(1).getNodeName()); assertEquals("node2", replicas.get(2).getNodeName()); - assertEquals("node3", replicas.get(3).getNodeName()); + assertEquals("node5", replicas.get(3).getNodeName()); + assertEquals("node3", replicas.get(4).getNodeName()); } @SuppressWarnings("unchecked") diff --git a/solr/solrj/src/test/org/apache/solr/common/SolrDocumentTest.java b/solr/solrj/src/test/org/apache/solr/common/SolrDocumentTest.java index d6a2b2e2a577..1bcfd35c76ce 100644 --- a/solr/solrj/src/test/org/apache/solr/common/SolrDocumentTest.java +++ b/solr/solrj/src/test/org/apache/solr/common/SolrDocumentTest.java @@ -29,7 +29,8 @@ */ public class SolrDocumentTest extends SolrTestCase { - public void testSimple() + @SuppressWarnings({"unchecked"}) + public void testSimple() { Float fval = 10.01f; Boolean bval = Boolean.TRUE; @@ -127,8 +128,10 @@ public void testAddCollections() doc.clear(); assertEquals( 0, doc.getFieldNames().size() ); + @SuppressWarnings({"rawtypes"}) Iterable iter = new Iterable() { @Override + @SuppressWarnings({"rawtypes"}) public Iterator iterator() { return c0.iterator(); } diff --git a/solr/solrj/src/test/org/apache/solr/common/TestToleratedUpdateError.java b/solr/solrj/src/test/org/apache/solr/common/TestToleratedUpdateError.java index 7dd3427c9004..8e4a24e20a10 100644 --- a/solr/solrj/src/test/org/apache/solr/common/TestToleratedUpdateError.java +++ b/solr/solrj/src/test/org/apache/solr/common/TestToleratedUpdateError.java @@ -67,8 +67,10 @@ public void testParseMapErrorChecking() { assertTrue(e.toString(), e.getMessage().contains("Invalid type")); } + @SuppressWarnings({"unchecked"}) public void testParseMap() { 
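// The recurring change in this patch is visible right here: legacy raw-type usages
// in tests get a narrowly scoped @SuppressWarnings instead of a rewrite, so javac's
// rawtypes/unchecked lint stays clean without touching test behavior. A sketch of
// the two options (hypothetical snippet, not part of this test method):
//   @SuppressWarnings({"rawtypes"})
//   SimpleOrderedMap legacy = new SimpleOrderedMap();          // raw type, warning suppressed
//   SimpleOrderedMap<Object> typed = new SimpleOrderedMap<>(); // parameterized: no warning at all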
// trivial + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap valid = new SimpleOrderedMap(); valid.add("type", CmdType.ADD.toString()); valid.add("id", "some id"); diff --git a/solr/solrj/src/test/org/apache/solr/common/cloud/TestCloudCollectionsListeners.java b/solr/solrj/src/test/org/apache/solr/common/cloud/TestCloudCollectionsListeners.java index 1ef806e3696f..b0de38379a7e 100644 --- a/solr/solrj/src/test/org/apache/solr/common/cloud/TestCloudCollectionsListeners.java +++ b/solr/solrj/src/test/org/apache/solr/common/cloud/TestCloudCollectionsListeners.java @@ -193,114 +193,4 @@ public void testCollectionDeletion() throws Exception { client.getZkStateReader().removeCloudCollectionsListener(watcher1); } - - @Test - // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 17-Aug-2018 - public void testWatchesWorkForBothStateFormats() throws Exception { - CloudSolrClient client = cluster.getSolrClient(); - - Map> oldResults = new HashMap<>(); - Map> newResults = new HashMap<>(); - - CloudCollectionsListener watcher1 = (oldCollections, newCollections) -> { - log.info("New set of collections: {}, {}", oldCollections, newCollections); - oldResults.put(1, oldCollections); - newResults.put(1, newCollections); - }; - client.getZkStateReader().registerCloudCollectionsListener(watcher1); - CloudCollectionsListener watcher2 = (oldCollections, newCollections) -> { - log.info("New set of collections: {}, {}", oldCollections, newCollections); - oldResults.put(2, oldCollections); - newResults.put(2, newCollections); - }; - client.getZkStateReader().registerCloudCollectionsListener(watcher2); - - assertEquals("CloudCollectionsListener has old collections with size > 0 after registration", 0, oldResults.get(1).size()); - assertEquals("CloudCollectionsListener has old collections with size > 0 after registration", 0, oldResults.get(2).size()); - assertEquals("CloudCollectionsListener has new collections with size > 0 after registration", 0, newResults.get(1).size()); - assertEquals("CloudCollectionsListener has new collections with size > 0 after registration", 0, newResults.get(2).size()); - - // Creating old state format collection - - CollectionAdminRequest.createCollection("testcollection1", "config", 4, 1) - .setStateFormat(1) - .processAndWait(client, MAX_WAIT_TIMEOUT); - cluster.waitForActiveCollection("testcollection1", 4, 4); - - assertEquals("CloudCollectionsListener has old collections with size > 0 after collection created with old stateFormat", 0, oldResults.get(1).size()); - assertEquals("CloudCollectionsListener has old collections with size > 0 after collection created with old stateFormat", 0, oldResults.get(2).size()); - assertEquals("CloudCollectionsListener not updated with created collection with old stateFormat", 1, newResults.get(1).size()); - assertTrue("CloudCollectionsListener not updated with created collection with old stateFormat", newResults.get(1).contains("testcollection1")); - assertEquals("CloudCollectionsListener not updated with created collection with old stateFormat", 1, newResults.get(2).size()); - assertTrue("CloudCollectionsListener not updated with created collection with old stateFormat", newResults.get(2).contains("testcollection1")); - - // Creating new state format collection - - CollectionAdminRequest.createCollection("testcollection2", "config", 4, 1) - .processAndWait(client, MAX_WAIT_TIMEOUT); - cluster.waitForActiveCollection("testcollection2", 4, 4); - - assertEquals("CloudCollectionsListener has 
incorrect old collections after collection created with new stateFormat", 1, oldResults.get(1).size()); - assertEquals("CloudCollectionsListener has incorrect old collections after collection created with new stateFormat", 1, oldResults.get(2).size()); - assertEquals("CloudCollectionsListener not updated with created collection with new stateFormat", 2, newResults.get(1).size()); - assertTrue("CloudCollectionsListener not updated with created collection with new stateFormat", newResults.get(1).contains("testcollection2")); - assertEquals("CloudCollectionsListener not updated with created collection with new stateFormat", 2, newResults.get(2).size()); - assertTrue("CloudCollectionsListener not updated with created collection with new stateFormat", newResults.get(2).contains("testcollection2")); - - client.getZkStateReader().removeCloudCollectionsListener(watcher2); - - // Creating old state format collection - - CollectionAdminRequest.createCollection("testcollection3", "config", 4, 1) - .setStateFormat(1) - .processAndWait(client, MAX_WAIT_TIMEOUT); - cluster.waitForActiveCollection("testcollection3", 4, 4); - - assertEquals("CloudCollectionsListener has incorrect old collections after collection created with old stateFormat", 2, oldResults.get(1).size()); - assertEquals("CloudCollectionsListener updated after removal", 1, oldResults.get(2).size()); - assertEquals("CloudCollectionsListener not updated with created collection with old stateFormat", 3, newResults.get(1).size()); - assertTrue("CloudCollectionsListener not updated with created collection with old stateFormat", newResults.get(1).contains("testcollection3")); - assertEquals("CloudCollectionsListener updated after removal", 2, newResults.get(2).size()); - assertFalse("CloudCollectionsListener updated after removal", newResults.get(2).contains("testcollection3")); - - // Adding back listener - client.getZkStateReader().registerCloudCollectionsListener(watcher2); - - assertEquals("CloudCollectionsListener has old collections after registration", 0, oldResults.get(2).size()); - assertEquals("CloudCollectionsListener doesn't have all collections after registration", 3, newResults.get(2).size()); - - // Deleting old state format collection - - CollectionAdminRequest.deleteCollection("testcollection1").processAndWait(client, MAX_WAIT_TIMEOUT); - - assertEquals("CloudCollectionsListener doesn't have all old collections after collection removal", 3, oldResults.get(1).size()); - assertEquals("CloudCollectionsListener doesn't have all old collections after collection removal", 3, oldResults.get(2).size()); - assertEquals("CloudCollectionsListener doesn't have correct new collections after collection removal", 2, newResults.get(1).size()); - assertEquals("CloudCollectionsListener doesn't have correct new collections after collection removal", 2, newResults.get(2).size()); - assertFalse("CloudCollectionsListener not updated with deleted collection with old stateFormat", newResults.get(1).contains("testcollection1")); - assertFalse("CloudCollectionsListener not updated with deleted collection with old stateFormat", newResults.get(2).contains("testcollection1")); - - CollectionAdminRequest.deleteCollection("testcollection2").processAndWait(client, MAX_WAIT_TIMEOUT); - - assertEquals("CloudCollectionsListener doesn't have all old collections after collection removal", 2, oldResults.get(1).size()); - assertEquals("CloudCollectionsListener doesn't have all old collections after collection removal", 2, oldResults.get(2).size()); - 
assertEquals("CloudCollectionsListener doesn't have correct new collections after collection removal", 1, newResults.get(1).size()); - assertEquals("CloudCollectionsListener doesn't have correct new collections after collection removal", 1, newResults.get(2).size()); - assertFalse("CloudCollectionsListener not updated with deleted collection with new stateFormat", newResults.get(1).contains("testcollection2")); - assertFalse("CloudCollectionsListener not updated with deleted collection with new stateFormat", newResults.get(2).contains("testcollection2")); - - client.getZkStateReader().removeCloudCollectionsListener(watcher1); - - CollectionAdminRequest.deleteCollection("testcollection3").processAndWait(client, MAX_WAIT_TIMEOUT); - - assertEquals("CloudCollectionsListener updated after removal", 2, oldResults.get(1).size()); - assertEquals("CloudCollectionsListener doesn't have all old collections after collection removal", 1, oldResults.get(2).size()); - assertEquals("CloudCollectionsListener updated after removal", 1, newResults.get(1).size()); - assertEquals("CloudCollectionsListener doesn't have correct new collections after collection removal", 0, newResults.get(2).size()); - assertTrue("CloudCollectionsListener updated after removal", newResults.get(1).contains("testcollection3")); - assertFalse("CloudCollectionsListener not updated with deleted collection with old stateFormat", newResults.get(2).contains("testcollection3")); - - client.getZkStateReader().removeCloudCollectionsListener(watcher2); - } - } diff --git a/solr/solrj/src/test/org/apache/solr/common/cloud/TestCollectionStateWatchers.java b/solr/solrj/src/test/org/apache/solr/common/cloud/TestCollectionStateWatchers.java index ab3dc954e12a..8c19f3e48c20 100644 --- a/solr/solrj/src/test/org/apache/solr/common/cloud/TestCollectionStateWatchers.java +++ b/solr/solrj/src/test/org/apache/solr/common/cloud/TestCollectionStateWatchers.java @@ -337,27 +337,4 @@ public void testLiveNodeChangesTriggerWatches() throws Exception { () -> client.getZkStateReader().getStateWatchers("test_collection").size() == 0); } - - @Test - public void testWatchesWorkForStateFormat1() throws Exception { - - final CloudSolrClient client = cluster.getSolrClient(); - - Future future = waitInBackground("stateformat1", MAX_WAIT_TIMEOUT, TimeUnit.SECONDS, - (n, c) -> DocCollection.isFullyActive(n, c, 1, 1)); - - CollectionAdminRequest.createCollection("stateformat1", "config", 1, 1).setStateFormat(1) - .processAndWait(client, MAX_WAIT_TIMEOUT); - assertTrue("CollectionStateWatcher not notified of stateformat=1 collection creation", - future.get()); - - Future migrated = waitInBackground("stateformat1", MAX_WAIT_TIMEOUT, TimeUnit.SECONDS, - (n, c) -> c != null && c.getStateFormat() == 2); - - CollectionAdminRequest.migrateCollectionFormat("stateformat1") - .processAndWait(client, MAX_WAIT_TIMEOUT); - assertTrue("CollectionStateWatcher did not persist over state format migration", migrated.get()); - - } - } diff --git a/solr/solrj/src/test/org/apache/solr/common/cloud/TestDocCollectionWatcher.java b/solr/solrj/src/test/org/apache/solr/common/cloud/TestDocCollectionWatcher.java index 22d687e5c066..f22c7cdc5ca0 100644 --- a/solr/solrj/src/test/org/apache/solr/common/cloud/TestDocCollectionWatcher.java +++ b/solr/solrj/src/test/org/apache/solr/common/cloud/TestDocCollectionWatcher.java @@ -264,30 +264,4 @@ public void testDeletionsTriggerWatches() throws Exception { assertTrue("DocCollectionWatcher not notified of delete call", future.get()); } - - @Test - public 
void testWatchesWorkForStateFormat1() throws Exception { - - final CloudSolrClient client = cluster.getSolrClient(); - - Future future = waitInBackground("stateformat1", MAX_WAIT_TIMEOUT, TimeUnit.SECONDS, - (c) -> (null != c) ); - - CollectionAdminRequest.createCollection("stateformat1", "config", 1, 1).setStateFormat(1) - .processAndWait(client, MAX_WAIT_TIMEOUT); - client.waitForState("stateformat1", MAX_WAIT_TIMEOUT, TimeUnit.SECONDS, - (n, c) -> DocCollection.isFullyActive(n, c, 1, 1)); - - assertTrue("DocCollectionWatcher not notified of stateformat=1 collection creation", - future.get()); - - Future migrated = waitInBackground("stateformat1", MAX_WAIT_TIMEOUT, TimeUnit.SECONDS, - (c) -> c != null && c.getStateFormat() == 2); - - CollectionAdminRequest.migrateCollectionFormat("stateformat1") - .processAndWait(client, MAX_WAIT_TIMEOUT); - assertTrue("DocCollectionWatcher did not persist over state format migration", migrated.get()); - - } - } diff --git a/solr/solrj/src/test/org/apache/solr/common/params/CommonParamsTest.java b/solr/solrj/src/test/org/apache/solr/common/params/CommonParamsTest.java index 98e23682cd52..c38463735040 100755 --- a/solr/solrj/src/test/org/apache/solr/common/params/CommonParamsTest.java +++ b/solr/solrj/src/test/org/apache/solr/common/params/CommonParamsTest.java @@ -33,4 +33,6 @@ public class CommonParamsTest extends SolrTestCase public void testRowsDefault() { assertEquals(10, CommonParams.ROWS_DEFAULT); } public void testPreferLocalShards() { assertEquals("preferLocalShards", CommonParams.PREFER_LOCAL_SHARDS); } + + public void testMinExactCount() { assertEquals("minExactCount", CommonParams.MIN_EXACT_COUNT); } } diff --git a/solr/solrj/src/test/org/apache/solr/common/util/JsonValidatorTest.java b/solr/solrj/src/test/org/apache/solr/common/util/JsonValidatorTest.java index 8c0626674799..404661aefa46 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/JsonValidatorTest.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/JsonValidatorTest.java @@ -46,6 +46,7 @@ public void testSchema() { public void testSchemaValidation() { ValidatingJsonMap spec = Utils.getSpec("collections.Commands").getSpec(); + @SuppressWarnings({"rawtypes"}) Map createSchema = spec.getMap("commands", NOT_NULL).getMap("create-alias", NOT_NULL); JsonSchemaValidator validator = new JsonSchemaValidator(createSchema); List errs = validator.validateJson(Utils.fromJSONString("{name : x, collections: [ c1 , c2]}")); @@ -181,8 +182,10 @@ public void testNullObjectValue() { private void checkSchema(String name) { ValidatingJsonMap spec = Utils.getSpec(name).getSpec(); + @SuppressWarnings({"rawtypes"}) Map commands = (Map) spec.get("commands"); for (Object o : commands.entrySet()) { + @SuppressWarnings({"rawtypes"}) Map.Entry cmd = (Map.Entry) o; try { JsonSchemaValidator validator = new JsonSchemaValidator((Map) cmd.getValue()); diff --git a/solr/solrj/src/test/org/apache/solr/common/util/NamedListTest.java b/solr/solrj/src/test/org/apache/solr/common/util/NamedListTest.java index cd737c2253a9..c50edfc3b13d 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/NamedListTest.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/NamedListTest.java @@ -132,7 +132,7 @@ public void testRecursive() { NamedList nl2 = new NamedList<>(); nl2.add("key2a", "value2a"); nl2.add("key2b", nl2b); - nl2.add("k2int1", (int) 5); + nl2.add("k2int1", 5); NamedList nl3 = new NamedList<>(); nl3.add("key3a", nl3a); nl3.add("key3b", "value3b"); @@ -197,9 +197,12 @@ public void 
testRecursive() { @Test // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Sep-2018 + @SuppressWarnings({"unchecked"}) public void testShallowMap() { + @SuppressWarnings({"rawtypes"}) NamedList nl = new NamedList(); nl.add("key1", "Val1"); + @SuppressWarnings({"rawtypes"}) Map m = nl.asShallowMap(); m.put("key1", "Val1_"); assertEquals("Val1_", nl.get("key1")); diff --git a/solr/solrj/src/test/org/apache/solr/common/util/TestFastJavabinDecoder.java b/solr/solrj/src/test/org/apache/solr/common/util/TestFastJavabinDecoder.java index ecbdf4498561..5e7d774174a6 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/TestFastJavabinDecoder.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/TestFastJavabinDecoder.java @@ -77,32 +77,38 @@ public void testSimple() throws IOException { "}"; + @SuppressWarnings({"rawtypes"}) Map m = (Map) Utils.fromJSONString(sampleObj); BinaryRequestWriter.BAOS baos = new BinaryRequestWriter.BAOS(); try (JavaBinCodec jbc = new JavaBinCodec()) { jbc.marshal(m, baos); } + @SuppressWarnings({"rawtypes"}) Map m2; try (JavaBinCodec jbc = new JavaBinCodec()) { m2 = (Map) jbc.unmarshal(new FastInputStream(null, baos.getbuf(), 0, baos.size())); } + @SuppressWarnings({"rawtypes"}) LinkedHashMap fastMap = (LinkedHashMap) new FastJavaBinDecoder() .withInputStream(new FastInputStream(null, baos.getbuf(), 0, baos.size())) .decode(FastJavaBinDecoder.getEntryListener()); assertEquals(Utils.writeJson(m2, new StringWriter(), true).toString(), Utils.writeJson(fastMap, new StringWriter(), true).toString()); + @SuppressWarnings({"unchecked"}) Object newMap = new FastJavaBinDecoder() .withInputStream(new FastInputStream(null, baos.getbuf(), 0, baos.size())) .decode(e -> { e.listenContainer(new LinkedHashMap<>(), e_ -> { + @SuppressWarnings({"rawtypes"}) Map rootMap = (Map) e_.ctx(); if (e_.type() == DataEntry.Type.ENTRY_ITER) { e_.listenContainer(rootMap.computeIfAbsent(e_.name(), NEW_ARRAYLIST_FUN), FastJavaBinDecoder.getEntryListener()); } else if (e_.type() == DataEntry.Type.KEYVAL_ITER) { e_.listenContainer(rootMap.computeIfAbsent(e_.name(), NEW_LINKED_HASHMAP_FUN), e1 -> { + @SuppressWarnings({"rawtypes"}) Map m1 = (Map) e1.ctx(); if ("k1".equals(e1.name())) { m1.put(e1.name(), e1.val().toString()); @@ -128,6 +134,7 @@ public void testFastJavabinStreamingDecoder() throws IOException { SolrDocumentList list; try (JavaBinCodec jbc = new JavaBinCodec()) { + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap o = (SimpleOrderedMap) jbc.unmarshal(baos.toByteArray()); list = (SolrDocumentList) o.get("response"); } @@ -138,6 +145,7 @@ class Pojo { CharSequence id; boolean inStock; float price; + @SuppressWarnings({"rawtypes"}) List children; } StreamingBinaryResponseParser parser = new StreamingBinaryResponseParser(new FastStreamingDocsCallback() { @@ -184,6 +192,7 @@ public void endDoc(Object docObj) { parser.processResponse(new FastInputStream(null, baos.getbuf(), 0, baos.size()), null); } + @SuppressWarnings({"unchecked"}) public void testParsingWithChildDocs() throws IOException { SolrDocument d1 = TestJavaBinCodec.generateSolrDocumentWithChildDocs(); d1.setField("id", "101"); @@ -197,6 +206,7 @@ public void testParsingWithChildDocs() throws IOException { sdocs.add(d1); sdocs.add(d2); + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap orderedMap = new SimpleOrderedMap(); orderedMap.add("response", sdocs); @@ -219,6 +229,7 @@ public void compare(SolrDocument d) { assertEquals(subject, d.getFieldValue("subject")); 
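// These field-by-field assertions close a JavaBin round-trip: the test marshals a
// document graph to bytes with JavaBinCodec and decodes it back before comparing.
// A minimal sketch of that round-trip idiom, assuming java.util.Map,
// java.io.ByteArrayInputStream/ByteArrayOutputStream and JavaBinCodec are imported
// (names here are illustrative only):
//   Map<String, Object> original = new LinkedHashMap<>();
//   original.put("id", "101");
//   ByteArrayOutputStream out = new ByteArrayOutputStream();
//   try (JavaBinCodec codec = new JavaBinCodec()) {
//     codec.marshal(original, out);
//   }
//   try (JavaBinCodec codec = new JavaBinCodec()) {
//     Object decoded = codec.unmarshal(new ByteArrayInputStream(out.toByteArray()));
//     assert original.get("id").equals(((Map<?, ?>) decoded).get("id"));
//   }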
assertEquals(cat, d.getFieldValue("cat")); assertEquals(d.getChildDocumentCount(), children.size()); + @SuppressWarnings({"unchecked"}) List l = (List) d.getFieldValue("longs"); if(l != null){ assertNotNull(longs); @@ -245,6 +256,7 @@ public Object initDocList(Long numFound, Long start, Float maxScore) { } @Override + @SuppressWarnings({"unchecked"}) public Object startDoc(Object docListObj) { Pojo pojo = new Pojo(); ((List) docListObj).add(pojo); @@ -264,6 +276,7 @@ public void field(DataEntry field, Object docObj) { if(useListener[0]){ field.listenContainer(pojo.longs = new long[field.length()], READLONGS); } else { + @SuppressWarnings({"unchecked"}) List longList = (List) field.val(); pojo.longs = new long[longList.size()]; for (int i = 0; i < longList.size(); i++) { diff --git a/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java b/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java index 7737f0c7fa6b..b9db14fb2a39 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java @@ -16,6 +16,19 @@ */ package org.apache.solr.common.util; +import org.apache.commons.io.IOUtils; +import org.apache.lucene.util.TestUtil; +import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.common.EnumFieldValue; +import org.apache.solr.common.SolrDocument; +import org.apache.solr.common.SolrDocumentList; +import org.apache.solr.common.SolrInputDocument; +import org.apache.solr.common.SolrInputField; +import org.apache.solr.util.ConcurrentLRUCache; +import org.apache.solr.util.RTimer; +import org.junit.Test; +import org.noggit.CharArr; + import java.io.BufferedOutputStream; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -31,19 +44,6 @@ import java.util.Map; import java.util.Random; -import org.apache.commons.io.IOUtils; -import org.apache.lucene.util.TestUtil; -import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.common.EnumFieldValue; -import org.apache.solr.common.SolrDocument; -import org.apache.solr.common.SolrDocumentList; -import org.apache.solr.common.SolrInputDocument; -import org.apache.solr.common.SolrInputField; -import org.apache.solr.util.ConcurrentLRUCache; -import org.apache.solr.util.RTimer; -import org.junit.Test; -import org.noggit.CharArr; - public class TestJavaBinCodec extends SolrTestCaseJ4 { private static final String SOLRJ_JAVABIN_BACKCOMPAT_BIN = "/solrj/javabin_backcompat.bin"; @@ -105,6 +105,7 @@ public static SolrDocument generateSolrDocumentWithChildDocs() { return parentDocument; } + @SuppressWarnings({"unchecked"}) private List generateAllDataTypes() { List types = new ArrayList<>(); @@ -137,6 +138,7 @@ private List generateAllDataTypes() { SolrDocumentList solrDocs = new SolrDocumentList(); solrDocs.setMaxScore(1.0f); solrDocs.setNumFound(1); + solrDocs.setNumFoundExact(Boolean.TRUE); solrDocs.setStart(0); solrDocs.add(0, doc); types.add(solrDocs); @@ -171,6 +173,7 @@ private List generateAllDataTypes() { types.add(1); types.add((long) 2); + @SuppressWarnings({"rawtypes"}) SimpleOrderedMap simpleOrderedMap = new SimpleOrderedMap(); simpleOrderedMap.add("bar", "barbar"); types.add(simpleOrderedMap); @@ -191,6 +194,7 @@ public List readIterator(DataInputInputStream fis) throws IOException { } };) { + @SuppressWarnings({"unchecked"}) List unmarshaledObj = (List) javabin.unmarshal(is); List matchObj = generateAllDataTypes(); compareObjects(unmarshaledObj, matchObj); @@ -200,7 +204,8 @@ public List 
readIterator(DataInputInputStream fis) throws IOException { } - private void compareObjects(List unmarshaledObj, List matchObj) { + private void compareObjects(@SuppressWarnings({"rawtypes"})List unmarshaledObj, + @SuppressWarnings({"rawtypes"})List matchObj) { assertEquals(unmarshaledObj.size(), matchObj.size()); for (int i = 0; i < unmarshaledObj.size(); i++) { @@ -393,6 +398,7 @@ public void testResponseChildDocuments() throws IOException { assertNull(grandChildDocuments.get(0).getChildDocuments()); } @Test + @SuppressWarnings({"unchecked", "rawtypes"}) public void testStringCaching() throws Exception { Map m = Utils.makeMap("key1", "val1", "key2", "val2"); byte[] b1 = getBytes(m);//copy 1 @@ -515,7 +521,7 @@ public String put(JavaBinCodec.StringBytes key, String val) { } private static void runInThreads(int count, Runnable runnable) throws InterruptedException { - ArrayList t =new ArrayList(); + ArrayList t =new ArrayList<>(); for(int i=0;i 0 ? new ConcurrentLRUCache<>(cacheSz,cacheSz-cacheSz/10,cacheSz,cacheSz/10,false,true,null) : null; // the cache in the first version of the patch was 10000,9000,10000,1000,false,true,null final JavaBinCodec.StringCache stringCache = underlyingCache==null ? null : new JavaBinCodec.StringCache(underlyingCache); if (nThreads <= 0) { diff --git a/solr/solrj/src/test/org/apache/solr/common/util/TestJsonRecordReader.java b/solr/solrj/src/test/org/apache/solr/common/util/TestJsonRecordReader.java index 58ddb53257e2..adda6753e0a4 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/TestJsonRecordReader.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/TestJsonRecordReader.java @@ -162,6 +162,7 @@ public void handle(Map record, String path) { String buf = parser.getBuf(); parser.resetBuf(); + @SuppressWarnings({"rawtypes"}) Map m = (Map) Utils.fromJSONString(buf); if (count == 1) { assertEquals(m.get("id"), "123"); @@ -285,7 +286,9 @@ public void testNestedDocs() throws Exception { "}}"; streamer.streamRecords(new StringReader(json), (record, path) -> { assertEquals(record.get("x"), "y"); + @SuppressWarnings({"rawtypes"}) List l = (List) record.get("b"); + @SuppressWarnings({"rawtypes"}) Map m = (Map) l.get(0); assertEquals(m.get("c"), "c1"); assertEquals(m.get("e"), "e1"); @@ -296,7 +299,9 @@ public void testNestedDocs() throws Exception { streamer = JsonRecordReader.getInst("/|/a/b", Arrays.asList("$FQN:/**")); streamer.streamRecords(new StringReader(json), (record, path) -> { assertEquals(record.get("a.x"), "y"); + @SuppressWarnings({"rawtypes"}) List l = (List) record.get("b"); + @SuppressWarnings({"rawtypes"}) Map m = (Map) l.get(0); assertEquals(m.get("c"), "c1"); assertEquals(m.get("e"), "e1"); diff --git a/solr/solrj/src/test/org/apache/solr/common/util/TestNamedListCodec.java b/solr/solrj/src/test/org/apache/solr/common/util/TestNamedListCodec.java index b68b7895a5c9..070dfe6961ee 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/TestNamedListCodec.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/TestNamedListCodec.java @@ -33,6 +33,7 @@ public class TestNamedListCodec extends SolrTestCase { @Test // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Sep-2018 + @SuppressWarnings({"unchecked", "rawtypes"}) public void testSimple() throws Exception{ NamedList nl = new NamedList(); @@ -97,15 +98,19 @@ public void testSimple() throws Exception{ @Test // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 
added 20-Sep-2018 + @SuppressWarnings({"unchecked"}) public void testIterator() throws Exception{ + @SuppressWarnings({"rawtypes"}) NamedList nl = new NamedList(); Float fval = 10.01f; Boolean bval = Boolean.TRUE; String sval = "12qwaszx"; // Set up a simple document + @SuppressWarnings({"rawtypes"}) NamedList r = new NamedList(); + @SuppressWarnings({"rawtypes"}) List list = new ArrayList(); SolrDocument doc = new SolrDocument(); @@ -132,15 +137,18 @@ public void testIterator() throws Exception{ nl = (NamedList) jbc.unmarshal(bais); } + @SuppressWarnings({"rawtypes"}) List l = (List) nl.get("zzz"); assertEquals(list.size(), l.size()); } @Test // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Sep-2018 + @SuppressWarnings({"unchecked"}) public void testIterable() throws Exception { + @SuppressWarnings({"rawtypes"}) NamedList r = new NamedList(); Map map = new HashMap<>(); @@ -159,14 +167,17 @@ public void testIterable() throws Exception { } try (JavaBinCodec jbc = new JavaBinCodec(); ByteArrayInputStream bais = new ByteArrayInputStream(arr)) { + @SuppressWarnings({"rawtypes"}) NamedList result = (NamedList) jbc.unmarshal(bais); assertTrue("result is null and it shouldn't be", result != null); + @SuppressWarnings({"rawtypes"}) List keys = (List) result.get("keys"); assertTrue("keys is null and it shouldn't be", keys != null); assertTrue("keys Size: " + keys.size() + " is not: " + 3, keys.size() == 3); String less = (String) result.get("more"); assertTrue("less is null and it shouldn't be", less != null); assertTrue(less + " is not equal to " + "less", less.equals("less") == true); + @SuppressWarnings({"rawtypes"}) List values = (List) result.get("values"); assertTrue("values is null and it shouldn't be", values != null); assertTrue("values Size: " + values.size() + " is not: " + 3, values.size() == 3); @@ -199,6 +210,7 @@ public String rStr(int sz) { } + @SuppressWarnings({"unchecked", "rawtypes"}) public NamedList rNamedList(int lev) { int sz = lev<= 0 ? 0 : r.nextInt(3); NamedList nl = new NamedList(); @@ -208,6 +220,7 @@ public NamedList rNamedList(int lev) { return nl; } + @SuppressWarnings({"unchecked", "rawtypes"}) public List rList(int lev) { int sz = lev<= 0 ? 
0 : r.nextInt(3); ArrayList lst = new ArrayList(); @@ -259,7 +272,9 @@ public void testRandom() throws Exception { // let's keep it deterministic since just the wrong // random stuff could cause failure because of an OOM (too big) + @SuppressWarnings({"rawtypes"}) NamedList nl; + @SuppressWarnings({"rawtypes"}) NamedList res; String cmp; diff --git a/solr/solrj/src/test/org/apache/solr/common/util/TestPathTrie.java b/solr/solrj/src/test/org/apache/solr/common/util/TestPathTrie.java index 52a661ff6341..5a568210b0e7 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/TestPathTrie.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/TestPathTrie.java @@ -30,6 +30,7 @@ public class TestPathTrie extends SolrTestCaseJ4 { + @SuppressWarnings({"unchecked"}) public void testPathTrie() { PathTrie pathTrie = new PathTrie<>(ImmutableSet.of("_introspect")); pathTrie.insert("/", emptyMap(), "R"); @@ -39,6 +40,7 @@ public void testPathTrie() { pathTrie.insert("/aa/bb/{cc}/{xx}", emptyMap(), "b"); pathTrie.insert("/aa/bb", emptyMap(), "c"); + @SuppressWarnings({"rawtypes"}) HashMap templateValues = new HashMap<>(); assertEquals("R", pathTrie.lookup("/", templateValues, null)); assertEquals("d", pathTrie.lookup("/aa", templateValues, null)); diff --git a/solr/solrj/src/test/org/apache/solr/common/util/TestValidatingJsonMap.java b/solr/solrj/src/test/org/apache/solr/common/util/TestValidatingJsonMap.java index b53488ac3000..7c04ff4df26d 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/TestValidatingJsonMap.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/TestValidatingJsonMap.java @@ -42,6 +42,7 @@ public void testBasic() throws Exception { expectThrows(RuntimeException.class, () -> m.getList("l", ENUM_OF, ImmutableSet.of("X", "Z"))); + @SuppressWarnings({"rawtypes"}) List l = m.getList("l", ENUM_OF, ImmutableSet.of("X", "Y", "Z")); assertEquals(2,l.size()); m.getList("l", NOT_NULL); diff --git a/solr/solrj/src/test/org/apache/solr/common/util/Utf8CharSequenceTest.java b/solr/solrj/src/test/org/apache/solr/common/util/Utf8CharSequenceTest.java index bd45da9af5f1..c3bfbed295ba 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/Utf8CharSequenceTest.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/Utf8CharSequenceTest.java @@ -27,6 +27,7 @@ public class Utf8CharSequenceTest extends SolrTestCaseJ4 { + @SuppressWarnings({"unchecked"}) public void testLargeString() throws IOException { StringBuilder sb = new StringBuilder(); for (int i = 0; i < 100; i++) { @@ -48,6 +49,7 @@ public void testLargeString() throws IOException { utf81 = new ByteArrayUtf8CharSequence(result, 0, result.length); assertTrue(utf81.equals(utf8)); + @SuppressWarnings({"rawtypes"}) Map m0 = new HashMap(); m0.put("str", utf8); baos.reset(); @@ -56,6 +58,7 @@ public void testLargeString() throws IOException { } result = baos.toByteArray(); try (JavaBinCodec jbc = new JavaBinCodec()) { + @SuppressWarnings({"rawtypes"}) Map m1 = (Map) jbc .setReadStringAsCharSeq(true) .unmarshal(new ByteArrayInputStream(result)); @@ -64,7 +67,9 @@ public void testLargeString() throws IOException { } } + @SuppressWarnings({"unchecked"}) public void testUnMarshal() throws IOException { + @SuppressWarnings({"rawtypes"}) NamedList nl = new NamedList(); String str = " The value!"; for (int i = 0; i < 5; i++) { @@ -88,6 +93,7 @@ public void testUnMarshal() throws IOException { } byte[] bytes = baos.toByteArray(); + @SuppressWarnings({"rawtypes"}) NamedList nl1; try (JavaBinCodec jbc = new JavaBinCodec()) { nl1 = 
(NamedList) jbc diff --git a/solr/test-framework/build.gradle b/solr/test-framework/build.gradle index b6cbdbad60f3..62a724c506dd 100644 --- a/solr/test-framework/build.gradle +++ b/solr/test-framework/build.gradle @@ -15,9 +15,10 @@ * limitations under the License. */ - apply plugin: 'java-library' +description = 'Solr Test Framework' + dependencies { api project(':solr:core') api project(':solr:solrj') diff --git a/solr/test-framework/src/java/org/apache/solr/BaseDistributedSearchTestCase.java b/solr/test-framework/src/java/org/apache/solr/BaseDistributedSearchTestCase.java index 78f1faef27b4..66c1974a7262 100644 --- a/solr/test-framework/src/java/org/apache/solr/BaseDistributedSearchTestCase.java +++ b/solr/test-framework/src/java/org/apache/solr/BaseDistributedSearchTestCase.java @@ -160,7 +160,7 @@ public static void initHostContext() { // paranoia, we *really* don't want to ever get "//" in a path... final String hc = hostContext.toString().replaceAll("\\/+","/"); - log.info("Setting hostContext system property: " + hc); + log.info("Setting hostContext system property: {}", hc); System.setProperty("hostContext", hc); } @@ -737,17 +737,21 @@ public static int flags(Map handle, Object key) { return f == null ? 0 : f; } - public static String compare(NamedList a, NamedList b, int flags, Map handle) { + @SuppressWarnings({"unchecked"}) + public static String compare(@SuppressWarnings({"rawtypes"})NamedList a, + @SuppressWarnings({"rawtypes"})NamedList b, int flags, Map handle) { // System.out.println("resp a:" + a); // System.out.println("resp b:" + b); boolean ordered = (flags & UNORDERED) == 0; if (!ordered) { + @SuppressWarnings({"rawtypes"}) Map mapA = new HashMap(a.size()); for (int i=0; i handle) { + public static String compare1(@SuppressWarnings({"rawtypes"})Map a, + @SuppressWarnings({"rawtypes"})Map b, + int flags, Map handle) { String cmp; for (Object keya : a.keySet()) { @@ -832,7 +838,9 @@ public static String compare1(Map a, Map b, int flags, Map hand return null; } - public static String compare(Map a, Map b, int flags, Map handle) { + public static String compare(@SuppressWarnings({"rawtypes"})Map a, + @SuppressWarnings({"rawtypes"})Map b, + int flags, Map handle) { String cmp; cmp = compare1(a, b, flags, handle); if (cmp != null) return cmp; @@ -994,7 +1002,7 @@ protected void compareSolrResponses(SolrResponse a, SolrResponse b) { handle.put("rf", SKIPVAL); String cmp = compare(a.getResponse(), b.getResponse(), flags, handle); if (cmp != null) { - log.error("Mismatched responses:\n" + a + "\n" + b); + log.error("Mismatched responses:\n{}\n{}", a, b); Assert.fail(cmp); } } @@ -1071,6 +1079,7 @@ private ShardsFixedStatement(int numShards, Statement statement) { } @Override + @SuppressWarnings({"rawtypes"}) public void callStatement() throws Throwable { RandVal.uniqueValues = new HashSet(); // reset random values fixShardCount(numShards); @@ -1098,6 +1107,7 @@ private ShardsRepeatStatement(int min, int max, Statement statement) { } @Override + @SuppressWarnings({"rawtypes"}) public void callStatement() throws Throwable { for (shardCount = min; shardCount <= max; shardCount++) { @@ -1155,7 +1165,9 @@ public void validateControlData(QueryResponse control) throws Exception { /* no-op */ } + @SuppressWarnings({"unchecked"}) public static abstract class RandVal { + @SuppressWarnings({"rawtypes"}) public static Set uniqueValues = new HashSet(); public abstract Object val(); diff --git a/solr/test-framework/src/java/org/apache/solr/JSONTestUtil.java 
b/solr/test-framework/src/java/org/apache/solr/JSONTestUtil.java index f5b2ffb45f70..b1acca823a21 100644 --- a/solr/test-framework/src/java/org/apache/solr/JSONTestUtil.java +++ b/solr/test-framework/src/java/org/apache/solr/JSONTestUtil.java @@ -105,6 +105,7 @@ public NoDupsObjectBuilder(JSONParser parser) throws IOException { @Override public void addKeyVal(Object map, Object key, Object val) throws IOException { + @SuppressWarnings({"unchecked"}) Object prev = ((Map) map).put(key, val); if (prev != null) { throw new RuntimeException("REPEATED JSON OBJECT KEY: key=" + key + " prevValue=" + prev + " thisValue" + val); @@ -259,7 +260,9 @@ private boolean handleSpecialString(String str) { } boolean matchList() { + @SuppressWarnings({"rawtypes"}) List expectedList = (List)expected; + @SuppressWarnings({"rawtypes"}) List v = asList(); if (v == null) return false; int a = 0; @@ -290,6 +293,7 @@ boolean matchList() { private static Set reserved = new HashSet<>(Arrays.asList("_SKIP_","_MATCH_","_ORDERED_","_UNORDERED_")); + @SuppressWarnings({"unchecked", "rawtypes"}) boolean matchMap() { Map expectedMap = (Map)expected; Map v = asMap(); @@ -397,6 +401,7 @@ public boolean seek(String seekPath) { return seek(pathList); } + @SuppressWarnings({"rawtypes"}) List asList() { // TODO: handle native arrays if (val instanceof List) { @@ -406,6 +411,7 @@ List asList() { return null; } + @SuppressWarnings({"unchecked"}) Map asMap() { // TODO: handle NamedList if (val instanceof Map) { @@ -420,6 +426,7 @@ public boolean seek(List seekPath) { String seg = seekPath.get(0); if (seg.charAt(0)=='[') { + @SuppressWarnings({"rawtypes"}) List listVal = asList(); if (listVal==null) return false; diff --git a/solr/test-framework/src/java/org/apache/solr/SolrJettyTestBase.java b/solr/test-framework/src/java/org/apache/solr/SolrJettyTestBase.java index 6dcccb4c099e..750dd15f701e 100644 --- a/solr/test-framework/src/java/org/apache/solr/SolrJettyTestBase.java +++ b/solr/test-framework/src/java/org/apache/solr/SolrJettyTestBase.java @@ -113,7 +113,7 @@ public static JettySolrRunner createAndStartJetty(String solrHome, Properties no jetty = new JettySolrRunner(solrHome, nodeProps, jettyConfig); jetty.start(); port = jetty.getLocalPort(); - log.info("Jetty Assigned Port#" + port); + log.info("Jetty Assigned Port#{}", port); return jetty; } diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCase.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCase.java index 3935dfe9591b..525cd7010ad4 100644 --- a/solr/test-framework/src/java/org/apache/solr/SolrTestCase.java +++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCase.java @@ -83,7 +83,7 @@ public class SolrTestCase extends LuceneTestCase { public static void setDefaultConfigDirSysPropIfNotSet() { final String existingValue = System.getProperty(SolrDispatchFilter.SOLR_DEFAULT_CONFDIR_ATTRIBUTE); if (null != existingValue) { - log.info("Test env includes configset dir system property '{}'='{}'"); + log.info("Test env includes configset dir system property '{}'='{}'", SolrDispatchFilter.SOLR_DEFAULT_CONFDIR_ATTRIBUTE, existingValue); return; } final File extPath = new File(ExternalPaths.DEFAULT_CONFIGSET); diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseHS.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseHS.java index 3da476acd9d4..919a1c914f83 100644 --- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseHS.java +++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseHS.java @@ -45,12 +45,14 
@@ import org.apache.solr.client.solrj.impl.NoOpResponseParser; import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.client.solrj.request.UpdateRequest; +import org.apache.solr.client.solrj.response.DelegationTokenResponse; import org.apache.solr.client.solrj.response.UpdateResponse; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; +import org.apache.solr.common.util.Utils; import org.apache.solr.core.CoreDescriptor; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.schema.IndexSchema; @@ -79,6 +81,7 @@ public static Set set(T... a) { return s; } + @SuppressWarnings({"unchecked"}) public static T rand(T... vals) { return vals[ random().nextInt(vals.length) ]; } @@ -107,9 +110,13 @@ public static Map toObject(Doc doc, IndexSchema schema, Collectio } - public static Object createDocObjects(Map fullModel, Comparator sort, int rows, Collection fieldNames) { + @SuppressWarnings({"unchecked"}) + public static Object createDocObjects(@SuppressWarnings({"rawtypes"})Map fullModel, + @SuppressWarnings({"rawtypes"})Comparator sort, int rows, + Collection fieldNames) { List docList = new ArrayList<>(fullModel.values()); Collections.sort(docList, sort); + @SuppressWarnings({"rawtypes"}) List sortedDocs = new ArrayList(rows); for (Doc doc : docList) { if (sortedDocs.size() >= rows) break; @@ -120,18 +127,16 @@ public static Object createDocObjects(Map fullModel, Comparator } - public static void compare(SolrQueryRequest req, String path, Object model, Map fullModel) throws Exception { + public static void compare(SolrQueryRequest req, String path, Object model, + @SuppressWarnings({"rawtypes"})Map fullModel) throws Exception { String strResponse = h.query(req); Object realResponse = ObjectBuilder.fromJSON(strResponse); String err = JSONTestUtil.matchObj(path, realResponse, model); if (err != null) { - log.error("RESPONSE MISMATCH: " + err - + "\n\trequest="+req - + "\n\tresult="+strResponse - + "\n\texpected="+ JSONUtil.toJSON(model) - + "\n\tmodel="+ fullModel - ); + log.error("RESPONSE MISMATCH: {}\n\trequest={}\n\tresult={}" + + "\n\texpected={}\n\tmodel={}" + , err, req, strResponse, JSONUtil.toJSON(model), fullModel); // re-execute the request... good for putting a breakpoint here for debugging String rsp = h.query(req); @@ -184,17 +189,15 @@ public static void matchJSON(String response, String... tests) throws Exception String err = JSONTestUtil.match(response, test, JSONTestUtil.DEFAULT_DELTA); failed = false; if (err != null) { - log.error("query failed JSON validation. error=" + err + - "\n expected =" + test + - "\n response = " + response + log.error("query failed JSON validation. error={}\n expected ={}\n response = {}" + , err, test, response ); throw new RuntimeException(err); } } finally { if (failed) { - log.error("JSON query validation threw an exception." 
+ - "\n expected =" + test + - "\n response = " + response + log.error("JSON query validation threw an exception.\n expected ={}\n response = {}" + , test, response ); } } @@ -230,12 +233,15 @@ public static String getQueryResponse(SolrClient client, String wt, SolrParams p query.setPath(path); } - query.setResponseParser(new NoOpResponseParser(wt)); - NamedList rsp = client.request(query); - - String raw = (String)rsp.get("response"); - - return raw; + if ("json".equals(wt)) { + query.setResponseParser(new DelegationTokenResponse.JsonMapResponseParser()); + NamedList rsp = client.request(query); + return Utils.toJSONString(rsp); + } else { + query.setResponseParser(new NoOpResponseParser(wt)); + NamedList rsp = client.request(query); + return (String)rsp.get("response"); + } } public static String getQueryResponse(String wt, SolrParams params) throws Exception { @@ -524,7 +530,9 @@ public void start() throws Exception { jetty.start(); port = jetty.getLocalPort(); - log.info("===> Started solr server port=" + port + " home="+getBaseDir()); + if (log.isInfoEnabled()) { + log.info("===> Started solr server port={} home={}", port, getBaseDir()); + } } public void stop() throws Exception { diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java index 704b4040746a..d70013be3494 100644 --- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java +++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java @@ -136,6 +136,7 @@ import org.apache.solr.update.processor.DistributedUpdateProcessor; import org.apache.solr.update.processor.DistributedZkUpdateProcessor; import org.apache.solr.update.processor.UpdateRequestProcessor; +import org.apache.solr.util.BaseTestHarness; import org.apache.solr.util.ExternalPaths; import org.apache.solr.util.LogLevel; import org.apache.solr.util.RandomizeSSL; @@ -445,7 +446,9 @@ public static String clearObjectTrackerAndCheckEmpty(int waitSeconds, boolean tr private static Map savedClassLogLevels = new HashMap<>(); public static void initClassLogLevels() { + @SuppressWarnings({"rawtypes"}) Class currentClass = RandomizedContext.current().getTargetClass(); + @SuppressWarnings({"unchecked"}) LogLevel annotation = (LogLevel) currentClass.getAnnotation(LogLevel.class); if (annotation == null) { return; @@ -517,8 +520,10 @@ private static SSLTestConfig buildSSLConfig() { } SSLTestConfig result = sslRandomizer.createSSLTestConfig(); - log.info("Randomized ssl ({}) and clientAuth ({}) via: {}", - result.isSSLMode(), result.isClientAuthMode(), sslRandomizer.debug); + if (log.isInfoEnabled()) { + log.info("Randomized ssl ({}) and clientAuth ({}) via: {}", + result.isSSLMode(), result.isClientAuthMode(), sslRandomizer.debug); + } return result; } @@ -593,12 +598,16 @@ public static Throwable getWrappedException(Throwable e) { @Override public void setUp() throws Exception { super.setUp(); - log.info("###Starting " + getTestName()); // returns ??? + if (log.isInfoEnabled()) { + log.info("###Starting {}", getTestName()); // returns ??? 
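// A minimal sketch (not part of the patch) of the logging pattern these hunks apply:
// SLF4J "{}" placeholders defer message formatting until the level check passes, and
// an explicit log.isInfoEnabled() guard additionally skips argument evaluation when
// an argument is itself a method call. All names below are illustrative.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class LoggingPatternSketch {
  private static final Logger log = LoggerFactory.getLogger(LoggingPatternSketch.class);

  void starting(String testName) {
    // Before: the message string is concatenated even when INFO is disabled.
    // log.info("###Starting " + testName);

    // After: the string is assembled only if INFO is enabled.
    log.info("###Starting {}", testName);

    // Guarded form, used when an argument is an expensive call: at WARN or
    // ERROR level the call never happens at all.
    if (log.isInfoEnabled()) {
      log.info("Created dataDir: {}", expensiveLookup());
    }
  }

  private String expensiveLookup() {
    return "data-dir-" + System.nanoTime(); // stand-in for a costly accessor
  }
}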
+ } } @Override public void tearDown() throws Exception { - log.info("###Ending " + getTestName()); + if (log.isInfoEnabled()) { + log.info("###Ending {}", getTestName()); + } super.tearDown(); } @@ -625,7 +634,9 @@ protected static File initAndGetDataDir() { final int id = dataDirCount.incrementAndGet(); dataDir = initCoreDataDir = createTempDir("data-dir-"+ id).toFile(); assertNotNull(dataDir); - log.info("Created dataDir: {}", dataDir.getAbsolutePath()); + if (log.isInfoEnabled()) { + log.info("Created dataDir: {}", dataDir.getAbsolutePath()); + } } return dataDir; } @@ -671,7 +682,7 @@ public static void startTrackingSearchers() { numCloses = SolrIndexSearcher.numCloses.getAndSet(0); if (numOpens != 0 || numCloses != 0) { // NOTE: some other tests don't use this base class and hence won't reset the counts. - log.warn("startTrackingSearchers: numOpens="+numOpens+" numCloses="+numCloses); + log.warn("startTrackingSearchers: numOpens={} numCloses={}", numOpens, numCloses); numOpens = numCloses = 0; } } @@ -857,7 +868,9 @@ public static boolean hasInitException(Class exceptionType) * to log the fact that their setUp process has ended. */ public void postSetUp() { - log.info("####POSTSETUP " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####POSTSETUP {}", getTestName()); + } } @@ -867,7 +880,9 @@ public void postSetUp() { * tearDown method. */ public void preTearDown() { - log.info("####PRETEARDOWN " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####PRETEARDOWN {}", getTestName()); + } } /** @@ -976,7 +991,7 @@ public static void assertQ(String message, SolrQueryRequest req, String... tests tests = allTests; } - String results = h.validateXPath(response, tests); + String results = BaseTestHarness.validateXPath(response, tests); if (null != results) { String msg = "REQUEST FAILED: xpath=" + results @@ -1012,7 +1027,7 @@ public static String JQ(SolrQueryRequest req) throws Exception { failed = false; } finally { if (failed) { - log.error("REQUEST FAILED: " + req.getParamString()); + log.error("REQUEST FAILED: {}", req.getParamString()); } } @@ -1061,7 +1076,7 @@ public static String assertJQ(SolrQueryRequest req, double delta, String... test failed = false; } finally { if (failed) { - log.error("REQUEST FAILED: " + req.getParamString()); + log.error("REQUEST FAILED: {}", req.getParamString()); } } @@ -1074,19 +1089,15 @@ public static String assertJQ(SolrQueryRequest req, double delta, String... test String err = JSONTestUtil.match(response, testJSON, delta); failed = false; if (err != null) { - log.error("query failed JSON validation. error=" + err + - "\n expected =" + testJSON + - "\n response = " + response + - "\n request = " + req.getParamString() + log.error("query failed JSON validation. error={}\n expected ={}\n response = {}\n request = {}" + , err, testJSON, response, req.getParamString() ); throw new RuntimeException(err); } } finally { if (failed) { - log.error("JSON query validation threw an exception." + - "\n expected =" + testJSON + - "\n response = " + response + - "\n request = " + req.getParamString() + log.error("JSON query validation threw an exception.\n expected ={} \n response = {}\n request = {}" + , testJSON, response, req.getParamString() ); } } @@ -1283,6 +1294,7 @@ public static ModifiableSolrParams params(String... params) { return msp; } + @SuppressWarnings({"unchecked", "rawtypes"}) public static Map map(Object... params) { LinkedHashMap ret = new LinkedHashMap(); for (int i=0; iout array with JSON from the doc. 
* Doesn't currently handle boosts, but does recursively handle child documents */ + @SuppressWarnings({"unchecked"}) public static void json(SolrInputDocument doc, CharArr out) { try { out.append('{'); @@ -1567,7 +1580,9 @@ public static Long addAndGetVersion(SolrInputDocument sdoc, SolrParams params) t params = mparams; } String response = updateJ(jsonAdd(sdoc), params); + @SuppressWarnings({"rawtypes"}) Map rsp = (Map)ObjectBuilder.fromJSON(response); + @SuppressWarnings({"rawtypes"}) List lst = (List)rsp.get("adds"); if (lst == null || lst.size() == 0) return null; return (Long) lst.get(1); @@ -1580,7 +1595,9 @@ public static Long deleteAndGetVersion(String id, SolrParams params) throws Exce params = mparams; } String response = updateJ(jsonDelId(id), params); + @SuppressWarnings({"rawtypes"}) Map rsp = (Map)ObjectBuilder.fromJSON(response); + @SuppressWarnings({"rawtypes"}) List lst = (List)rsp.get("deletes"); if (lst == null || lst.size() == 0) return null; return (Long) lst.get(1); @@ -1593,7 +1610,9 @@ public static Long deleteByQueryAndGetVersion(String q, SolrParams params) throw params = mparams; } String response = updateJ(jsonDelQ(q), params); + @SuppressWarnings({"rawtypes"}) Map rsp = (Map)ObjectBuilder.fromJSON(response); + @SuppressWarnings({"rawtypes"}) List lst = (List)rsp.get("deleteByQuery"); if (lst == null || lst.size() == 0) return null; return (Long) lst.get(1); @@ -1604,8 +1623,9 @@ public static Long deleteByQueryAndGetVersion(String q, SolrParams params) throw ///////////////////////////////////////////////////////////////////////////////////// public abstract static class Vals { + @SuppressWarnings({"rawtypes"}) public abstract Comparable get(); - public String toJSON(Comparable val) { + public String toJSON(@SuppressWarnings({"rawtypes"})Comparable val) { return JSONUtil.toJSON(val); } @@ -1632,6 +1652,7 @@ public int getInt() { } @Override + @SuppressWarnings({"rawtypes"}) public Comparable get() { return getInt(); } @@ -1658,6 +1679,7 @@ public int getInt() { } @Override + @SuppressWarnings({"rawtypes"}) public Comparable get() { return getInt(); } @@ -1677,6 +1699,7 @@ public float getFloat() { } @Override + @SuppressWarnings({"rawtypes"}) public Comparable get() { return getFloat(); } @@ -1685,6 +1708,7 @@ public Comparable get() { public static class BVal extends Vals { @Override + @SuppressWarnings({"rawtypes"}) public Comparable get() { return random().nextBoolean(); } @@ -1708,6 +1732,7 @@ public SVal(char start, char end, int minLength, int maxLength) { } @Override + @SuppressWarnings({"rawtypes"}) public Comparable get() { char[] arr = new char[between(minLength,maxLength)]; for (int i=0; i fields; @@ -1745,12 +1771,14 @@ public boolean equals(Object o) { } @Override + @SuppressWarnings({"unchecked"}) public int compareTo(Object o) { if (!(o instanceof Doc)) return this.getClass().hashCode() - o.getClass().hashCode(); Doc other = (Doc)o; return this.id.compareTo(other.id); } + @SuppressWarnings({"rawtypes"}) public List getValues(String field) { for (Fld fld : fields) { if (fld.ftype.fname.equals(field)) return fld.vals; @@ -1758,6 +1786,7 @@ public List getValues(String field) { return null; } + @SuppressWarnings({"rawtypes"}) public Comparable getFirstValue(String field) { List vals = getValues(field); return vals==null || vals.size()==0 ? 
null : vals.get(0); @@ -1780,6 +1809,7 @@ public Map toObject(IndexSchema schema) { public static class Fld { public FldType ftype; + @SuppressWarnings({"rawtypes"}) public List vals; @Override public String toString() { @@ -1802,10 +1832,12 @@ public FldType(String fname, IVals numValues, Vals vals) { this.vals = vals; } + @SuppressWarnings({"rawtypes"}) public Comparable createValue() { return vals.get(); } + @SuppressWarnings({"rawtypes"}) public List createValues() { int nVals = numValues.getInt(); if (nVals <= 0) return null; @@ -1816,6 +1848,7 @@ public List createValues() { } public Fld createField() { + @SuppressWarnings({"rawtypes"}) List vals = createValues(); if (vals == null) return null; @@ -1837,6 +1870,7 @@ public static void assertResponseValues(SolrResponseBase rsp, Object... assertio } }); } + @SuppressWarnings({"rawtypes"}) public Map indexDocs(List descriptor, Map model, int nDocs) throws Exception { if (model == null) { model = new LinkedHashMap<>(); @@ -1881,6 +1915,7 @@ public Map indexDocs(List descriptor, Map docList = (List)response; int order = 0; for (Map doc : docList) { @@ -1957,6 +1992,7 @@ public static Comparator createComparator(final String field, final boolean } return new Comparator() { + @SuppressWarnings({"rawtypes"}) private Comparable zeroVal(Comparable template) { if (template == null) return null; if (template instanceof String) return null; // fast-path for string @@ -1971,8 +2007,11 @@ private Comparable zeroVal(Comparable template) { } @Override + @SuppressWarnings({"unchecked"}) public int compare(Doc o1, Doc o2) { + @SuppressWarnings({"rawtypes"}) Comparable v1 = o1.getFirstValue(field); + @SuppressWarnings({"rawtypes"}) Comparable v2 = o2.getFirstValue(field); v1 = v1 == null ? zeroVal(v2) : v1; @@ -2026,7 +2065,7 @@ public static String toJSON(Doc doc) { out.append('['); } boolean firstVal = true; - for (Comparable val : fld.vals) { + for (@SuppressWarnings({"rawtypes"})Comparable val : fld.vals) { if (firstVal) firstVal=false; else out.append(','); out.append(JSONUtil.toJSON(val)); @@ -2043,6 +2082,7 @@ public static String toJSON(Doc doc) { } /** Return a Map from field value to a list of document ids */ + @SuppressWarnings({"rawtypes"}) public Map> invertField(Map model, String field) { Map> value_to_id = new HashMap<>(); @@ -2335,12 +2375,16 @@ public boolean compareSolrInputDocument(Object expected, Object actual) { if (!(sdoc2.get(key2).getFirstValue() instanceof SolrInputDocument)) { return false; } + @SuppressWarnings({"rawtypes"}) Collection col1 = (Collection) val1; + @SuppressWarnings({"rawtypes"}) Collection col2 = (Collection) val2; if (col1.size() != col2.size()) { return false; } + @SuppressWarnings({"unchecked"}) Iterator colIter1 = col1.iterator(); + @SuppressWarnings({"unchecked"}) Iterator colIter2 = col2.iterator(); while (colIter1.hasNext()) { if (!compareSolrInputDocument(colIter1.next(), colIter2.next())) { @@ -2543,7 +2587,7 @@ public static CloudSolrClient getCloudSolrClient(String zkHost, boolean shardLea } public static CloudSolrClientBuilder newCloudSolrClient(String zkHost) { - return (CloudSolrClientBuilder) new CloudSolrClientBuilder(Collections.singletonList(zkHost), Optional.empty()); + return new CloudSolrClientBuilder(Collections.singletonList(zkHost), Optional.empty()); } /** @@ -2907,6 +2951,7 @@ protected static void systemClearPropertySolrDisableShardsWhitelist() { System.clearProperty(SYSTEM_PROPERTY_SOLR_DISABLE_SHARDS_WHITELIST); } + @SuppressWarnings({"unchecked"}) protected T pickRandom(T... 
options) { return options[random().nextInt(options.length)]; } @@ -2956,7 +3001,9 @@ public static void randomizeUpdateLogImpl() { } else { System.setProperty(UPDATELOG_SYSPROP,"solr.UpdateLog"); } - log.info("updateLog impl={}", System.getProperty(UPDATELOG_SYSPROP)); + if (log.isInfoEnabled()) { + log.info("updateLog impl={}", System.getProperty(UPDATELOG_SYSPROP)); + } } /** @@ -2969,6 +3016,7 @@ public static void randomizeUpdateLogImpl() { * @lucene.experimental * @lucene.internal */ + @SuppressWarnings({"rawtypes"}) private static void randomizeNumericTypesProperties() { final boolean useDV = random().nextBoolean(); @@ -2982,7 +3030,7 @@ private static void randomizeNumericTypesProperties() { if (RandomizedContext.current().getTargetClass().isAnnotationPresent(SolrTestCaseJ4.SuppressPointFields.class) || (! usePoints)) { - log.info("Using TrieFields (NUMERIC_POINTS_SYSPROP=false) w/NUMERIC_DOCVALUES_SYSPROP="+useDV); + log.info("Using TrieFields (NUMERIC_POINTS_SYSPROP=false) w/NUMERIC_DOCVALUES_SYSPROP={}", useDV); org.apache.solr.schema.PointField.TEST_HACK_IGNORE_USELESS_TRIEFIELD_ARGS = false; private_RANDOMIZED_NUMERIC_FIELDTYPES.put(Integer.class, "solr.TrieIntField"); @@ -2994,7 +3042,7 @@ private static void randomizeNumericTypesProperties() { System.setProperty(NUMERIC_POINTS_SYSPROP, "false"); } else { - log.info("Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP="+useDV); + log.info("Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP={}", useDV); org.apache.solr.schema.PointField.TEST_HACK_IGNORE_USELESS_TRIEFIELD_ARGS = true; private_RANDOMIZED_NUMERIC_FIELDTYPES.put(Integer.class, "solr.IntPointField"); @@ -3032,7 +3080,7 @@ private static void clearNumericTypesProperties() { org.apache.solr.schema.PointField.TEST_HACK_IGNORE_USELESS_TRIEFIELD_ARGS = false; System.clearProperty("solr.tests.numeric.points"); System.clearProperty("solr.tests.numeric.points.dv"); - for (Class c : RANDOMIZED_NUMERIC_FIELDTYPES.keySet()) { + for (@SuppressWarnings({"rawtypes"})Class c : RANDOMIZED_NUMERIC_FIELDTYPES.keySet()) { System.clearProperty("solr.tests." 
+ c.getSimpleName() + "FieldType"); } private_RANDOMIZED_NUMERIC_FIELDTYPES.clear(); @@ -3048,6 +3096,7 @@ private static SolrDocument toSolrDoc(SolrInputDocument sid) { private static boolean isChildDoc(Object o) { if(o instanceof Collection) { + @SuppressWarnings({"rawtypes"}) Collection col = (Collection) o; if(col.size() == 0) { return false; @@ -3057,6 +3106,7 @@ private static boolean isChildDoc(Object o) { return o instanceof SolrInputDocument; } + @SuppressWarnings({"rawtypes"}) private static final Map private_RANDOMIZED_NUMERIC_FIELDTYPES = new HashMap<>(); /** @@ -3068,6 +3118,7 @@ private static boolean isChildDoc(Object o) { * * @see #randomizeNumericTypesProperties */ + @SuppressWarnings({"rawtypes"}) protected static final Map RANDOMIZED_NUMERIC_FIELDTYPES = Collections.unmodifiableMap(private_RANDOMIZED_NUMERIC_FIELDTYPES); diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java index a58c39bc826c..5bf7bbbfaf2f 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java @@ -204,7 +204,8 @@ public static void waitForRecoveriesToFinish(String collection, public static void waitForCollectionToDisappear(String collection, ZkStateReader zkStateReader, boolean failOnTimeout, int timeoutSeconds) throws Exception { - log.info("Wait for collection to disappear - collection: " + collection + " failOnTimeout:" + failOnTimeout + " timeout (sec):" + timeoutSeconds); + log.info("Wait for collection to disappear - collection: {} failOnTimeout:{} timeout (sec):{}" + , collection, failOnTimeout, timeoutSeconds); zkStateReader.waitForState(collection, timeoutSeconds, TimeUnit.SECONDS, (docCollection) -> docCollection == null); log.info("Collection has disappeared - collection:{}", collection); @@ -221,7 +222,10 @@ static void waitForNewLeader(CloudSolrClient cloudClient, String shardName, Repl DocCollection coll = clusterState.getCollection("collection1"); Slice slice = coll.getSlice(shardName); if (slice.getLeader() != null && !slice.getLeader().equals(oldLeader) && slice.getLeader().getState() == Replica.State.ACTIVE) { - log.info("Old leader {}, new leader {}. New leader got elected in {} ms", oldLeader, slice.getLeader(),timeOut.timeElapsed(MILLISECONDS) ); + if (log.isInfoEnabled()) { + log.info("Old leader {}, new leader {}. New leader got elected in {} ms" + , oldLeader, slice.getLeader(), timeOut.timeElapsed(MILLISECONDS)); + } break; } @@ -240,7 +244,10 @@ static void waitForNewLeader(CloudSolrClient cloudClient, String shardName, Repl Slice slice = docCollection.getSlice(shardName); if (slice != null && slice.getLeader() != null && !slice.getLeader().equals(oldLeader) && slice.getLeader().getState() == Replica.State.ACTIVE) { - log.info("Old leader {}, new leader {}. New leader got elected in {} ms", oldLeader, slice.getLeader(), timeOut.timeElapsed(MILLISECONDS) ); + if (log.isInfoEnabled()) { + log.info("Old leader {}, new leader {}. 
New leader got elected in {} ms" + , oldLeader, slice.getLeader(), timeOut.timeElapsed(MILLISECONDS)); + } return true; } return false; diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java index 1361af93ebf5..2160681bc099 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java @@ -383,20 +383,12 @@ public static void waitForCollection(ZkStateReader reader, String collection, in } } - protected String defaultStateFormat = String.valueOf( 1 + random().nextInt(2)); - - protected String getStateFormat() { - String stateFormat = System.getProperty("tests.solr.stateFormat", null); - if (stateFormat != null) { - defaultStateFormat = stateFormat; - } - return defaultStateFormat; // random - } - protected List createJettys(int numJettys) throws Exception { List jettys = Collections.synchronizedList(new ArrayList<>()); List clients = Collections.synchronizedList(new ArrayList<>()); + @SuppressWarnings({"rawtypes"}) List createReplicaRequests = Collections.synchronizedList(new ArrayList<>()); + @SuppressWarnings({"rawtypes"}) List createPullReplicaRequests = Collections.synchronizedList(new ArrayList<>()); StringBuilder sb = new StringBuilder(); @@ -408,7 +400,6 @@ protected List createJettys(int numJettys) throws Exception { // jetty instances are started) assertEquals(0, CollectionAdminRequest .createCollection(DEFAULT_COLLECTION, "conf1", sliceCount, 1) // not real rep factor! - .setStateFormat(Integer.parseInt(getStateFormat())) .setCreateNodeSet("") // empty node set prevents creation of cores .process(cloudClient).getStatus()); @@ -419,8 +410,11 @@ protected List createJettys(int numJettys) throws Exception { ExecutorService customThreadPool = ExecutorUtil.newMDCAwareCachedThreadPool(new SolrNamedThreadFactory("closeThreadPool")); int numOtherReplicas = numJettys - getPullReplicaCount() * sliceCount; - - log.info("Creating jetty instances pullReplicaCount={} numOtherReplicas={}", getPullReplicaCount(), numOtherReplicas); + + if (log.isInfoEnabled()) { + log.info("Creating jetty instances pullReplicaCount={} numOtherReplicas={}" + , getPullReplicaCount(), numOtherReplicas); + } int addedReplicas = 0; for (int i = 1; i <= numJettys; i++) { @@ -435,7 +429,10 @@ protected List createJettys(int numJettys) throws Exception { if (numOtherReplicas > 0) { numOtherReplicas--; if (useTlogReplicas()) { - log.info("create jetty {} in directory {} of type {} in shard {}", i, jettyDir, Replica.Type.TLOG, ((currentI % sliceCount) + 1)); + if (log.isInfoEnabled()) { + log.info("create jetty {} in directory {} of type {} in shard {}" + , i, jettyDir, Replica.Type.TLOG, ((currentI % sliceCount) + 1)); // logOk + } customThreadPool.submit(() -> { try { JettySolrRunner j = createJetty(jettyDir, useJettyDataDir ? 
getDataDir(testDir + "/jetty" @@ -464,7 +461,10 @@ protected List createJettys(int numJettys) throws Exception { addedReplicas++; } else { - log.info("create jetty {} in directory {} of type {} for shard{}", i, jettyDir, Replica.Type.NRT, ((currentI % sliceCount) + 1)); + if (log.isInfoEnabled()) { + log.info("create jetty {} in directory {} of type {} for shard{}" + , i, jettyDir, Replica.Type.NRT, ((currentI % sliceCount) + 1)); // logOk + } customThreadPool.submit(() -> { try { @@ -492,7 +492,7 @@ protected List createJettys(int numJettys) throws Exception { addedReplicas++; } } else { - log.info("create jetty {} in directory {} of type {} for shard{}", i, jettyDir, Replica.Type.PULL, ((currentI % sliceCount) + 1)); + log.info("create jetty {} in directory {} of type {} for shard{}", i, jettyDir, Replica.Type.PULL, ((currentI % sliceCount) + 1)); // logOk customThreadPool.submit(() -> { try { JettySolrRunner j = createJetty(jettyDir, useJettyDataDir ? getDataDir(testDir + "/jetty" @@ -524,7 +524,7 @@ protected List createJettys(int numJettys) throws Exception { customThreadPool = ExecutorUtil.newMDCAwareCachedThreadPool(new SolrNamedThreadFactory("createReplicaRequests")); - for (CollectionAdminRequest r : createReplicaRequests) { + for (@SuppressWarnings({"rawtypes"})CollectionAdminRequest r : createReplicaRequests) { customThreadPool.submit(() -> { CollectionAdminResponse response; try { @@ -542,7 +542,7 @@ protected List createJettys(int numJettys) throws Exception { customThreadPool = ExecutorUtil .newMDCAwareCachedThreadPool(new SolrNamedThreadFactory("createPullReplicaRequests")); - for (CollectionAdminRequest r : createPullReplicaRequests) { + for (@SuppressWarnings({"rawtypes"})CollectionAdminRequest r : createPullReplicaRequests) { customThreadPool.submit(() -> { CollectionAdminResponse response; try { @@ -588,7 +588,9 @@ protected List createJettys(int numJettys) throws Exception { } protected void waitForLiveNode(JettySolrRunner j) throws InterruptedException, TimeoutException { - log.info("waitForLiveNode: {}", j.getNodeName()); + if (log.isInfoEnabled()) { + log.info("waitForLiveNode: {}", j.getNodeName()); + } cloudClient.getZkStateReader().waitForLiveNodes(30, TimeUnit.SECONDS, SolrCloudTestCase.containsLiveNode(j.getNodeName())); } @@ -945,10 +947,10 @@ protected static int sendDocsWithRetry(CloudSolrClient cloudClient, String colle } catch (Exception exc) { Throwable rootCause = SolrException.getRootCause(exc); if (++numRetries <= maxRetries) { - log.warn("ERROR: " + rootCause + " ... Sleeping for " + waitBeforeRetry + " seconds before re-try ..."); + log.warn("ERROR: {} ... Sleeping for {} seconds before re-try ...", rootCause, waitBeforeRetry); Thread.sleep(waitBeforeRetry * 1000L); } else { - log.error("No more retries available! Add batch failed due to: " + rootCause); + log.error("No more retries available! 
Add batch failed due to: {}", rootCause); throw exc; } } @@ -1058,7 +1060,7 @@ protected void waitForRecoveriesToFinish(String collection, boolean verbose) protected void waitForRecoveriesToFinish(boolean verbose, long timeoutSeconds) throws Exception { ZkStateReader zkStateReader = cloudClient.getZkStateReader(); - super.waitForRecoveriesToFinish(DEFAULT_COLLECTION, zkStateReader, verbose, true, timeoutSeconds); + waitForRecoveriesToFinish(DEFAULT_COLLECTION, zkStateReader, verbose, true, timeoutSeconds); } protected void checkQueries() throws Exception { @@ -1418,7 +1420,7 @@ protected void randomlyEnableAutoSoftCommit() { } protected void enableAutoSoftCommit(int time) { - log.info("Turning on auto soft commit: " + time); + log.info("Turning on auto soft commit: {}", time); for (List jettyList : shardToJetty.values()) { for (CloudJettyRunner jetty : jettyList) { CoreContainer cores = jetty.jetty.getCoreContainer(); @@ -1801,10 +1803,7 @@ protected CollectionAdminResponse createCollection(Map> co collectionInfos.put(collectionName, list); } params.set("name", collectionName); - if ("1".equals(getStateFormat()) ) { - log.info("Creating collection with stateFormat=1: " + collectionName); - params.set(DocCollection.STATE_FORMAT, "1"); - } + @SuppressWarnings({"rawtypes"}) SolrRequest request = new QueryRequest(params); request.setPath("/admin/collections"); @@ -1981,8 +1980,10 @@ protected CloudSolrClient getCommonCloudSolrClient() { random().nextBoolean(), 5000, 120000); commonCloudSolrClient.setDefaultCollection(DEFAULT_COLLECTION); commonCloudSolrClient.connect(); - log.info("Created commonCloudSolrClient with updatesToLeaders={} and parallelUpdates={}", - commonCloudSolrClient.isUpdatesToLeaders(), commonCloudSolrClient.isParallelUpdates()); + if (log.isInfoEnabled()) { + log.info("Created commonCloudSolrClient with updatesToLeaders={} and parallelUpdates={}", + commonCloudSolrClient.isUpdatesToLeaders(), commonCloudSolrClient.isParallelUpdates()); + } } } return commonCloudSolrClient; @@ -2119,14 +2120,18 @@ protected List ensureAllReplicasAreActive(String testCollectionName, St leader = shard.getLeader(); assertNotNull(leader); - log.info("Found "+replicas.size()+" replicas and leader on "+ - leader.getNodeName()+" for "+shardId+" in "+testCollectionName); + if (log.isInfoEnabled()) { + log.info("Found {} replicas and leader on {} for {} in {}" + , replicas.size(), leader.getNodeName(), shardId, testCollectionName); + } // ensure all replicas are "active" and identify the non-leader replica for (Replica replica : replicas) { if (!zkShardTerms.canBecomeLeader(replica.getName()) || replica.getState() != Replica.State.ACTIVE) { - log.info("Replica {} is currently {}", replica.getName(), replica.getState()); + if (log.isInfoEnabled()) { + log.info("Replica {} is currently {}", replica.getName(), replica.getState()); + } allReplicasUp = false; } @@ -2150,7 +2155,9 @@ protected List ensureAllReplicasAreActive(String testCollectionName, St if (notLeaders.isEmpty()) fail("Didn't isolate any replicas that are not the leader! 
ClusterState: " + printClusterStateInfo()); - log.info("Took {} ms to see all replicas become active.", timer.getTime()); + if (log.isInfoEnabled()) { + log.info("Took {} ms to see all replicas become active.", timer.getTime()); + } List replicas = new ArrayList<>(notLeaders.values()); return replicas; @@ -2186,7 +2193,7 @@ protected boolean reloadCollection(Replica replica, String testCollectionName) t Thread.sleep(1000); // send reload command for the collection - log.info("Sending RELOAD command for "+testCollectionName); + log.info("Sending RELOAD command for {}", testCollectionName); ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.RELOAD.toString()); params.set("name", testCollectionName); @@ -2231,7 +2238,7 @@ protected void logReplicaTypesReplicationInfo(String collectionName, ZkStateRead logReplicationDetails(r, builder); } } - log.info("Summary of the cluster: " + builder.toString()); + log.info("Summary of the cluster: {}", builder); } protected void waitForReplicationFromReplicas(String collectionName, ZkStateReader zkStateReader, TimeOut timeout) throws KeeperException, InterruptedException, IOException { @@ -2265,7 +2272,10 @@ protected void waitForReplicationFromReplicas(String collectionName, ZkStateRead while (true) { long replicaIndexVersion = getIndexVersion(pullReplica); if (leaderIndexVersion == replicaIndexVersion) { - log.info("Leader replica's version ({}) in sync with replica({}): {} == {}", leader.getName(), pullReplica.getName(), leaderIndexVersion, replicaIndexVersion); + if (log.isInfoEnabled()) { + log.info("Leader replica's version ({}) in sync with replica({}): {} == {}" + , leader.getName(), pullReplica.getName(), leaderIndexVersion, replicaIndexVersion); + } // Make sure the host is serving the correct version try (SolrCore core = containers.get(pullReplica.getNodeName()).getCore(pullReplica.getCoreName())) { @@ -2276,7 +2286,10 @@ protected void waitForReplicationFromReplicas(String collectionName, ZkStateRead if (Long.parseLong(servingVersion) == replicaIndexVersion) { break; } else { - log.info("Replica {} has the correct version replicated, but the searcher is not ready yet. Replicated version: {}, Serving version: {}", pullReplica.getName(), replicaIndexVersion, servingVersion); + if (log.isInfoEnabled()) { + log.info("Replica {} has the correct version replicated, but the searcher is not ready yet. 
Replicated version: {}, Serving version: {}" + , pullReplica.getName(), replicaIndexVersion, servingVersion); + } } } finally { if (ref != null) ref.decref(); @@ -2288,9 +2301,15 @@ protected void waitForReplicationFromReplicas(String collectionName, ZkStateRead fail(String.format(Locale.ROOT, "Timed out waiting for replica %s (%d) to replicate from leader %s (%d)", pullReplica.getName(), replicaIndexVersion, leader.getName(), leaderIndexVersion)); } if (leaderIndexVersion > replicaIndexVersion) { - log.info("{} version is {} and leader's is {}, will wait for replication", pullReplica.getName(), replicaIndexVersion, leaderIndexVersion); + if (log.isInfoEnabled()) { + log.info("{} version is {} and leader's is {}, will wait for replication" + , pullReplica.getName(), replicaIndexVersion, leaderIndexVersion); + } } else { - log.info("Leader replica's version ({}) is lower than pull replica({}): {} < {}", leader.getName(), pullReplica.getName(), leaderIndexVersion, replicaIndexVersion); + if (log.isInfoEnabled()) { + log.info("Leader replica's version ({}) is lower than pull replica({}): {} < {}" + , leader.getName(), pullReplica.getName(), leaderIndexVersion, replicaIndexVersion); + } } } Thread.sleep(1000); @@ -2387,6 +2406,7 @@ static RequestStatusState getRequestState(int requestId, SolrClient client) thro static RequestStatusState getRequestState(String requestId, SolrClient client) throws IOException, SolrServerException { CollectionAdminResponse response = getStatusResponse(requestId, client); + @SuppressWarnings({"rawtypes"}) NamedList innerResponse = (NamedList) response.getResponse().get("status"); return RequestStatusState.fromKey((String) innerResponse.get("state")); } diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java b/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java index ee489d041411..7dfdc36d7ce5 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java @@ -518,11 +518,11 @@ public void run() { } public static void monkeyLog(String msg) { - log.info("monkey: " + msg); + log.info("monkey: {}", msg); } public static void monkeyLog(String msg, Object...logParams) { - log.info("monkey: " + msg, logParams); + log.info("monkey: {}", msg, logParams); } public void stopTheMonkey() { @@ -654,7 +654,7 @@ private static void logCollectionStateSummary(String collectionName, ZkStateRead for (Slice slice:docCollection.getSlices()) { builder.append(slice.getName()).append(": {"); for (Replica replica:slice.getReplicas()) { - log.info(replica.toString()); + log.info("{}", replica); java.util.regex.Matcher m = portPattern.matcher(replica.getBaseUrl()); m.find(); String jettyPort = m.group(1); diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/CloudInspectUtil.java b/solr/test-framework/src/java/org/apache/solr/cloud/CloudInspectUtil.java index a72f06bb7abc..bc3b1086e86b 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/CloudInspectUtil.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/CloudInspectUtil.java @@ -48,23 +48,28 @@ public class CloudInspectUtil { * @param bDeleteFails null or list of the ids of deletes that failed for b * @return true if the difference in a and b is legal */ + @SuppressWarnings({"unchecked"}) public static boolean checkIfDiffIsLegal(SolrDocumentList a, SolrDocumentList b, String aName, String bName, Set bAddFails, Set bDeleteFails) { boolean legal = true; + 
@SuppressWarnings({"rawtypes"}) Set setA = new HashSet<>(); for (SolrDocument sdoc : a) { - setA.add(new HashMap(sdoc)); + setA.add(new HashMap<>(sdoc)); } + @SuppressWarnings({"rawtypes"}) Set setB = new HashSet<>(); for (SolrDocument sdoc : b) { - setB.add(new HashMap(sdoc)); + setB.add(new HashMap<>(sdoc)); } + @SuppressWarnings({"rawtypes"}) Set onlyInA = new HashSet<>(setA); onlyInA.removeAll(setB); + @SuppressWarnings({"rawtypes"}) Set onlyInB = new HashSet<>(setB); onlyInB.removeAll(setA); @@ -75,7 +80,7 @@ public static boolean checkIfDiffIsLegal(SolrDocumentList a, System.err.println("###### Only in " + aName + ": " + onlyInA); System.err.println("###### Only in " + bName + ": " + onlyInB); - for (Map doc : onlyInA) { + for (@SuppressWarnings({"rawtypes"})Map doc : onlyInA) { if (bAddFails == null || !bAddFails.contains(doc.get("id"))) { legal = false; // System.err.println("###### Only in " + aName + ": " + doc.get("id")); @@ -86,7 +91,7 @@ public static boolean checkIfDiffIsLegal(SolrDocumentList a, } } - for (Map doc : onlyInB) { + for (@SuppressWarnings({"rawtypes"})Map doc : onlyInB) { if (bDeleteFails == null || !bDeleteFails.contains(doc.get("id"))) { legal = false; // System.err.println("###### Only in " + bName + ": " + doc.get("id")); @@ -109,6 +114,7 @@ public static boolean checkIfDiffIsLegal(SolrDocumentList a, * @param bName label for the second list * @return the documents only in list a */ + @SuppressWarnings({"unchecked", "rawtypes"}) public static Set showDiff(SolrDocumentList a, SolrDocumentList b, String aName, String bName) { System.err.println("######" + aName + ": " + toStr(a, 10)); @@ -187,12 +193,12 @@ public static boolean compareResults(SolrClient controlClient, SolrClient cloudC q = SolrTestCaseJ4.params("q", "*:*", "rows", "100000", "fl", "id", "tests", "checkShardConsistency(vsControl)/getIds"); // add a tag to aid in debugging via logs controlDocList = controlClient.query(q).getResults(); if (controlDocs != controlDocList.getNumFound()) { - log.error("Something changed! control now " + controlDocList.getNumFound()); + log.error("Something changed! control now {}", controlDocList.getNumFound()); } cloudDocList = cloudClient.query(q).getResults(); if (cloudClientDocs != cloudDocList.getNumFound()) { - log.error("Something changed! cloudClient now " + cloudDocList.getNumFound()); + log.error("Something changed! 
cloudClient now {}", cloudDocList.getNumFound()); } if (controlDocs != cloudClientDocs && (addFails != null || deleteFails != null)) { @@ -203,6 +209,7 @@ public static boolean compareResults(SolrClient controlClient, SolrClient cloudC } } + @SuppressWarnings({"rawtypes"}) Set differences = CloudInspectUtil.showDiff(controlDocList, cloudDocList, "controlDocList", "cloudDocList"); @@ -213,7 +220,7 @@ public static boolean compareResults(SolrClient controlClient, SolrClient cloudC // use filter() to allow being parsed as 'terms in set' query instead of a (weighted/scored) // BooleanQuery so we don't trip too many boolean clauses StringBuilder ids = new StringBuilder("filter(id:("); - for (Map doc : differences) { + for (@SuppressWarnings({"rawtypes"})Map doc : differences) { ids.append(" ").append(doc.get("id")); foundId = true; } @@ -230,7 +237,7 @@ public static boolean compareResults(SolrClient controlClient, SolrClient cloudC SolrDocumentList a = controlClient.query(q, SolrRequest.METHOD.POST).getResults(); SolrDocumentList b = cloudClient.query(q, SolrRequest.METHOD.POST).getResults(); - log.error("controlClient :" + a + "\n\tcloudClient :" + b); + log.error("controlClient :{}\n\tcloudClient :{}", a, b); } } catch (Exception e) { // swallow any exceptions, this is just useful for producing debug output, diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/ConfigRequest.java b/solr/test-framework/src/java/org/apache/solr/cloud/ConfigRequest.java index 12b5a37df56b..6b4c61770d04 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/ConfigRequest.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/ConfigRequest.java @@ -29,6 +29,7 @@ * e.g. to add custom components, handlers, parsers, etc. to an otherwise * generic configset. 
*/ +@SuppressWarnings({"rawtypes"}) public class ConfigRequest extends SolrRequest { protected final String message; diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/IpTables.java b/solr/test-framework/src/java/org/apache/solr/cloud/IpTables.java index 4ddbd27f5983..7e73f6acc569 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/IpTables.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/IpTables.java @@ -37,7 +37,7 @@ public class IpTables { public static void blockPort(int port) throws IOException, InterruptedException { if (ENABLED) { - log.info("Block port with iptables: " + port); + log.info("Block port with iptables: {}", port); BLOCK_PORTS.add(port); runCmd(("iptables -A INPUT -p tcp --dport " + port + " -j DROP") .split("\\s")); @@ -48,7 +48,7 @@ public static void blockPort(int port) throws IOException, InterruptedException public static void unblockPort(int port) throws IOException, InterruptedException { if (ENABLED && BLOCK_PORTS.contains(port)) { - log.info("Unblock port with iptables: " + port); + log.info("Unblock port with iptables: {}", port); runCmd(("iptables -D INPUT -p tcp --dport " + port + " -j DROP") .split("\\s")); runCmd(("iptables -D OUTPUT -p tcp --dport " + port + " -j DROP") diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java index 4df46dfcf5c7..f65374f35780 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java @@ -355,7 +355,9 @@ private void waitForAllNodes(int numServers, int timeoutSeconds) throws IOExcept public void waitForNode(JettySolrRunner jetty, int timeoutSeconds) throws IOException, InterruptedException, TimeoutException { - log.info("waitForNode: {}", jetty.getNodeName()); + if (log.isInfoEnabled()) { + log.info("waitForNode: {}", jetty.getNodeName()); + } ZkStateReader reader = getSolrClient().getZkStateReader(); @@ -693,7 +695,9 @@ public void expireZkSession(JettySolrRunner jetty) { zkClient.getSolrZooKeeper().closeCnxn(); long sessionId = zkClient.getSolrZooKeeper().getSessionId(); zkServer.expire(sessionId); - log.info("Expired zookeeper session {} from node {}", sessionId, jetty.getBaseUrl()); + if (log.isInfoEnabled()) { + log.info("Expired zookeeper session {} from node {}", sessionId, jetty.getBaseUrl()); + } } } @@ -794,7 +798,9 @@ public static CollectionStatePredicate expectedShardsAndActiveReplicas(int expec } public void waitForJettyToStop(JettySolrRunner runner) throws TimeoutException { - log.info("waitForJettyToStop: {}", runner.getLocalPort()); + if (log.isInfoEnabled()) { + log.info("waitForJettyToStop: {}", runner.getLocalPort()); + } TimeOut timeout = new TimeOut(15, TimeUnit.SECONDS, TimeSource.NANO_TIME); while(!timeout.hasTimedOut()) { if (runner.isStopped()) { diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudAuthTestCase.java b/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudAuthTestCase.java index 9485c80a438e..2d23857e4a79 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudAuthTestCase.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudAuthTestCase.java @@ -64,6 +64,7 @@ public class SolrCloudAuthTestCase extends SolrCloudTestCase { private static final List AUTH_METRICS_TIMER_KEYS = Collections.singletonList("requestTimes"); private static final String METRICS_PREFIX_PKI = 
"SECURITY./authentication/pki."; private static final String METRICS_PREFIX = "SECURITY./authentication."; + @SuppressWarnings({"rawtypes"}) public static final Predicate NOT_NULL_PREDICATE = o -> o != null; private static final List AUDIT_METRICS_KEYS = Arrays.asList("count"); private static final List AUTH_METRICS_TO_COMPARE = Arrays.asList("requests", "authenticated", "passThrough", "failWrongCredentials", "failMissingCredentials", "errors"); @@ -188,6 +189,7 @@ protected void verifySecurityStatus(HttpClient cl, String url, String objPath, } + @SuppressWarnings({"unchecked"}) private static void verifySecurityStatus(HttpClient cl, String url, String objPath, Object expected, int count, String authHeader) throws IOException, InterruptedException { boolean success = false; @@ -198,6 +200,7 @@ private static void verifySecurityStatus(HttpClient cl, String url, String objPa if (authHeader != null) setAuthorizationHeader(get, authHeader); HttpResponse rsp = cl.execute(get); s = EntityUtils.toString(rsp.getEntity()); + @SuppressWarnings({"rawtypes"}) Map m = null; try { m = (Map) Utils.fromJSONString(s); @@ -207,6 +210,7 @@ private static void verifySecurityStatus(HttpClient cl, String url, String objPa Utils.consumeFully(rsp.getEntity()); Object actual = Utils.getObjectByPath(m, true, hierarchy); if (expected instanceof Predicate) { + @SuppressWarnings({"rawtypes"}) Predicate predicate = (Predicate) expected; if (predicate.test(actual)) { success = true; diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java b/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java index fccb4abdeec8..b646e2e82f65 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java @@ -234,11 +234,13 @@ public MiniSolrCloudCluster build() throws Exception { } public Builder withDefaultClusterProperty(String key, String value) { + @SuppressWarnings({"unchecked"}) HashMap defaults = (HashMap) this.clusterProperties.get(CollectionAdminParams.DEFAULTS); if (defaults == null) { defaults = new HashMap<>(); this.clusterProperties.put(CollectionAdminParams.DEFAULTS, defaults); } + @SuppressWarnings({"unchecked"}) HashMap cluster = (HashMap) defaults.get(CollectionAdminParams.CLUSTER); if (cluster == null) { cluster = new HashMap<>(); @@ -347,7 +349,9 @@ public static CollectionStatePredicate activeClusterShape(int expectedShards, in return (liveNodes, collectionState) -> { if (collectionState == null) return false; - log.info("active slice count: " + collectionState.getActiveSlices().size() + " expected:" + expectedShards); + if (log.isInfoEnabled()) { + log.info("active slice count: {} expected: {}", collectionState.getActiveSlices().size(), expectedShards); + } if (collectionState.getActiveSlices().size() != expectedShards) return false; return compareActiveReplicaCountsForShards(expectedReplicas, liveNodes, collectionState); @@ -389,7 +393,7 @@ private static boolean compareActiveReplicaCountsForShards(int expectedReplicas, } } - log.info("active replica count: " + activeReplicas + " expected replica count: " + expectedReplicas); + log.info("active replica count: {} expected replica count: {}", activeReplicas, expectedReplicas); return activeReplicas == expectedReplicas; @@ -445,6 +449,7 @@ protected static CoreStatus getCoreStatus(Replica replica) throws IOException, S } } + @SuppressWarnings({"rawtypes"}) protected NamedList waitForResponse(Predicate predicate, 
SolrRequest request, int intervalInMillis, int numRetries, String messageOnFail) { log.info("waitForResponse: {}", request); int i = 0; diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/StoppableCommitThread.java b/solr/test-framework/src/java/org/apache/solr/cloud/StoppableCommitThread.java index 4d60b4e06e1d..17b3eb6cd698 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/StoppableCommitThread.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/StoppableCommitThread.java @@ -58,7 +58,9 @@ public void run() { break; } } - log.info("StoppableCommitThread finished. Committed {} times. Failed {} times.", numCommits.get(), numFails.get()); + if (log.isInfoEnabled()) { + log.info("StoppableCommitThread finished. Committed {} times. Failed {} times.", numCommits.get(), numFails.get()); + } } @Override diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/StoppableSearchThread.java b/solr/test-framework/src/java/org/apache/solr/cloud/StoppableSearchThread.java index c579f22def3d..7e55231dd43e 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/StoppableSearchThread.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/StoppableSearchThread.java @@ -68,7 +68,7 @@ public void run() { } } - log.info("num searches done:" + numSearches + " with " + queryFails + " fails"); + log.info("num searches done: {} with {} fails", numSearches, queryFails); } @Override diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/ZkTestServer.java b/solr/test-framework/src/java/org/apache/solr/cloud/ZkTestServer.java index 7195cd1f4629..6f684998d266 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/ZkTestServer.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/ZkTestServer.java @@ -21,7 +21,6 @@ import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.cloud.SolrZkClient; import org.apache.solr.common.cloud.ZkNodeProps; -import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.util.IOUtils; import org.apache.solr.common.util.ObjectReleaseTracker; import org.apache.solr.common.util.TimeSource; @@ -178,7 +177,9 @@ public void updateForWatch(String key, Watcher watcher) { } public void updateForFire(WatchedEvent event) { - log.debug("Watch fired: {}: {}", desc, event.getPath()); + if (log.isDebugEnabled()) { + log.debug("Watch fired: {}: {}", desc, event.getPath()); + } counters.decrementAndGet(event.getPath()); } @@ -559,7 +560,7 @@ public void setClientPort(int clientPort) { } else { this.clientPortAddress = new InetSocketAddress(clientPort); } - log.info("client port:" + this.clientPortAddress); + log.info("client port: {}", this.clientPortAddress); } }; try { @@ -592,7 +593,7 @@ public void setClientPort(int clientPort) { } cnt++; } - log.info("start zk server on port:" + port); + log.info("start zk server on port: {}", port); waitForServerUp(getZkHost(), 30000); @@ -705,7 +706,7 @@ public static class HostPort { public static String send4LetterWord(String host, int port, String cmd) throws IOException { - log.info("connecting to " + host + " " + port); + log.info("connecting to {} {}", host, port); BufferedReader reader = null; try (Socket sock = new Socket(host, port)) { OutputStream outstream = sock.getOutputStream(); @@ -730,7 +731,7 @@ public static String send4LetterWord(String host, int port, String cmd) } public static List parseHostPortList(String hplist) { - log.info("parse host and port list: " + hplist); + log.info("parse host and port list: {}", hplist); ArrayList alist 
= new ArrayList<>(); for (String hp : hplist.split(",")) { int idx = hp.lastIndexOf(':'); @@ -803,7 +804,9 @@ public static void putConfig(String confName, SolrZkClient zkClient, String zkCh File file = new File(solrhome, "collection1" + File.separator + "conf" + File.separator + srcName); if (!file.exists()) { - log.info("skipping " + file.getAbsolutePath() + " because it doesn't exist"); + if (log.isInfoEnabled()) { + log.info("skipping {} because it doesn't exist", file.getAbsolutePath()); + } return; } @@ -811,7 +814,9 @@ public static void putConfig(String confName, SolrZkClient zkClient, String zkCh if (zkChroot != null) { destPath = zkChroot + destPath; } - log.info("put " + file.getAbsolutePath() + " to " + destPath); + if (log.isInfoEnabled()) { + log.info("put {} to {}", file.getAbsolutePath(), destPath); + } zkClient.makePath(destPath, file, false, true); } @@ -840,9 +845,6 @@ public void buildZooKeeper(File solrhome, String config, String schema) throws E ops.add(Op.create(path, null, chRootClient.getZkACLProvider().getACLsToAdd(path), CreateMode.PERSISTENT)); chRootClient.multi(ops, true); - // this workaround is acceptable until we remove legacyCloud because we just init a single core here - String defaultClusterProps = "{\""+ZkStateReader.LEGACY_CLOUD+"\":\"true\"}"; - chRootClient.makePath(ZkStateReader.CLUSTER_PROPS, defaultClusterProps.getBytes(StandardCharsets.UTF_8), CreateMode.PERSISTENT, true); // for now, always upload the config and schema to the canonical names putConfig("conf1", chRootClient, solrhome, config, "solrconfig.xml"); putConfig("conf1", chRootClient, solrhome, schema, "schema.xml"); diff --git a/solr/test-framework/src/java/org/apache/solr/core/MockTracerConfigurator.java b/solr/test-framework/src/java/org/apache/solr/core/MockTracerConfigurator.java index fd02bda7efe0..b1c7cb9168db 100644 --- a/solr/test-framework/src/java/org/apache/solr/core/MockTracerConfigurator.java +++ b/solr/test-framework/src/java/org/apache/solr/core/MockTracerConfigurator.java @@ -29,7 +29,7 @@ public Tracer getTracer() { } @Override - public void init(NamedList args) { + public void init(@SuppressWarnings({"rawtypes"})NamedList args) { } } diff --git a/solr/test-framework/src/java/org/apache/solr/handler/component/TrackingShardHandlerFactory.java b/solr/test-framework/src/java/org/apache/solr/handler/component/TrackingShardHandlerFactory.java index 4b8c1d49c0cc..8e77b0c0c373 100644 --- a/solr/test-framework/src/java/org/apache/solr/handler/component/TrackingShardHandlerFactory.java +++ b/solr/test-framework/src/java/org/apache/solr/handler/component/TrackingShardHandlerFactory.java @@ -156,7 +156,7 @@ public void submit(ShardRequest sreq, String shard, ModifiableSolrParams params) } @Override - protected NamedList request(String url, SolrRequest req) throws IOException, SolrServerException { + protected NamedList request(String url, @SuppressWarnings({"rawtypes"})SolrRequest req) throws IOException, SolrServerException { try (SolrClient client = new HttpSolrClient.Builder(url).withHttpClient(httpClient).build()) { return client.request(req); } @@ -218,6 +218,7 @@ public static void setTrackingQueue(List runners, Queue [] entries = new NamedListEntry[q.length / 2]; for (int i = 0; i < q.length; i += 2) { entries[i/2] = new NamedListEntry<>(q[i], q[i+1]); } + @SuppressWarnings({"rawtypes"}) NamedList nl = new NamedList(entries); if(nl.get("wt" ) == null) nl.add("wt","xml"); return new LocalSolrQueryRequest(TestHarness.this.getCore(), nl); diff --git a/solr/webapp/build.gradle 
diff --git a/solr/webapp/build.gradle b/solr/webapp/build.gradle
index df875b7ea7c9..a687fdb38ce6 100644
--- a/solr/webapp/build.gradle
+++ b/solr/webapp/build.gradle
@@ -20,6 +20,8 @@ plugins {
   id 'war'
 }
 
+description = 'Solr webapp'
+
 configurations {
   war {}
 }
diff --git a/solr/webapp/web/js/angular/controllers/index.js b/solr/webapp/web/js/angular/controllers/index.js
index f8b074721ce4..e931d59e1eb5 100644
--- a/solr/webapp/web/js/angular/controllers/index.js
+++ b/solr/webapp/web/js/angular/controllers/index.js
@@ -21,7 +21,7 @@ solrAdminApp.controller('IndexController', function($scope, System, Cores, Const
   System.get(function(data) {
     $scope.system = data;
 
-    if (username in data.security) {
+    if ("username" in data.security) {
       // Needed for Kerberos, since this is the only place from where
       // Kerberos username can be obtained.
       sessionStorage.setItem("auth.username", data.security.username);
diff --git a/solr/webapp/web/js/angular/controllers/login.js b/solr/webapp/web/js/angular/controllers/login.js
index 8127c6fdac0b..b76ec1f4a8a1 100644
--- a/solr/webapp/web/js/angular/controllers/login.js
+++ b/solr/webapp/web/js/angular/controllers/login.js
@@ -47,7 +47,7 @@ solrAdminApp.controller('LoginController',
         sessionStorage.setItem("auth.scheme", authScheme);
       }
 
-      var supportedSchemes = ['Basic', 'Bearer', 'Negotiate'];
+      var supportedSchemes = ['Basic', 'Bearer', 'Negotiate', 'Certificate'];
       $scope.authSchemeSupported = supportedSchemes.includes(authScheme);
 
       if (authScheme === 'Bearer') {
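The @SuppressWarnings({"rawtypes"}) annotations added in the test-framework files above sit on individual parameters and declarations rather than on whole methods, which keeps the rest of each method under normal generics checking. A small sketch of the difference, with illustrative names (GuardedLoggingSketch-style hypotheticals, not code from the patch):

    import java.util.ArrayList;
    import java.util.List;

    public class SuppressionScopeSketch {
      // Method-level suppression silences rawtype warnings for the entire body.
      @SuppressWarnings({"rawtypes"})
      static int sizeOfBroad(List args) {
        return args.size();
      }

      // Parameter-level suppression (the style used in the patch) covers only
      // the one raw declaration; the rest of the method stays fully checked.
      static int sizeOfNarrow(@SuppressWarnings({"rawtypes"}) List args) {
        List<String> names = new ArrayList<>(); // still type-checked
        names.add("example");
        return args.size() + names.size();
      }
    }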
diff --git a/solr/webapp/web/partials/login.html b/solr/webapp/web/partials/login.html
index 29c8c71501b0..c21f26222950 100644
--- a/solr/webapp/web/partials/login.html
+++ b/solr/webapp/web/partials/login.html
@@ -76,6 +76,23 @@
     Kerberos Authentication
 
     WWW-Authenticate: {{wwwAuthHeader}}
+
+    Certificate Authentication
+
+    Your browser did not provide the required information to authenticate using PKI Certificates.
+    Please check that your computer has a valid PKI certificate for communicating with Solr,
+    and that your browser is properly configured to provide that certificate when required.
+    For more information, consult
+    Solr's Certificate Authentication documentation.
+
+    The response from the server was:
+
+    HTTP 401 {{statusText}}
+WWW-Authenticate: {{wwwAuthHeader}}
+
 
     OpenID Connect (JWT) authentication
 
diff --git a/versions.lock b/versions.lock
index ee2d41a7c315..cbd7a0b25389 100644
--- a/versions.lock
+++ b/versions.lock
@@ -1,7 +1,7 @@
 # Run ./gradlew --write-locks to regenerate this file
 com.adobe.xmp:xmpcore:5.1.3 (1 constraints: 0b050a36)
 com.beust:jcommander:1.35 (1 constraints: b50c1901)
-com.carrotsearch:hppc:0.8.1 (2 constraints: af0fd8a6)
+com.carrotsearch:hppc:0.8.2 (2 constraints: b00ffaa6)
 com.carrotsearch.randomizedtesting:randomizedtesting-runner:2.7.6 (1 constraints: 11051036)
 com.carrotsearch.thirdparty:simple-xml-safe:2.7.1 (1 constraints: a60a82ca)
 com.cybozu.labs:langdetect:1.1-20120112 (1 constraints: 5c066d5e)
@@ -11,7 +11,7 @@ com.fasterxml.jackson.core:jackson-annotations:2.10.1 (2 constraints: 331dcd4e)
 com.fasterxml.jackson.core:jackson-core:2.10.1 (3 constraints: 633586b7)
 com.fasterxml.jackson.core:jackson-databind:2.10.1 (3 constraints: 941aba96)
 com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.10.1 (1 constraints: 3605303b)
-com.github.ben-manes.caffeine:caffeine:2.8.0 (1 constraints: 0c050d36)
+com.github.ben-manes.caffeine:caffeine:2.8.4 (1 constraints: 10051136)
 com.github.jnr:jffi:1.2.18 (1 constraints: b20902ab)
 com.github.jnr:jnr-constants:0.9.12 (4 constraints: ed2c9d5d)
 com.github.jnr:jnr-enxio:0.19 (2 constraints: 2a167d08)
@@ -46,11 +46,10 @@ com.sun.mail:gimap:1.5.1 (1 constraints: 09050036)
 com.sun.mail:javax.mail:1.5.1 (2 constraints: 830d2844)
 com.tdunning:t-digest:3.1 (1 constraints: a804212c)
 com.vaadin.external.google:android-json:0.0.20131108.vaadin1 (1 constraints: 34092a9e)
-commons-cli:commons-cli:1.2 (1 constraints: a7041c2c)
+commons-cli:commons-cli:1.4 (1 constraints: a9041e2c)
 commons-codec:commons-codec:1.13 (1 constraints: d904f430)
 commons-collections:commons-collections:3.2.2 (1 constraints: 09050236)
-commons-fileupload:commons-fileupload:1.3.3 (1 constraints: 0905fc35)
-commons-io:commons-io:2.6 (2 constraints: bf145380)
+commons-io:commons-io:2.6 (1 constraints: ac04232c)
 commons-logging:commons-logging:1.1.3 (2 constraints: c8149e7f)
 de.l3s.boilerpipe:boilerpipe:1.1.0 (1 constraints: 0405f335)
 io.dropwizard.metrics:metrics-core:4.1.5 (5 constraints: 2543e4c0)
@@ -117,9 +116,9 @@ org.apache.kerby:kerby-asn1:1.0.1 (2 constraints: 001155df)
 org.apache.kerby:kerby-config:1.0.1 (1 constraints: 860b05e6)
 org.apache.kerby:kerby-pkix:1.0.1 (2 constraints: 741065ca)
 org.apache.kerby:kerby-util:1.0.1 (2 constraints: 6518bdb6)
-org.apache.logging.log4j:log4j-api:2.11.2 (4 constraints: c83394ae)
-org.apache.logging.log4j:log4j-core:2.11.2 (3 constraints: 102588ba)
-org.apache.logging.log4j:log4j-slf4j-impl:2.11.2 (1 constraints: 3805343b)
+org.apache.logging.log4j:log4j-api:2.13.2 (4 constraints: d033f2b0)
+org.apache.logging.log4j:log4j-core:2.13.2 (2 constraints: 0d16b224)
+org.apache.logging.log4j:log4j-slf4j-impl:2.13.2 (1 constraints: 3a053a3b)
 org.apache.opennlp:opennlp-tools:1.9.1 (1 constraints: 0d050c36)
 org.apache.pdfbox:fontbox:2.0.17 (1 constraints: 3c05323b)
 org.apache.pdfbox:jempbox:1.8.16 (1 constraints: 42054b3b)
@@ -223,7 +222,7 @@ org.apache.kerby:kerb-identity:1.0.1 (1 constraints: 0405f135)
 org.apache.kerby:kerb-server:1.0.1 (1 constraints: 0405f135)
 org.apache.kerby:kerb-simplekdc:1.0.1 (1 constraints: 0405f135)
 org.apache.kerby:kerby-kdc:1.0.1 (1 constraints: 0405f135)
-org.apache.logging.log4j:log4j-1.2-api:2.11.2 (1 constraints: 3805343b)
+org.apache.logging.log4j:log4j-1.2-api:2.13.2 (1 constraints: 3a053a3b)
 org.hsqldb:hsqldb:2.4.0 (1 constraints: 08050136)
 org.locationtech.jts:jts-core:1.15.0 (1 constraints: 3905383b)
 org.mockito:mockito-core:2.23.4 (1 constraints: 3d05403b)
diff --git a/versions.props b/versions.props
index 6eb127918929..1b696c9a1ef0 100644
--- a/versions.props
+++ b/versions.props
@@ -1,11 +1,11 @@
 com.adobe.xmp:xmpcore=5.1.3
 com.carrotsearch.randomizedtesting:*=2.7.6
-com.carrotsearch:hppc=0.8.1
+com.carrotsearch:hppc=0.8.2
 com.cybozu.labs:langdetect=1.1-20120112
 com.drewnoakes:metadata-extractor=2.11.0
 com.epam:parso=2.0.11
 com.fasterxml.jackson*:*=2.10.1
-com.github.ben-manes.caffeine:caffeine=2.8.0
+com.github.ben-manes.caffeine:caffeine=2.8.4
 com.github.virtuald:curvesapi=1.06
 com.github.zafarkhaja:java-semver=0.9.0
 com.google.guava:guava=25.1-jre
@@ -25,10 +25,9 @@ com.sun.mail:*=1.5.1
 com.tdunning:t-digest=3.1
 com.vaadin.external.google:android-json=0.0.20131108.vaadin1
 commons-beanutils:commons-beanutils=1.9.3
-commons-cli:commons-cli=1.2
+commons-cli:commons-cli=1.4
 commons-codec:commons-codec=1.13
 commons-collections:commons-collections=3.2.2
-commons-fileupload:commons-fileupload=1.3.3
 commons-io:commons-io=2.6
 commons-logging:commons-logging=1.1.3
 de.l3s.boilerpipe:boilerpipe=1.1.0
@@ -67,7 +66,7 @@ org.apache.httpcomponents:httpcore=4.4.12
 org.apache.httpcomponents:httpmime=4.5.10
 org.apache.james:apache-mime4j*=0.8.3
 org.apache.kerby:*=1.0.1
-org.apache.logging.log4j:*=2.11.2
+org.apache.logging.log4j:*=2.13.2
 org.apache.opennlp:opennlp-tools=1.9.1
 org.apache.pdfbox:*=2.0.17
 org.apache.pdfbox:jempbox=1.8.16
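As the comment at the top of versions.lock says, the lock file is generated rather than hand-edited: versions.props declares the requested versions (for example, com.carrotsearch:hppc=0.8.2), and running ./gradlew --write-locks re-resolves the dependency graph and rewrites versions.lock with the pinned results and their constraint hashes. That is why each dependency upgrade above touches both files in tandem, and why removing commons-fileupload drops it from both.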