diff --git a/dev-tools/maven/README.maven b/dev-tools/maven/README.maven index 2a741c136e2d..e214190f2d20 100644 --- a/dev-tools/maven/README.maven +++ b/dev-tools/maven/README.maven @@ -16,7 +16,7 @@ A. How to use nightly Jenkins-built Lucene/Solr Maven artifacts The most recently produced nightly Jenkins-built Lucene and Solr Maven snapshot artifacts are available in the Apache Snapshot repository here: - http://repository.apache.org/snapshots + https://repository.apache.org/snapshots An example POM snippet: @@ -27,7 +27,7 @@ A. How to use nightly Jenkins-built Lucene/Solr Maven artifacts apache.snapshots Apache Snapshot Repository - http://repository.apache.org/snapshots + https://repository.apache.org/snapshots false @@ -57,12 +57,12 @@ C. How to deploy Maven artifacts to a repository as in B. above, with the addition of two system properties: ant -Dm2.repository.id=my-repo-id \ - -Dm2.repository.url=http://example.org/my/repo \ + -Dm2.repository.url=https://example.org/my/repo \ generate-maven-artifacts The repository ID given in the above command corresponds to an entry in either your ~/.m2/settings.xml or ~/.ant/settings.xml. See - for more information. + for more information. (Note that as of version 2.1.3, Maven Ant Tasks cannot handle encrypted passwords.) 
diff --git a/dev-tools/maven/pom.xml.template b/dev-tools/maven/pom.xml.template index bcf5c4b8dc15..b08489591965 100644 --- a/dev-tools/maven/pom.xml.template +++ b/dev-tools/maven/pom.xml.template @@ -32,15 +32,15 @@ pom Grandparent POM for Apache Lucene Core and Apache Solr Grandparent POM for Apache Lucene Core and Apache Solr - http://lucene.apache.org + https://lucene.apache.org lucene solr - http://git-wip-us.apache.org/repos/asf/lucene-solr.git - https://git-wip-us.apache.org/repos/asf/lucene-solr.git - https://git1-us-west.apache.org/repos/asf?p=lucene-solr.git;a=tree + https://gitbox.apache.org/repos/asf/lucene-solr.git + https://gitbox.apache.org/repos/asf/lucene-solr.git + https://gitbox.apache.org/repos/asf?p=lucene-solr.git @spec.version@ yyyy-MM-dd HH:mm:ss 8 @@ -82,7 +82,7 @@ general-subscribe@lucene.apache.org general-unsubscribe@lucene.apache.org - http://mail-archives.apache.org/mod_mbox/lucene-general/ + https://mail-archives.apache.org/mod_mbox/lucene-general/ @@ -90,21 +90,21 @@ java-user-subscribe@lucene.apache.org java-user-unsubscribe@lucene.apache.org - http://mail-archives.apache.org/mod_mbox/lucene-java-user/ + https://mail-archives.apache.org/mod_mbox/lucene-java-user/ Java Developer List dev-subscribe@lucene.apache.org dev-unsubscribe@lucene.apache.org - http://mail-archives.apache.org/mod_mbox/lucene-dev/ + https://mail-archives.apache.org/mod_mbox/lucene-dev/ Java Commits List commits-subscribe@lucene.apache.org commits-unsubscribe@lucene.apache.org - http://mail-archives.apache.org/mod_mbox/lucene-java-commits/ + https://mail-archives.apache.org/mod_mbox/lucene-java-commits/ @@ -124,7 +124,7 @@ apache.snapshots Apache Snapshot Repository - http://repository.apache.org/snapshots + https://repository.apache.org/snapshots false @@ -264,7 +264,7 @@ . 
true - diff --git a/dev-tools/maven/solr/pom.xml.template b/dev-tools/maven/solr/pom.xml.template index 56aa1c58eedf..827eb26401a2 100644 --- a/dev-tools/maven/solr/pom.xml.template +++ b/dev-tools/maven/solr/pom.xml.template @@ -55,21 +55,21 @@ solr-user-subscribe@lucene.apache.org solr-user-unsubscribe@lucene.apache.org - http://mail-archives.apache.org/mod_mbox/solr-user/ + https://mail-archives.apache.org/mod_mbox/solr-user/ Java Developer List dev-subscribe@lucene.apache.org dev-unsubscribe@lucene.apache.org - http://mail-archives.apache.org/mod_mbox/lucene-dev/ + https://mail-archives.apache.org/mod_mbox/lucene-dev/ Java Commits List commits-subscribe@lucene.apache.org commits-unsubscribe@lucene.apache.org - http://mail-archives.apache.org/mod_mbox/lucene-java-commits/ + https://mail-archives.apache.org/mod_mbox/lucene-java-commits/ @@ -78,12 +78,12 @@ maven-restlet Public online Restlet repository - http://maven.restlet.org + https://maven.restlet.com releases.cloudera.com Cloudera Releases - https://repository.cloudera.com/artifactory/libs-release + https://repository.cloudera.com/artifactory/libs-release-local/ diff --git a/dev-tools/scripts/jenkins.build.ref.guide.sh b/dev-tools/scripts/jenkins.build.ref.guide.sh index f1a8bdb8c9df..c3203ee3fe7a 100755 --- a/dev-tools/scripts/jenkins.build.ref.guide.sh +++ b/dev-tools/scripts/jenkins.build.ref.guide.sh @@ -61,10 +61,11 @@ echoRun "rvm $RUBY_VERSION@$GEMSET" # Activate this project's gemset # Install gems in the gemset. Param --force disables dependency conflict detection. 
echoRun "gem install --force --version 3.5.0 jekyll" echoRun "gem uninstall --all --ignore-dependencies asciidoctor" # Get rid of all versions -echoRun "gem install --force --version 1.5.6.2 asciidoctor" -echoRun "gem install --force --version 2.1.0 jekyll-asciidoc" -echoRun "gem install --force --version 1.1.2 pygments.rb" -echoRun "gem install --force --version 3.0.9 slim" +echoRun "gem install --force --version 2.0.10 asciidoctor" +echoRun "gem install --force --version 3.0.0 jekyll-asciidoc" +echoRun "gem install --force --version 4.0.1 slim" +echoRun "gem install --force --version 2.0.10 tilt" +echoRun "gem install --force --version 1.1.5 concurrent-ruby" cd solr/solr-ref-guide diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index 90ab723f0ef0..ffc38d344a80 100644 --- a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -18,11 +18,19 @@ API Changes * LUCENE-8956: QueryRescorer now only sorts the first topN hits instead of all initial hits. (Paul Sanwald via Adrien Grand) +* LUCENE-8921: IndexSearcher.termStatistics() no longer takes a TermStates; it takes the docFreq and totalTermFreq. + And don't call if docFreq <= 0. The previous implementation survives as deprecated and final. It's removed in 9.0. + (Bruno Roustant, David Smiley, Alan Woodward) + +* LUCENE-8990: PointValues#estimateDocCount(visitor) estimates the number of documents that would be matched by + the given IntersectVisitor. The method is used to compute the cost() of ScorerSuppliers instead of + PointValues#estimatePointCount(visitor). (Ignacio Vera, Adrien Grand) + New Features * LUCENE-8936: Add SpanishMinimalStemFilter (vinod kumar via Tomoko Uchida) -* LUCENE-8764: Add "export all terms" feature to Luke. (Leonardo Menezes via Tomoko Uchida) +* LUCENE-8764 LUCENE-8945: Add "export all terms and doc freqs" feature to Luke with delimiters. 
(Leonardo Menezes, Amish Shah via Tomoko Uchida) * LUCENE-8747: Composite Matches from multiple subqueries now allow access to their submatches, and a new NamedMatches API allows marking of subqueries @@ -67,6 +75,18 @@ Improvements * LUCENE-8952: Use a sort key instead of true distance in NearestNeighbor (Julie Tibshirani). +* LUCENE-8620: Tessellator labels the edges of the generated triangles whether they belong to + the original polygon. This information is added to the triangle encoding. (Ignacio Vera) + +* LUCENE-8964: Fix geojson shape parsing on string arrays in properties + (Alexander Reelsen) + +* LUCENE-8976: Use exact distance between point and bounding rectangle in FloatPointNearestNeighbor. (Ignacio Vera) + +* LUCENE-8966: The Korean analyzer now splits tokens on boundaries between digits and alphabetic characters. (Jim Ferenczi) + +* LUCENE-8984: MoreLikeThis MLT is biased for uncommon fields (Andy Hind via Anshum Gupta) + Optimizations * LUCENE-8922: DisjunctionMaxQuery more efficiently leverages impacts to skip @@ -81,11 +101,29 @@ the total hits is not requested. * LUCENE-8755: spatial-extras quad and packed quad prefix trees now index points faster. (Chongchen Chen, David Smiley) +* LUCENE-8860: add additional leaf node level optimizations in LatLonShapeBoundingBoxQuery. + (Igor Motov via Ignacio Vera) + +* LUCENE-8968: Improve performance of WITHIN and DISJOINT queries for Shape queries by + doing just one pass whenever possible. (Ignacio Vera) + +* LUCENE-8939: Introduce shared count based early termination across multiple slices + (Atri Sharma) + +* LUCENE-8980: Blocktree's seekExact now short-circuits false if the term isn't in the min-max range of the segment. + Large perf gain for ID/time like data when populated sequentially. 
(Guoqiang Jiang) + Bug Fixes * LUCENE-8755: spatial-extras quad and packed quad prefix trees could throw a NullPointerException for certain cell edge coordinates (Chongchen Chen, David Smiley) +* LUCENE-9005: BooleanQuery.visit() would pull subVisitors from its parent visitor, rather + than from a visitor for its own specific query. This could cause problems when BQ was + nested under another BQ. Instead, we now pull a MUST subvisitor, pass it to any MUST + subclauses, and then pull SHOULD, MUST_NOT and FILTER visitors from it rather than from + the parent. (Alan Woodward) + Other * LUCENE-8778 LUCENE-8911 LUCENE-8957: Define analyzer SPI names as static final fields and document the names in Javadocs. @@ -93,6 +131,18 @@ Other * LUCENE-8758: QuadPrefixTree: removed levelS and levelN fields which weren't used. (Amish Shah) +* LUCENE-8975: Code Cleanup: Use entryset for map iteration wherever possible. + +* LUCENE-8993, LUCENE-8807: Changed all repository and download references in build files + to HTTPS. (Uwe Schindler) + +* LUCENE-8998: Fix OverviewImplTest.testIsOptimized reproducible failure. (Tomoko Uchida) + +* LUCENE-8999: LuceneTestCase.expectThrows now propagates assert/assumption failures up to the test + w/o wrapping in a new assertion failure unless the caller has explicitly expected them (hossman) + +* LUCENE-8062: GlobalOrdinalsWithScoreQuery is no longer eligible for query caching. 
(Jim Ferenczi) + ======================= Lucene 8.2.0 ======================= API Changes diff --git a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseTokenizer.java b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseTokenizer.java index ea57d1c28c89..7e7bc3a56221 100644 --- a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseTokenizer.java +++ b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseTokenizer.java @@ -202,7 +202,7 @@ public JapaneseTokenizer(UserDictionary userDictionary, boolean discardPunctuati } /** - * Create a new JapaneseTokenizer. + * Create a new JapaneseTokenizer using the system and unknown dictionaries shipped with Lucene. * * @param factory the AttributeFactory to use * @param userDictionary Optional: if non-null, user dictionary. @@ -211,13 +211,41 @@ public JapaneseTokenizer(UserDictionary userDictionary, boolean discardPunctuati */ public JapaneseTokenizer (AttributeFactory factory, UserDictionary userDictionary, boolean discardPunctuation, Mode mode) { + this(factory, + TokenInfoDictionary.getInstance(), + UnknownDictionary.getInstance(), + ConnectionCosts.getInstance(), + userDictionary, discardPunctuation, mode); + } + + /** + *

Create a new JapaneseTokenizer, supplying a custom system dictionary and unknown dictionary. + * This constructor provides an entry point for users that want to construct custom language models + * that can be used as input to {@link org.apache.lucene.analysis.ja.util.DictionaryBuilder}.

+ * + * @param factory the AttributeFactory to use + * @param systemDictionary a custom known token dictionary + * @param unkDictionary a custom unknown token dictionary + * @param connectionCosts custom token transition costs + * @param userDictionary Optional: if non-null, user dictionary. + * @param discardPunctuation true if punctuation tokens should be dropped from the output. + * @param mode tokenization mode. + * @lucene.experimental + */ + public JapaneseTokenizer(AttributeFactory factory, + TokenInfoDictionary systemDictionary, + UnknownDictionary unkDictionary, + ConnectionCosts connectionCosts, + UserDictionary userDictionary, + boolean discardPunctuation, + Mode mode) { super(factory); - dictionary = TokenInfoDictionary.getInstance(); - fst = dictionary.getFST(); - unkDictionary = UnknownDictionary.getInstance(); - characterDefinition = unkDictionary.getCharacterDefinition(); + this.dictionary = systemDictionary; + this.fst = dictionary.getFST(); + this.unkDictionary = unkDictionary; + this.characterDefinition = unkDictionary.getCharacterDefinition(); this.userDictionary = userDictionary; - costs = ConnectionCosts.getInstance(); + this.costs = connectionCosts; fstReader = fst.getBytesReader(); if (userDictionary != null) { userFST = userDictionary.getFST(); diff --git a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/BinaryDictionary.java b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/BinaryDictionary.java index a0e314d5697f..670a5a41d1b7 100644 --- a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/BinaryDictionary.java +++ b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/BinaryDictionary.java @@ -176,6 +176,17 @@ protected final InputStream getResource(String suffix) throws IOException { } } + public static final InputStream getResource(ResourceScheme scheme, String path) throws IOException { + switch(scheme) { + case CLASSPATH: + return getClassResource(path); + 
case FILE: + return Files.newInputStream(Paths.get(path)); + default: + throw new IllegalStateException("unknown resource scheme " + scheme); + } + } + // util, reused by ConnectionCosts and CharacterDefinition public static final InputStream getClassResource(Class clazz, String suffix) throws IOException { final InputStream is = clazz.getResourceAsStream(clazz.getSimpleName() + suffix); @@ -185,7 +196,7 @@ public static final InputStream getClassResource(Class clazz, String suffix) return is; } - private InputStream getClassResource(String path) throws IOException { + private static InputStream getClassResource(String path) throws IOException { final InputStream is = BinaryDictionary.class.getClassLoader().getResourceAsStream(path); if (is == null) { throw new FileNotFoundException("Not in classpath: " + path); diff --git a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/ConnectionCosts.java b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/ConnectionCosts.java index 30ebd14a5fb2..c886af2fdc64 100644 --- a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/ConnectionCosts.java +++ b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/ConnectionCosts.java @@ -37,12 +37,16 @@ public final class ConnectionCosts { private final short[][] costs; // array is backward IDs first since get is called using the same backward ID consecutively. maybe doesn't matter. - private ConnectionCosts() throws IOException { + /** + * @param scheme - scheme for loading resources (FILE or CLASSPATH). 
+ * @param path - where to load resources from, without the ".dat" suffix + */ + public ConnectionCosts(BinaryDictionary.ResourceScheme scheme, String path) throws IOException { InputStream is = null; short[][] costs = null; boolean success = false; try { - is = BinaryDictionary.getClassResource(getClass(), FILENAME_SUFFIX); + is = BinaryDictionary.getResource(scheme, path.replace('.', '/') + FILENAME_SUFFIX); is = new BufferedInputStream(is); final DataInput in = new InputStreamDataInput(is); CodecUtil.checkHeader(in, HEADER, VERSION, VERSION); @@ -68,7 +72,11 @@ private ConnectionCosts() throws IOException { this.costs = costs; } - + + private ConnectionCosts() throws IOException { + this(BinaryDictionary.ResourceScheme.CLASSPATH, ConnectionCosts.class.getName()); + } + public int get(int forwardId, int backwardId) { return costs[backwardId][forwardId]; } diff --git a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/TokenInfoDictionary.java b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/TokenInfoDictionary.java index 9a201a94a44b..77c96349eaf6 100644 --- a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/TokenInfoDictionary.java +++ b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/TokenInfoDictionary.java @@ -40,7 +40,7 @@ public final class TokenInfoDictionary extends BinaryDictionary { * @param resourcePath - where to load resources (dictionaries) from. If null, with CLASSPATH scheme only, use * this class's name as the path. 
*/ - TokenInfoDictionary(ResourceScheme resourceScheme, String resourcePath) throws IOException { + public TokenInfoDictionary(ResourceScheme resourceScheme, String resourcePath) throws IOException { super(resourceScheme, resourcePath); FST fst; try (InputStream is = new BufferedInputStream(getResource(FST_FILENAME_SUFFIX))) { diff --git a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/UnknownDictionary.java b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/UnknownDictionary.java index ed5d39a2168a..0a451b924042 100644 --- a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/UnknownDictionary.java +++ b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/UnknownDictionary.java @@ -26,6 +26,15 @@ public final class UnknownDictionary extends BinaryDictionary { private final CharacterDefinition characterDefinition = CharacterDefinition.getInstance(); + /** + * @param scheme scheme for loading resources (FILE or CLASSPATH). + * @param path where to load resources from; a path, including the file base name without + * extension; this is used to match multiple files with the same base name. 
+ */ + public UnknownDictionary(ResourceScheme scheme, String path) throws IOException { + super(scheme, path); + } + private UnknownDictionary() throws IOException { super(); } diff --git a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/UserDictionary.java b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/UserDictionary.java index 0684599ed931..23098f83d89f 100644 --- a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/UserDictionary.java +++ b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/UserDictionary.java @@ -194,11 +194,11 @@ public TokenInfoFST getFST() { */ private int[][] toIndexArray(Map input) { ArrayList result = new ArrayList<>(); - for (int i : input.keySet()) { - int[] wordIdAndLength = input.get(i); + for (Map.Entry entry : input.entrySet()) { + int[] wordIdAndLength = entry.getValue(); int wordId = wordIdAndLength[0]; // convert length to index - int current = i; + int current = entry.getKey(); for (int j = 1; j < wordIdAndLength.length; j++) { // first entry is wordId offset int[] token = { wordId + j - 1, current, wordIdAndLength[j] }; result.add(token); diff --git a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/util/DictionaryBuilder.java b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/util/DictionaryBuilder.java index 373ce0970db6..58ab4e975e56 100644 --- a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/util/DictionaryBuilder.java +++ b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/util/DictionaryBuilder.java @@ -23,7 +23,17 @@ import java.util.Locale; /** - * Tool to build dictionaries. + * Tool to build dictionaries. Usage: + *
+ *    java -cp [lucene classpath] org.apache.lucene.analysis.ja.util.DictionaryBuilder \
+ *          ${inputDir} ${outputDir} ${encoding}
+ * 
+ * + *

The input directory is expected to include unk.def, matrix.def, plus any number of .csv + * files, roughly following the conventions of IPADIC. JapaneseTokenizer uses dictionaries built + * with this tool. Note that the input files required by this build generally must be generated from + * a corpus of real text using tools that are not part of Lucene.

+ * @lucene.experimental */ public class DictionaryBuilder { diff --git a/lucene/analysis/kuromoji/src/resources/org/apache/lucene/analysis/ja/dict/TokenInfoDictionary$fst.dat b/lucene/analysis/kuromoji/src/resources/org/apache/lucene/analysis/ja/dict/TokenInfoDictionary$fst.dat index 9328c53ee383..c06fd4a128c2 100644 Binary files a/lucene/analysis/kuromoji/src/resources/org/apache/lucene/analysis/ja/dict/TokenInfoDictionary$fst.dat and b/lucene/analysis/kuromoji/src/resources/org/apache/lucene/analysis/ja/dict/TokenInfoDictionary$fst.dat differ diff --git a/lucene/analysis/kuromoji/src/test/org/apache/lucene/analysis/ja/TestJapaneseTokenizer.java b/lucene/analysis/kuromoji/src/test/org/apache/lucene/analysis/ja/TestJapaneseTokenizer.java index 1a478db98353..a162d0183836 100644 --- a/lucene/analysis/kuromoji/src/test/org/apache/lucene/analysis/ja/TestJapaneseTokenizer.java +++ b/lucene/analysis/kuromoji/src/test/org/apache/lucene/analysis/ja/TestJapaneseTokenizer.java @@ -33,7 +33,10 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.ja.JapaneseTokenizer.Mode; +import org.apache.lucene.analysis.ja.dict.BinaryDictionary.ResourceScheme; import org.apache.lucene.analysis.ja.dict.ConnectionCosts; +import org.apache.lucene.analysis.ja.dict.TokenInfoDictionary; +import org.apache.lucene.analysis.ja.dict.UnknownDictionary; import org.apache.lucene.analysis.ja.dict.UserDictionary; import org.apache.lucene.analysis.ja.tokenattributes.*; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; @@ -441,6 +444,23 @@ public void testUserDict3() throws Exception { ); } + // Make sure loading custom dictionaries from classpath works: + public void testCustomDictionary() throws Exception { + Tokenizer tokenizer = new JapaneseTokenizer(newAttributeFactory(), + new TokenInfoDictionary(ResourceScheme.CLASSPATH, "org/apache/lucene/analysis/ja/dict/TokenInfoDictionary"), + new 
UnknownDictionary(ResourceScheme.CLASSPATH, "org/apache/lucene/analysis/ja/dict/UnknownDictionary"), + new ConnectionCosts(ResourceScheme.CLASSPATH, "org/apache/lucene/analysis/ja/dict/ConnectionCosts"), + readDict(), true, Mode.SEARCH); + try (Analyzer a = makeAnalyzer(tokenizer)) { + assertTokenStreamContents(a.tokenStream("foo", "abcd"), + new String[] { "a", "b", "cd" }, + new int[] { 0, 1, 2 }, + new int[] { 1, 2, 4 }, + 4 + ); + } + } + // HMM: fails (segments as a/b/cd/efghij)... because the // two paths have exactly equal paths (1 KNOWN + 1 // UNKNOWN) and we don't seem to favor longer KNOWN / diff --git a/lucene/analysis/nori/src/java/org/apache/lucene/analysis/ko/KoreanTokenizer.java b/lucene/analysis/nori/src/java/org/apache/lucene/analysis/ko/KoreanTokenizer.java index 000c743842c4..a799f2bc09a0 100644 --- a/lucene/analysis/nori/src/java/org/apache/lucene/analysis/ko/KoreanTokenizer.java +++ b/lucene/analysis/nori/src/java/org/apache/lucene/analysis/ko/KoreanTokenizer.java @@ -760,6 +760,7 @@ private void parse() throws IOException { unknownWordLength = 1; UnicodeScript scriptCode = UnicodeScript.of((int) firstCharacter); final boolean isPunct = isPunctuation(firstCharacter); + final boolean isDigit = Character.isDigit(firstCharacter); for (int posAhead = pos + 1; unknownWordLength < MAX_UNKNOWN_WORD_LENGTH; posAhead++) { int next = buffer.get(posAhead); if (next == -1) { @@ -774,7 +775,10 @@ private void parse() throws IOException { || chType == Character.NON_SPACING_MARK; if (sameScript + // split on punctuation && isPunctuation(ch, chType) == isPunct + // split on digit + && Character.isDigit(ch) == isDigit && characterDefinition.isGroup(ch)) { unknownWordLength++; } else { diff --git a/lucene/analysis/nori/src/resources/org/apache/lucene/analysis/ko/dict/TokenInfoDictionary$fst.dat b/lucene/analysis/nori/src/resources/org/apache/lucene/analysis/ko/dict/TokenInfoDictionary$fst.dat index 4bacb9ba5af0..fa0cb321fd06 100644 Binary files 
a/lucene/analysis/nori/src/resources/org/apache/lucene/analysis/ko/dict/TokenInfoDictionary$fst.dat and b/lucene/analysis/nori/src/resources/org/apache/lucene/analysis/ko/dict/TokenInfoDictionary$fst.dat differ diff --git a/lucene/analysis/nori/src/test/org/apache/lucene/analysis/ko/TestKoreanTokenizer.java b/lucene/analysis/nori/src/test/org/apache/lucene/analysis/ko/TestKoreanTokenizer.java index 2e9639ce6b97..a9c5bb771c6f 100644 --- a/lucene/analysis/nori/src/test/org/apache/lucene/analysis/ko/TestKoreanTokenizer.java +++ b/lucene/analysis/nori/src/test/org/apache/lucene/analysis/ko/TestKoreanTokenizer.java @@ -108,6 +108,22 @@ protected TokenStreamComponents createComponents(String fieldName) { }; } + public void testSeparateNumber() throws IOException { + assertAnalyzesTo(analyzer, "44사이즈", + new String[]{"44", "사이즈"}, + new int[]{0, 2}, + new int[]{2, 5}, + new int[]{1, 1} + ); + + assertAnalyzesTo(analyzer, "9.9사이즈", + new String[]{"9", "9", "사이즈"}, + new int[]{0, 2, 3}, + new int[]{1, 3, 6}, + new int[]{1, 1, 1} + ); + } + public void testSpaces() throws IOException { assertAnalyzesTo(analyzer, "화학 이외의 것", new String[]{"화학", "이외", "의", "것"}, diff --git a/lucene/common-build.xml b/lucene/common-build.xml index 3ed0841a59e4..17172ad62204 100644 --- a/lucene/common-build.xml +++ b/lucene/common-build.xml @@ -135,7 +135,25 @@ - + + + + + + + + + + + + + + + + + + + diff --git a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/SegmentTermsEnum.java b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/SegmentTermsEnum.java index c9d0ddf64195..56b6843efbc1 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/SegmentTermsEnum.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/SegmentTermsEnum.java @@ -321,6 +321,10 @@ public boolean seekExact(BytesRef target) throws IOException { throw new IllegalStateException("terms index was not loaded"); } + if (fr.size() > 0 && (target.compareTo(fr.getMin()) < 0 || 
target.compareTo(fr.getMax()) > 0)) { + return false; + } + term.grow(1 + target.length); assert clearEOF(); diff --git a/lucene/core/src/java/org/apache/lucene/document/FeatureField.java b/lucene/core/src/java/org/apache/lucene/document/FeatureField.java index 229e0571ed3e..2ca048c52704 100644 --- a/lucene/core/src/java/org/apache/lucene/document/FeatureField.java +++ b/lucene/core/src/java/org/apache/lucene/document/FeatureField.java @@ -197,7 +197,7 @@ public void close() { } } - private static final int MAX_FREQ = Float.floatToIntBits(Float.MAX_VALUE) >>> 15; + static final int MAX_FREQ = Float.floatToIntBits(Float.MAX_VALUE) >>> 15; static float decodeFeatureValue(float freq) { if (freq > MAX_FREQ) { diff --git a/lucene/core/src/java/org/apache/lucene/document/LatLonPointDistanceQuery.java b/lucene/core/src/java/org/apache/lucene/document/LatLonPointDistanceQuery.java index 62a70da66db6..28ed1b0910d8 100644 --- a/lucene/core/src/java/org/apache/lucene/document/LatLonPointDistanceQuery.java +++ b/lucene/core/src/java/org/apache/lucene/document/LatLonPointDistanceQuery.java @@ -177,7 +177,7 @@ && cost() > reader.maxDoc() / 2) { @Override public long cost() { if (cost == -1) { - cost = values.estimatePointCount(visitor); + cost = values.estimateDocCount(visitor); } assert cost >= 0; return cost; diff --git a/lucene/core/src/java/org/apache/lucene/document/LatLonPointInPolygonQuery.java b/lucene/core/src/java/org/apache/lucene/document/LatLonPointInPolygonQuery.java index 90e47a9af9b1..9006f160f17c 100644 --- a/lucene/core/src/java/org/apache/lucene/document/LatLonPointInPolygonQuery.java +++ b/lucene/core/src/java/org/apache/lucene/document/LatLonPointInPolygonQuery.java @@ -191,7 +191,7 @@ public Scorer get(long leadCost) throws IOException { public long cost() { if (cost == -1) { // Computing the cost may be expensive, so only do it if necessary - cost = values.estimatePointCount(visitor); + cost = values.estimateDocCount(visitor); assert cost >= 0; } return 
cost; diff --git a/lucene/core/src/java/org/apache/lucene/document/LongPoint.java b/lucene/core/src/java/org/apache/lucene/document/LongPoint.java index e6baa32fa8a6..28a6a59a3fee 100644 --- a/lucene/core/src/java/org/apache/lucene/document/LongPoint.java +++ b/lucene/core/src/java/org/apache/lucene/document/LongPoint.java @@ -20,12 +20,12 @@ import java.util.Collection; import org.apache.lucene.index.PointValues; -import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.PointInSetQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; diff --git a/lucene/core/src/java/org/apache/lucene/document/RangeFieldQuery.java b/lucene/core/src/java/org/apache/lucene/document/RangeFieldQuery.java index 4d254c5ea657..57d4019a1b43 100644 --- a/lucene/core/src/java/org/apache/lucene/document/RangeFieldQuery.java +++ b/lucene/core/src/java/org/apache/lucene/document/RangeFieldQuery.java @@ -362,7 +362,7 @@ public Scorer get(long leadCost) throws IOException { public long cost() { if (cost == -1) { // Computing the cost may be expensive, so only do it if necessary - cost = values.estimatePointCount(visitor); + cost = values.estimateDocCount(visitor); assert cost >= 0; } return cost; diff --git a/lucene/core/src/java/org/apache/lucene/geo/SimpleGeoJSONPolygonParser.java b/lucene/core/src/java/org/apache/lucene/geo/SimpleGeoJSONPolygonParser.java index 1e35a3db8c47..d33d11acf5f3 100644 --- a/lucene/core/src/java/org/apache/lucene/geo/SimpleGeoJSONPolygonParser.java +++ b/lucene/core/src/java/org/apache/lucene/geo/SimpleGeoJSONPolygonParser.java @@ -295,6 +295,8 @@ private List parseArray(String path) throws ParseException { o = null; } else if (ch == '-' || ch == '.' 
|| (ch >= '0' && ch <= '9')) { o = parseNumber(); + } else if (ch == '"') { + o = parseString(); } else { throw newParseException("expected another array or number while parsing array, not '" + ch + "'"); } diff --git a/lucene/core/src/java/org/apache/lucene/index/PointValues.java b/lucene/core/src/java/org/apache/lucene/index/PointValues.java index 0e3f27e4bdba..78c72baa7821 100644 --- a/lucene/core/src/java/org/apache/lucene/index/PointValues.java +++ b/lucene/core/src/java/org/apache/lucene/index/PointValues.java @@ -232,10 +232,36 @@ default void visit(DocIdSetIterator iterator, byte[] packedValue) throws IOExcep public abstract void intersect(IntersectVisitor visitor) throws IOException; /** Estimate the number of points that would be visited by {@link #intersect} + * with the given {@link IntersectVisitor}. This should run many times faster + * than {@link #intersect(IntersectVisitor)}. */ + public abstract long estimatePointCount(IntersectVisitor visitor); + + /** Estimate the number of documents that would be matched by {@link #intersect} * with the given {@link IntersectVisitor}. This should run many times faster * than {@link #intersect(IntersectVisitor)}. 
* @see DocIdSetIterator#cost */ - public abstract long estimatePointCount(IntersectVisitor visitor); + public long estimateDocCount(IntersectVisitor visitor) { + long estimatedPointCount = estimatePointCount(visitor); + int docCount = getDocCount(); + double size = size(); + if (estimatedPointCount >= size) { + // math all docs + return docCount; + } else if (size == docCount || estimatedPointCount == 0L ) { + // if the point count estimate is 0 or we have only single values + // return this estimate + return estimatedPointCount; + } else { + // in case of multi values estimate the number of docs using the solution provided in + // https://math.stackexchange.com/questions/1175295/urn-problem-probability-of-drawing-balls-of-k-unique-colors + // then approximate the solution for points per doc << size() which results in the expression + // D * (1 - ((N - n) / N)^(N/D)) + // where D is the total number of docs, N the total number of points and n the estimated point count + long docEstimate = (long) (docCount * (1d - Math.pow((size - estimatedPointCount) / size, size / docCount))); + return docEstimate == 0L ? 
1L : docEstimate; + } + } + /** Returns minimum value for each dimension, packed, or null if {@link #size} is 0 */ public abstract byte[] getMinPackedValue() throws IOException; diff --git a/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java b/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java index 0484fe70598a..1925e46a2bbb 100644 --- a/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java @@ -510,11 +510,19 @@ public Query rewrite(IndexReader reader) throws IOException { @Override public void visit(QueryVisitor visitor) { + QueryVisitor sub = visitor.getSubVisitor(Occur.MUST, this); for (BooleanClause.Occur occur : clauseSets.keySet()) { if (clauseSets.get(occur).size() > 0) { - QueryVisitor v = visitor.getSubVisitor(occur, this); - for (Query q : clauseSets.get(occur)) { - q.visit(v); + if (occur == Occur.MUST) { + for (Query q : clauseSets.get(occur)) { + q.visit(sub); + } + } + else { + QueryVisitor v = sub.getSubVisitor(occur, this); + for (Query q : clauseSets.get(occur)) { + q.visit(v); + } } } } diff --git a/lucene/core/src/java/org/apache/lucene/search/HitsThresholdChecker.java b/lucene/core/src/java/org/apache/lucene/search/HitsThresholdChecker.java new file mode 100644 index 000000000000..54536604660d --- /dev/null +++ b/lucene/core/src/java/org/apache/lucene/search/HitsThresholdChecker.java @@ -0,0 +1,119 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.search; + +import java.util.concurrent.atomic.AtomicLong; + +/** + * Used for defining custom algorithms to allow searches to early terminate + */ +abstract class HitsThresholdChecker { + /** + * Implementation of HitsThresholdChecker which allows global hit counting + */ + private static class GlobalHitsThresholdChecker extends HitsThresholdChecker { + private final int totalHitsThreshold; + private final AtomicLong globalHitCount; + + public GlobalHitsThresholdChecker(int totalHitsThreshold) { + + if (totalHitsThreshold < 0) { + throw new IllegalArgumentException("totalHitsThreshold must be >= 0, got " + totalHitsThreshold); + } + + this.totalHitsThreshold = totalHitsThreshold; + this.globalHitCount = new AtomicLong(); + } + + @Override + public void incrementHitCount() { + globalHitCount.incrementAndGet(); + } + + @Override + public boolean isThresholdReached(){ + return globalHitCount.get() > totalHitsThreshold; + } + + @Override + public ScoreMode scoreMode() { + return totalHitsThreshold == Integer.MAX_VALUE ? 
ScoreMode.COMPLETE : ScoreMode.TOP_SCORES; + } + + @Override + public int getHitsThreshold() { + return totalHitsThreshold; + } + } + + /** + * Default implementation of HitsThresholdChecker to be used for single threaded execution + */ + private static class LocalHitsThresholdChecker extends HitsThresholdChecker { + private final int totalHitsThreshold; + private int hitCount; + + public LocalHitsThresholdChecker(int totalHitsThreshold) { + + if (totalHitsThreshold < 0) { + throw new IllegalArgumentException("totalHitsThreshold must be >= 0, got " + totalHitsThreshold); + } + + this.totalHitsThreshold = totalHitsThreshold; + } + + @Override + public void incrementHitCount() { + ++hitCount; + } + + @Override + public boolean isThresholdReached() { + return hitCount > totalHitsThreshold; + } + + @Override + public ScoreMode scoreMode() { + return totalHitsThreshold == Integer.MAX_VALUE ? ScoreMode.COMPLETE : ScoreMode.TOP_SCORES; + } + + @Override + public int getHitsThreshold() { + return totalHitsThreshold; + } + } + + /* + * Returns a threshold checker that is useful for single threaded searches + */ + public static HitsThresholdChecker create(final int totalHitsThreshold) { + return new LocalHitsThresholdChecker(totalHitsThreshold); + } + + /* + * Returns a threshold checker that is based on a shared counter + */ + public static HitsThresholdChecker createShared(final int totalHitsThreshold) { + return new GlobalHitsThresholdChecker(totalHitsThreshold); + } + + public abstract void incrementHitCount(); + public abstract ScoreMode scoreMode(); + public abstract int getHitsThreshold(); + public abstract boolean isThresholdReached(); +} diff --git a/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java b/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java index a6198fb01fb9..5a033be1cb7a 100644 --- a/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java +++ b/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java @@ 
-396,9 +396,11 @@ public TopDocs searchAfter(ScoreDoc after, Query query, int numHits) throws IOEx final CollectorManager manager = new CollectorManager() { + private final HitsThresholdChecker hitsThresholdChecker = (executor == null || leafSlices.length <= 1) ? HitsThresholdChecker.create(TOTAL_HITS_THRESHOLD) : + HitsThresholdChecker.createShared(TOTAL_HITS_THRESHOLD); @Override public TopScoreDocCollector newCollector() throws IOException { - return TopScoreDocCollector.create(cappedNumHits, after, TOTAL_HITS_THRESHOLD); + return TopScoreDocCollector.create(cappedNumHits, after, hitsThresholdChecker); } @Override @@ -524,10 +526,13 @@ private TopFieldDocs searchAfter(FieldDoc after, Query query, int numHits, Sort final CollectorManager manager = new CollectorManager() { + private final HitsThresholdChecker hitsThresholdChecker = (executor == null || leafSlices.length <= 1) ? HitsThresholdChecker.create(TOTAL_HITS_THRESHOLD) : + HitsThresholdChecker.createShared(TOTAL_HITS_THRESHOLD); + @Override public TopFieldCollector newCollector() throws IOException { // TODO: don't pay the price for accurate hit counts by default - return TopFieldCollector.create(rewrittenSort, cappedNumHits, after, TOTAL_HITS_THRESHOLD); + return TopFieldCollector.create(rewrittenSort, cappedNumHits, after, hitsThresholdChecker); } @Override @@ -751,22 +756,36 @@ public LeafSlice(LeafReaderContext... leaves) { public String toString() { return "IndexSearcher(" + reader + "; executor=" + executor + ")"; } - + /** * Returns {@link TermStatistics} for a term, or {@code null} if * the term does not exist. - * - * This can be overridden for example, to return a term's statistics - * across a distributed collection. - * @lucene.experimental + * @deprecated in favor of {@link #termStatistics(Term, int, long)}. 
*/ - public TermStatistics termStatistics(Term term, TermStates context) throws IOException { + @Deprecated + public final TermStatistics termStatistics(Term term, TermStates context) throws IOException { if (context.docFreq() == 0) { return null; } else { - return new TermStatistics(term.bytes(), context.docFreq(), context.totalTermFreq()); + return termStatistics(term, context.docFreq(), context.totalTermFreq()); } } + + /** + * Returns {@link TermStatistics} for a term. + *

+ * This can be overridden for example, to return a term's statistics + * across a distributed collection. + * + * @param docFreq The document frequency of the term. It must be greater or equal to 1. + * @param totalTermFreq The total term frequency. + * @return A {@link TermStatistics} (never null). + * @lucene.experimental + */ + public TermStatistics termStatistics(Term term, int docFreq, long totalTermFreq) throws IOException { + // This constructor will throw an exception if docFreq <= 0. + return new TermStatistics(term.bytes(), docFreq, totalTermFreq); + } /** * Returns {@link CollectionStatistics} for a field, or {@code null} if diff --git a/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java b/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java index 455512336107..b5da7b391950 100644 --- a/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java @@ -235,11 +235,8 @@ protected Similarity.SimScorer getStats(IndexSearcher searcher) throws IOExcepti ts = TermStates.build(context, term, scoreMode.needsScores()); termStates.put(term, ts); } - if (scoreMode.needsScores()) { - TermStatistics termStatistics = searcher.termStatistics(term, ts); - if (termStatistics != null) { - allTermStats.add(termStatistics); - } + if (scoreMode.needsScores() && ts.docFreq() > 0) { + allTermStats.add(searcher.termStatistics(term, ts.docFreq(), ts.totalTermFreq())); } } } diff --git a/lucene/core/src/java/org/apache/lucene/search/PhraseQuery.java b/lucene/core/src/java/org/apache/lucene/search/PhraseQuery.java index e477d1c7991a..88b4c5b5c20e 100644 --- a/lucene/core/src/java/org/apache/lucene/search/PhraseQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/PhraseQuery.java @@ -430,9 +430,9 @@ protected Similarity.SimScorer getStats(IndexSearcher searcher) throws IOExcepti final Term term = terms[i]; states[i] = TermStates.build(context, term, 
scoreMode.needsScores()); if (scoreMode.needsScores()) { - TermStatistics termStatistics = searcher.termStatistics(term, states[i]); - if (termStatistics != null) { - termStats[termUpTo++] = termStatistics; + TermStates ts = states[i]; + if (ts.docFreq() > 0) { + termStats[termUpTo++] = searcher.termStatistics(term, ts.docFreq(), ts.totalTermFreq()); } } } diff --git a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java index 8aa87a31c547..6660b3a2cfcb 100644 --- a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java @@ -317,7 +317,7 @@ && cost() > reader.maxDoc() / 2) { public long cost() { if (cost == -1) { // Computing the cost may be expensive, so only do it if necessary - cost = values.estimatePointCount(visitor); + cost = values.estimateDocCount(visitor); assert cost >= 0; } return cost; diff --git a/lucene/core/src/java/org/apache/lucene/search/SynonymQuery.java b/lucene/core/src/java/org/apache/lucene/search/SynonymQuery.java index e3e6d8bc9589..763c8bc25b16 100644 --- a/lucene/core/src/java/org/apache/lucene/search/SynonymQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/SynonymQuery.java @@ -234,9 +234,10 @@ class SynonymWeight extends Weight { long totalTermFreq = 0; termStates = new TermStates[terms.length]; for (int i = 0; i < termStates.length; i++) { - termStates[i] = TermStates.build(searcher.getTopReaderContext(), terms[i].term, true); - TermStatistics termStats = searcher.termStatistics(terms[i].term, termStates[i]); - if (termStats != null) { + TermStates ts = TermStates.build(searcher.getTopReaderContext(), terms[i].term, true); + termStates[i] = ts; + if (ts.docFreq() > 0) { + TermStatistics termStats = searcher.termStatistics(terms[i].term, ts.docFreq(), ts.totalTermFreq()); docFreq = Math.max(termStats.docFreq(), docFreq); totalTermFreq += 
termStats.totalTermFreq(); } diff --git a/lucene/core/src/java/org/apache/lucene/search/TermQuery.java b/lucene/core/src/java/org/apache/lucene/search/TermQuery.java index 4b99f74d1cc3..4909e2a71603 100644 --- a/lucene/core/src/java/org/apache/lucene/search/TermQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/TermQuery.java @@ -61,7 +61,7 @@ public TermWeight(IndexSearcher searcher, ScoreMode scoreMode, final TermStatistics termStats; if (scoreMode.needsScores()) { collectionStats = searcher.collectionStatistics(term.field()); - termStats = searcher.termStatistics(term, termStates); + termStats = termStates.docFreq() > 0 ? searcher.termStatistics(term, termStates.docFreq(), termStates.totalTermFreq()) : null; } else { // we do not need the actual stats, use fake stats with docFreq=maxDoc=ttf=1 collectionStats = new CollectionStatistics(term.field(), 1, 1, 1, 1); diff --git a/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java b/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java index 59816592a071..7a09b5bae909 100644 --- a/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java +++ b/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java @@ -19,6 +19,7 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Collection; import java.util.Comparator; import java.util.List; @@ -96,12 +97,12 @@ private static boolean canEarlyTerminateOnPrefix(Sort searchSort, Sort indexSort * document scores and maxScore. 
*/ private static class SimpleFieldCollector extends TopFieldCollector { - final Sort sort; final FieldValueHitQueue queue; - public SimpleFieldCollector(Sort sort, FieldValueHitQueue queue, int numHits, int totalHitsThreshold) { - super(queue, numHits, totalHitsThreshold, sort.needsScores()); + public SimpleFieldCollector(Sort sort, FieldValueHitQueue queue, int numHits, + HitsThresholdChecker hitsThresholdChecker) { + super(queue, numHits, hitsThresholdChecker, sort.needsScores()); this.sort = sort; this.queue = queue; } @@ -128,13 +129,14 @@ public void setScorer(Scorable scorer) throws IOException { @Override public void collect(int doc) throws IOException { ++totalHits; + hitsThresholdChecker.incrementHitCount(); if (queueFull) { if (collectedAllCompetitiveHits || reverseMul * comparator.compareBottom(doc) <= 0) { // since docs are visited in doc Id order, if compare is 0, it means // this document is largest than anything else in the queue, and // therefore not competitive. if (canEarlyTerminate) { - if (totalHits > totalHitsThreshold) { + if (hitsThresholdChecker.isThresholdReached()) { totalHitsRelation = Relation.GREATER_THAN_OR_EQUAL_TO; throw new CollectionTerminatedException(); } else { @@ -181,15 +183,13 @@ private final static class PagingFieldCollector extends TopFieldCollector { int collectedHits; final FieldValueHitQueue queue; final FieldDoc after; - final int totalHitsThreshold; public PagingFieldCollector(Sort sort, FieldValueHitQueue queue, FieldDoc after, int numHits, - int totalHitsThreshold) { - super(queue, numHits, totalHitsThreshold, sort.needsScores()); + HitsThresholdChecker hitsThresholdChecker) { + super(queue, numHits, hitsThresholdChecker, sort.needsScores()); this.sort = sort; this.queue = queue; this.after = after; - this.totalHitsThreshold = totalHitsThreshold; FieldComparator[] comparators = queue.comparators; // Tell all comparators their top value: @@ -221,6 +221,7 @@ public void collect(int doc) throws IOException { 
//System.out.println(" collect doc=" + doc); totalHits++; + hitsThresholdChecker.incrementHitCount(); if (queueFull) { // Fastmatch: return if this hit is no better than @@ -230,7 +231,7 @@ public void collect(int doc) throws IOException { // this document is largest than anything else in the queue, and // therefore not competitive. if (canEarlyTerminate) { - if (totalHits > totalHitsThreshold) { + if (hitsThresholdChecker.isThresholdReached()) { totalHitsRelation = Relation.GREATER_THAN_OR_EQUAL_TO; throw new CollectionTerminatedException(); } else { @@ -282,7 +283,7 @@ public void collect(int doc) throws IOException { private static final ScoreDoc[] EMPTY_SCOREDOCS = new ScoreDoc[0]; final int numHits; - final int totalHitsThreshold; + final HitsThresholdChecker hitsThresholdChecker; final FieldComparator.RelevanceComparator firstComparator; final boolean canSetMinScore; final int numComparators; @@ -297,17 +298,18 @@ public void collect(int doc) throws IOException { // internal versions. If someone will define a constructor with any other // visibility, then anyone will be able to extend the class, which is not what // we want. 
- private TopFieldCollector(FieldValueHitQueue pq, int numHits, int totalHitsThreshold, boolean needsScores) { + private TopFieldCollector(FieldValueHitQueue pq, int numHits, + HitsThresholdChecker hitsThresholdChecker, boolean needsScores) { super(pq); this.needsScores = needsScores; this.numHits = numHits; - this.totalHitsThreshold = totalHitsThreshold; + this.hitsThresholdChecker = hitsThresholdChecker; this.numComparators = pq.getComparators().length; FieldComparator fieldComparator = pq.getComparators()[0]; int reverseMul = pq.reverseMul[0]; if (fieldComparator.getClass().equals(FieldComparator.RelevanceComparator.class) && reverseMul == 1 // if the natural sort is preserved (sort by descending relevance) - && totalHitsThreshold != Integer.MAX_VALUE) { + && hitsThresholdChecker.getHitsThreshold() != Integer.MAX_VALUE) { firstComparator = (FieldComparator.RelevanceComparator) fieldComparator; scoreMode = ScoreMode.TOP_SCORES; canSetMinScore = true; @@ -324,7 +326,7 @@ public ScoreMode scoreMode() { } protected void updateMinCompetitiveScore(Scorable scorer) throws IOException { - if (canSetMinScore && totalHits > totalHitsThreshold && queueFull) { + if (canSetMinScore && hitsThresholdChecker.isThresholdReached() && queueFull) { assert bottom != null && firstComparator != null; float minScore = firstComparator.value(bottom.slot); scorer.setMinCompetitiveScore(minScore); @@ -382,8 +384,19 @@ public static TopFieldCollector create(Sort sort, int numHits, int totalHitsThre * @return a {@link TopFieldCollector} instance which will sort the results by * the sort criteria. 
*/ - public static TopFieldCollector create(Sort sort, int numHits, FieldDoc after, - int totalHitsThreshold) { + public static TopFieldCollector create(Sort sort, int numHits, FieldDoc after, int totalHitsThreshold) { + if (totalHitsThreshold < 0) { + throw new IllegalArgumentException("totalHitsThreshold must be >= 0, got " + totalHitsThreshold); + } + + return create(sort, numHits, after, HitsThresholdChecker.create(totalHitsThreshold)); + } + + /** + * Same as above with an additional parameter to allow passing in the threshold checker + */ + static TopFieldCollector create(Sort sort, int numHits, FieldDoc after, + HitsThresholdChecker hitsThresholdChecker) { if (sort.fields.length == 0) { throw new IllegalArgumentException("Sort must contain at least one field"); @@ -393,14 +406,14 @@ public static TopFieldCollector create(Sort sort, int numHits, FieldDoc after, throw new IllegalArgumentException("numHits must be > 0; please use TotalHitCountCollector if you just need the total hit count"); } - if (totalHitsThreshold < 0) { - throw new IllegalArgumentException("totalHitsThreshold must be >= 0, got " + totalHitsThreshold); + if (hitsThresholdChecker == null) { + throw new IllegalArgumentException("hitsThresholdChecker should not be null"); } FieldValueHitQueue queue = FieldValueHitQueue.create(sort.fields, numHits); if (after == null) { - return new SimpleFieldCollector(sort, queue, numHits, totalHitsThreshold); + return new SimpleFieldCollector(sort, queue, numHits, hitsThresholdChecker); } else { if (after.fields == null) { throw new IllegalArgumentException("after.fields wasn't set; you must pass fillFields=true for the previous search"); @@ -410,10 +423,36 @@ public static TopFieldCollector create(Sort sort, int numHits, FieldDoc after, throw new IllegalArgumentException("after.fields has " + after.fields.length + " values but sort has " + sort.getSort().length); } - return new PagingFieldCollector(sort, queue, after, numHits, totalHitsThreshold); + return 
new PagingFieldCollector(sort, queue, after, numHits, hitsThresholdChecker); } } + /** + * Create a CollectorManager which uses a shared hit counter to maintain number of hits + */ + public static CollectorManager createSharedManager(Sort sort, int numHits, FieldDoc after, + int totalHitsThreshold) { + return new CollectorManager() { + + private final HitsThresholdChecker hitsThresholdChecker = HitsThresholdChecker.createShared(totalHitsThreshold); + + @Override + public TopFieldCollector newCollector() throws IOException { + return create(sort, numHits, after, hitsThresholdChecker); + } + + @Override + public TopFieldDocs reduce(Collection collectors) throws IOException { + final TopFieldDocs[] topDocs = new TopFieldDocs[collectors.size()]; + int i = 0; + for (TopFieldCollector collector : collectors) { + topDocs[i++] = collector.topDocs(); + } + return TopDocs.merge(sort, numHits, topDocs); + } + }; + } + /** * Populate {@link ScoreDoc#score scores} of the given {@code topDocs}. * @param topDocs the top docs to populate diff --git a/lucene/core/src/java/org/apache/lucene/search/TopScoreDocCollector.java b/lucene/core/src/java/org/apache/lucene/search/TopScoreDocCollector.java index c857f8f3a1e1..b0e6a8bc8083 100644 --- a/lucene/core/src/java/org/apache/lucene/search/TopScoreDocCollector.java +++ b/lucene/core/src/java/org/apache/lucene/search/TopScoreDocCollector.java @@ -18,6 +18,7 @@ import java.io.IOException; +import java.util.Collection; import org.apache.lucene.index.LeafReaderContext; @@ -48,8 +49,8 @@ public void setScorer(Scorable scorer) throws IOException { private static class SimpleTopScoreDocCollector extends TopScoreDocCollector { - SimpleTopScoreDocCollector(int numHits, int totalHitsThreshold) { - super(numHits, totalHitsThreshold); + SimpleTopScoreDocCollector(int numHits, HitsThresholdChecker hitsThresholdChecker) { + super(numHits, hitsThresholdChecker); } @Override @@ -71,8 +72,10 @@ public void collect(int doc) throws IOException { assert 
score >= 0; // NOTE: false for NaN totalHits++; + hitsThresholdChecker.incrementHitCount(); + if (score <= pqTop.score) { - if (totalHitsRelation == TotalHits.Relation.EQUAL_TO && totalHits > totalHitsThreshold) { + if (totalHitsRelation == TotalHits.Relation.EQUAL_TO && hitsThresholdChecker.isThresholdReached()) { // we just reached totalHitsThreshold, we can start setting the min // competitive score now updateMinCompetitiveScore(scorer); @@ -97,8 +100,8 @@ private static class PagingTopScoreDocCollector extends TopScoreDocCollector { private final ScoreDoc after; private int collectedHits; - PagingTopScoreDocCollector(int numHits, ScoreDoc after, int totalHitsThreshold) { - super(numHits, totalHitsThreshold); + PagingTopScoreDocCollector(int numHits, ScoreDoc after, HitsThresholdChecker hitsThresholdChecker) { + super(numHits, hitsThresholdChecker); this.after = after; this.collectedHits = 0; } @@ -130,10 +133,11 @@ public void collect(int doc) throws IOException { assert score >= 0; // NOTE: false for NaN totalHits++; + hitsThresholdChecker.incrementHitCount(); if (score > after.score || (score == after.score && doc <= afterDoc)) { // hit was collected on a previous page - if (totalHitsRelation == TotalHits.Relation.EQUAL_TO && totalHits > totalHitsThreshold) { + if (totalHitsRelation == TotalHits.Relation.EQUAL_TO && hitsThresholdChecker.isThresholdReached()) { // we just reached totalHitsThreshold, we can start setting the min // competitive score now updateMinCompetitiveScore(scorer); @@ -191,32 +195,65 @@ public static TopScoreDocCollector create(int numHits, int totalHitsThreshold) { * objects. 
*/ public static TopScoreDocCollector create(int numHits, ScoreDoc after, int totalHitsThreshold) { + return create(numHits, after, HitsThresholdChecker.create(totalHitsThreshold)); + } + + static TopScoreDocCollector create(int numHits, ScoreDoc after, HitsThresholdChecker hitsThresholdChecker) { if (numHits <= 0) { throw new IllegalArgumentException("numHits must be > 0; please use TotalHitCountCollector if you just need the total hit count"); } - if (totalHitsThreshold < 0) { - throw new IllegalArgumentException("totalHitsThreshold must be >= 0, got " + totalHitsThreshold); + if (hitsThresholdChecker == null) { + throw new IllegalArgumentException("hitsThresholdChecker must be non null"); } if (after == null) { - return new SimpleTopScoreDocCollector(numHits, totalHitsThreshold); + return new SimpleTopScoreDocCollector(numHits, hitsThresholdChecker); } else { - return new PagingTopScoreDocCollector(numHits, after, totalHitsThreshold); + return new PagingTopScoreDocCollector(numHits, after, hitsThresholdChecker); } } - final int totalHitsThreshold; + /** + * Create a CollectorManager which uses a shared hit counter to maintain number of hits + */ + public static CollectorManager createSharedManager(int numHits, FieldDoc after, + int totalHitsThreshold) { + return new CollectorManager() { + + private final HitsThresholdChecker hitsThresholdChecker = HitsThresholdChecker.createShared(totalHitsThreshold); + + @Override + public TopScoreDocCollector newCollector() throws IOException { + return TopScoreDocCollector.create(numHits, after, hitsThresholdChecker); + } + + @Override + public TopDocs reduce(Collection collectors) throws IOException { + final TopDocs[] topDocs = new TopDocs[collectors.size()]; + int i = 0; + for (TopScoreDocCollector collector : collectors) { + topDocs[i++] = collector.topDocs(); + } + return TopDocs.merge(numHits, topDocs); + } + + }; + } + ScoreDoc pqTop; + final HitsThresholdChecker hitsThresholdChecker; // prevents instantiation - 
TopScoreDocCollector(int numHits, int totalHitsThreshold) { + TopScoreDocCollector(int numHits, HitsThresholdChecker hitsThresholdChecker) { super(new HitQueue(numHits, true)); - this.totalHitsThreshold = totalHitsThreshold; + assert hitsThresholdChecker != null; + // HitQueue implements getSentinelObject to return a ScoreDoc, so we know // that at this point top() is already initialized. pqTop = pq.top(); + this.hitsThresholdChecker = hitsThresholdChecker; } @Override @@ -230,11 +267,11 @@ protected TopDocs newTopDocs(ScoreDoc[] results, int start) { @Override public ScoreMode scoreMode() { - return totalHitsThreshold == Integer.MAX_VALUE ? ScoreMode.COMPLETE : ScoreMode.TOP_SCORES; + return hitsThresholdChecker.scoreMode(); } protected void updateMinCompetitiveScore(Scorable scorer) throws IOException { - if (totalHits > totalHitsThreshold + if (hitsThresholdChecker.isThresholdReached() && pqTop != null && pqTop.score != Float.NEGATIVE_INFINITY) { // -Infinity is the score of sentinels // since we tie-break on doc id and collect in doc id order, we can require diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanWeight.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanWeight.java index c33235f9e1bc..d70d2773e059 100644 --- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanWeight.java +++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanWeight.java @@ -102,10 +102,10 @@ private Similarity.SimScorer buildSimWeight(SpanQuery query, IndexSearcher searc return null; TermStatistics[] termStats = new TermStatistics[termStates.size()]; int termUpTo = 0; - for (Term term : termStates.keySet()) { - TermStatistics termStatistics = searcher.termStatistics(term, termStates.get(term)); - if (termStatistics != null) { - termStats[termUpTo++] = termStatistics; + for (Map.Entry entry : termStates.entrySet()) { + TermStates ts = entry.getValue(); + if (ts.docFreq() > 0) { + termStats[termUpTo++] = 
searcher.termStatistics(entry.getKey(), ts.docFreq(), ts.totalTermFreq()); } } CollectionStatistics collectionStats = searcher.collectionStatistics(query.getField()); diff --git a/lucene/core/src/java/org/apache/lucene/util/fst/Builder.java b/lucene/core/src/java/org/apache/lucene/util/fst/Builder.java index bb9a682a666a..c54b144989a1 100644 --- a/lucene/core/src/java/org/apache/lucene/util/fst/Builder.java +++ b/lucene/core/src/java/org/apache/lucene/util/fst/Builder.java @@ -50,9 +50,6 @@ public class Builder { - // The amount of Arc array oversizing used to enable direct addressing of Arcs by their labels - static final int DIRECT_ARC_LOAD_FACTOR = 4; - private final NodeHash dedupHash; final FST fst; private final T NO_OUTPUT; diff --git a/lucene/core/src/java/org/apache/lucene/util/fst/FST.java b/lucene/core/src/java/org/apache/lucene/util/fst/FST.java index 26f5e51e7d8f..e0692b4d7e84 100644 --- a/lucene/core/src/java/org/apache/lucene/util/fst/FST.java +++ b/lucene/core/src/java/org/apache/lucene/util/fst/FST.java @@ -88,8 +88,6 @@ public static enum INPUT_TYPE {BYTE1, BYTE2, BYTE4}; // this means either of these things in different contexts // in the midst of a direct array: private static final byte BIT_MISSING_ARC = 1 << 6; - // at the start of a direct array: - private static final byte ARCS_AS_ARRAY_WITH_GAPS = BIT_MISSING_ARC; /** * @see #shouldExpand(Builder, Builder.UnCompiledNode) @@ -109,7 +107,7 @@ public static enum INPUT_TYPE {BYTE1, BYTE2, BYTE4}; // Increment version to change it private static final String FILE_FORMAT_NAME = "FST"; private static final int VERSION_START = 6; - private static final int VERSION_CURRENT = 7; + private static final int VERSION_CURRENT = VERSION_START; // Never serialized; just used to represent the virtual // final node w/ no arcs: @@ -645,35 +643,19 @@ long addNode(Builder builder, Builder.UnCompiledNode nodeIn) throws IOExce assert maxBytesPerArc > 0; // 2nd pass just "expands" all arcs to take up a fixed byte 
size - // If more than (1 / DIRECT_ARC_LOAD_FACTOR) of the "slots" would be occupied, write an arc - // array that may have holes in it so that we can address the arcs directly by label without - // binary search - int labelRange = nodeIn.arcs[nodeIn.numArcs - 1].label - nodeIn.arcs[0].label + 1; - boolean writeDirectly = labelRange > 0 && labelRange < Builder.DIRECT_ARC_LOAD_FACTOR * nodeIn.numArcs; - - //System.out.println("write int @pos=" + (fixedArrayStart-4) + " numArcs=" + nodeIn.numArcs); // create the header // TODO: clean this up: or just rewind+reuse and deal with it byte header[] = new byte[MAX_HEADER_SIZE]; ByteArrayDataOutput bad = new ByteArrayDataOutput(header); // write a "false" first arc: - if (writeDirectly) { - bad.writeByte(ARCS_AS_ARRAY_WITH_GAPS); - bad.writeVInt(labelRange); - } else { - bad.writeByte(ARCS_AS_ARRAY_PACKED); - bad.writeVInt(nodeIn.numArcs); - } + bad.writeByte(ARCS_AS_ARRAY_PACKED); + bad.writeVInt(nodeIn.numArcs); bad.writeVInt(maxBytesPerArc); int headerLen = bad.getPosition(); final long fixedArrayStart = startAddress + headerLen; - if (writeDirectly) { - writeArrayWithGaps(builder, nodeIn, fixedArrayStart, maxBytesPerArc, labelRange); - } else { - writeArrayPacked(builder, nodeIn, fixedArrayStart, maxBytesPerArc); - } + writeArrayPacked(builder, nodeIn, fixedArrayStart, maxBytesPerArc); // now write the header builder.bytes.writeBytes(startAddress, header, 0, headerLen); @@ -707,45 +689,7 @@ private void writeArrayPacked(Builder builder, Builder.UnCompiledNode node } } - private void writeArrayWithGaps(Builder builder, Builder.UnCompiledNode nodeIn, long fixedArrayStart, int maxBytesPerArc, int labelRange) { - // expand the arcs in place, backwards - long srcPos = builder.bytes.getPosition(); - long destPos = fixedArrayStart + labelRange * maxBytesPerArc; - // if destPos == srcPos it means all the arcs were the same length, and the array of them is *already* direct - assert destPos >= srcPos; - if (destPos > srcPos) { - 
builder.bytes.skipBytes((int) (destPos - srcPos)); - int arcIdx = nodeIn.numArcs - 1; - int firstLabel = nodeIn.arcs[0].label; - int nextLabel = nodeIn.arcs[arcIdx].label; - for (int directArcIdx = labelRange - 1; directArcIdx >= 0; directArcIdx--) { - destPos -= maxBytesPerArc; - if (directArcIdx == nextLabel - firstLabel) { - int arcLen = builder.reusedBytesPerArc[arcIdx]; - srcPos -= arcLen; - //System.out.println(" direct pack idx=" + directArcIdx + " arcIdx=" + arcIdx + " srcPos=" + srcPos + " destPos=" + destPos + " label=" + nextLabel); - if (srcPos != destPos) { - //System.out.println(" copy len=" + builder.reusedBytesPerArc[arcIdx]); - assert destPos > srcPos: "destPos=" + destPos + " srcPos=" + srcPos + " arcIdx=" + arcIdx + " maxBytesPerArc=" + maxBytesPerArc + " reusedBytesPerArc[arcIdx]=" + builder.reusedBytesPerArc[arcIdx] + " nodeIn.numArcs=" + nodeIn.numArcs; - builder.bytes.copyBytes(srcPos, destPos, arcLen); - if (arcIdx == 0) { - break; - } - } - --arcIdx; - nextLabel = nodeIn.arcs[arcIdx].label; - } else { - assert directArcIdx > arcIdx; - // mark this as a missing arc - //System.out.println(" direct pack idx=" + directArcIdx + " no arc"); - builder.bytes.writeByte(destPos, BIT_MISSING_ARC); - } - } - } - } - - /** Fills virtual 'start' arc, ie, an empty incoming arc to - * the FST's start node */ + /** Fills virtual 'start' arc, ie, an empty incoming arc to the FST's start node */ public Arc getFirstArc(Arc arc) { T NO_OUTPUT = outputs.getNoOutput(); @@ -786,18 +730,13 @@ public Arc readLastTargetArc(Arc follow, Arc arc, BytesReader in) throw } else { in.setPosition(follow.target); final byte b = in.readByte(); - if (b == ARCS_AS_ARRAY_PACKED || b == ARCS_AS_ARRAY_WITH_GAPS) { + if (b == ARCS_AS_ARRAY_PACKED) { // array: jump straight to end arc.numArcs = in.readVInt(); arc.bytesPerArc = in.readVInt(); //System.out.println(" array numArcs=" + arc.numArcs + " bpa=" + arc.bytesPerArc); arc.posArcsStart = in.getPosition(); - if (b == 
ARCS_AS_ARRAY_WITH_GAPS) { - arc.arcIdx = Integer.MIN_VALUE; - arc.nextArc = arc.posArcsStart - (arc.numArcs - 1) * arc.bytesPerArc; - } else { - arc.arcIdx = arc.numArcs - 2; - } + arc.arcIdx = arc.numArcs - 2; } else { arc.flags = b; // non-array: linear scan @@ -868,7 +807,7 @@ public Arc readFirstRealTargetArc(long node, Arc arc, final BytesReader in //System.out.println(" flags=" + arc.flags); byte flags = in.readByte(); - if (flags == ARCS_AS_ARRAY_PACKED || flags == ARCS_AS_ARRAY_WITH_GAPS) { + if (flags == ARCS_AS_ARRAY_PACKED) { //System.out.println(" fixedArray"); // this is first arc in a fixed-array arc.numArcs = in.readVInt(); @@ -901,7 +840,7 @@ boolean isExpandedTarget(Arc follow, BytesReader in) throws IOException { } else { in.setPosition(follow.target); byte flags = in.readByte(); - return flags == ARCS_AS_ARRAY_PACKED || flags == ARCS_AS_ARRAY_WITH_GAPS; + return flags == ARCS_AS_ARRAY_PACKED; } } @@ -931,7 +870,7 @@ public int readNextArcLabel(Arc arc, BytesReader in) throws IOException { in.setPosition(pos); final byte flags = in.readByte(); - if (flags == ARCS_AS_ARRAY_PACKED || flags == ARCS_AS_ARRAY_WITH_GAPS) { + if (flags == ARCS_AS_ARRAY_PACKED) { //System.out.println(" nextArc fixed array"); in.readVInt(); @@ -1140,34 +1079,7 @@ private Arc findTargetArc(int labelToMatch, Arc follow, Arc arc, BytesR // System.out.println("fta label=" + (char) labelToMatch); byte flags = in.readByte(); - if (flags == ARCS_AS_ARRAY_WITH_GAPS) { - arc.numArcs = in.readVInt(); - arc.bytesPerArc = in.readVInt(); - arc.posArcsStart = in.getPosition(); - - // Array is direct; address by label - in.skipBytes(1); - int firstLabel = readLabel(in); - int arcPos = labelToMatch - firstLabel; - if (arcPos == 0) { - arc.nextArc = arc.posArcsStart; - } else if (arcPos > 0) { - if (arcPos >= arc.numArcs) { - return null; - } - in.setPosition(arc.posArcsStart - arc.bytesPerArc * arcPos); - flags = in.readByte(); - if (flag(flags, BIT_MISSING_ARC)) { - return null; - } - 
// point to flags that we just read - arc.nextArc = in.getPosition() + 1; - } else { - return null; - } - arc.arcIdx = Integer.MIN_VALUE; - return readNextRealArc(arc, in); - } else if (flags == ARCS_AS_ARRAY_PACKED) { + if (flags == ARCS_AS_ARRAY_PACKED) { arc.numArcs = in.readVInt(); arc.bytesPerArc = in.readVInt(); arc.posArcsStart = in.getPosition(); diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java index 87fc5e23c98f..4f4de3955523 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java @@ -50,7 +50,7 @@ public class TestLucene60PointsFormat extends BasePointsFormatTestCase { private final Codec codec; private final int maxPointsInLeafNode; - + public TestLucene60PointsFormat() { // standard issue Codec defaultCodec = TestUtil.getDefaultCodec(); @@ -110,15 +110,19 @@ public void testEstimatePointCount() throws IOException { byte[] uniquePointValue = new byte[3]; random().nextBytes(uniquePointValue); final int numDocs = atLeast(10000); // make sure we have several leaves + final boolean multiValues = random().nextBoolean(); for (int i = 0; i < numDocs; ++i) { Document doc = new Document(); if (i == numDocs / 2) { doc.add(new BinaryPoint("f", uniquePointValue)); } else { - do { - random().nextBytes(pointValue); - } while (Arrays.equals(pointValue, uniquePointValue)); - doc.add(new BinaryPoint("f", pointValue)); + final int numValues = (multiValues) ? 
TestUtil.nextInt(random(), 2, 100) : 1; + for (int j = 0; j < numValues; j ++) { + do { + random().nextBytes(pointValue); + } while (Arrays.equals(pointValue, uniquePointValue)); + doc.add(new BinaryPoint("f", pointValue)); + } } w.addDocument(doc); } @@ -129,58 +133,72 @@ public void testEstimatePointCount() throws IOException { PointValues points = lr.getPointValues("f"); // If all points match, then the point count is numLeaves * maxPointsInLeafNode - final int numLeaves = (int) Math.ceil((double) numDocs / maxPointsInLeafNode); - assertEquals(numLeaves * maxPointsInLeafNode, - points.estimatePointCount(new IntersectVisitor() { - @Override - public void visit(int docID, byte[] packedValue) throws IOException {} - - @Override - public void visit(int docID) throws IOException {} - - @Override - public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - return Relation.CELL_INSIDE_QUERY; - } - })); + final int numLeaves = (int) Math.ceil((double) points.size() / maxPointsInLeafNode); + + IntersectVisitor allPointsVisitor = new IntersectVisitor() { + @Override + public void visit(int docID, byte[] packedValue) throws IOException {} + + @Override + public void visit(int docID) throws IOException {} + + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + return Relation.CELL_INSIDE_QUERY; + } + }; + + assertEquals(numLeaves * maxPointsInLeafNode, points.estimatePointCount(allPointsVisitor)); + assertEquals(numDocs, points.estimateDocCount(allPointsVisitor)); + + IntersectVisitor noPointsVisitor = new IntersectVisitor() { + @Override + public void visit(int docID, byte[] packedValue) throws IOException {} + + @Override + public void visit(int docID) throws IOException {} + + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + return Relation.CELL_OUTSIDE_QUERY; + } + }; // Return 0 if no points match - assertEquals(0, - points.estimatePointCount(new IntersectVisitor() { - @Override - 
public void visit(int docID, byte[] packedValue) throws IOException {} - - @Override - public void visit(int docID) throws IOException {} - - @Override - public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - return Relation.CELL_OUTSIDE_QUERY; - } - })); + assertEquals(0, points.estimatePointCount(noPointsVisitor)); + assertEquals(0, points.estimateDocCount(noPointsVisitor)); + + IntersectVisitor onePointMatchVisitor = new IntersectVisitor() { + @Override + public void visit(int docID, byte[] packedValue) throws IOException {} + + @Override + public void visit(int docID) throws IOException {} + + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + if (FutureArrays.compareUnsigned(uniquePointValue, 0, 3, maxPackedValue, 0, 3) > 0 || + FutureArrays.compareUnsigned(uniquePointValue, 0, 3, minPackedValue, 0, 3) < 0) { + return Relation.CELL_OUTSIDE_QUERY; + } + return Relation.CELL_CROSSES_QUERY; + } + }; // If only one point matches, then the point count is (maxPointsInLeafNode + 1) / 2 // in general, or maybe 2x that if the point is a split value - final long pointCount = points.estimatePointCount(new IntersectVisitor() { - @Override - public void visit(int docID, byte[] packedValue) throws IOException {} - - @Override - public void visit(int docID) throws IOException {} - - @Override - public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - if (FutureArrays.compareUnsigned(uniquePointValue, 0, 3, maxPackedValue, 0, 3) > 0 || - FutureArrays.compareUnsigned(uniquePointValue, 0, 3, minPackedValue, 0, 3) < 0) { - return Relation.CELL_OUTSIDE_QUERY; - } - return Relation.CELL_CROSSES_QUERY; - } - }); + final long pointCount = points.estimatePointCount(onePointMatchVisitor); assertTrue(""+pointCount, pointCount == (maxPointsInLeafNode + 1) / 2 || // common case - pointCount == 2*((maxPointsInLeafNode + 1) / 2)); // if the point is a split value + pointCount == 2*((maxPointsInLeafNode + 1) / 2)); // if 
the point is a split value + + final long docCount = points.estimateDocCount(onePointMatchVisitor); + if (multiValues) { + assertEquals(docCount, (long) (docCount * (1d - Math.pow( (numDocs - pointCount) / points.size() , points.size() / docCount)))); + } else { + assertEquals(pointCount, docCount); + } r.close(); dir.close(); } @@ -199,16 +217,20 @@ public void testEstimatePointCount2Dims() throws IOException { random().nextBytes(uniquePointValue[0]); random().nextBytes(uniquePointValue[1]); final int numDocs = atLeast(10000); // make sure we have several leaves + final boolean multiValues = random().nextBoolean(); for (int i = 0; i < numDocs; ++i) { Document doc = new Document(); if (i == numDocs / 2) { doc.add(new BinaryPoint("f", uniquePointValue)); } else { - do { - random().nextBytes(pointValue[0]); - random().nextBytes(pointValue[1]); - } while (Arrays.equals(pointValue[0], uniquePointValue[0]) || Arrays.equals(pointValue[1], uniquePointValue[1])); - doc.add(new BinaryPoint("f", pointValue)); + final int numValues = (multiValues) ? 
TestUtil.nextInt(random(), 2, 100) : 1; + for (int j = 0; j < numValues; j ++) { + do { + random().nextBytes(pointValue[0]); + random().nextBytes(pointValue[1]); + } while (Arrays.equals(pointValue[0], uniquePointValue[0]) || Arrays.equals(pointValue[1], uniquePointValue[1])); + doc.add(new BinaryPoint("f", pointValue)); + } } w.addDocument(doc); } @@ -219,67 +241,161 @@ public void testEstimatePointCount2Dims() throws IOException { PointValues points = lr.getPointValues("f"); // With >1 dims, the tree is balanced - int actualMaxPointsInLeafNode = numDocs; + long actualMaxPointsInLeafNode = points.size(); while (actualMaxPointsInLeafNode > maxPointsInLeafNode) { actualMaxPointsInLeafNode = (actualMaxPointsInLeafNode + 1) / 2; } + IntersectVisitor allPointsVisitor = new IntersectVisitor() { + @Override + public void visit(int docID, byte[] packedValue) throws IOException {} + + @Override + public void visit(int docID) throws IOException {} + + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + return Relation.CELL_INSIDE_QUERY; + } + }; + // If all points match, then the point count is numLeaves * maxPointsInLeafNode - final int numLeaves = Integer.highestOneBit((numDocs - 1) / actualMaxPointsInLeafNode) << 1; - assertEquals(numLeaves * actualMaxPointsInLeafNode, - points.estimatePointCount(new IntersectVisitor() { - @Override - public void visit(int docID, byte[] packedValue) throws IOException {} - - @Override - public void visit(int docID) throws IOException {} - - @Override - public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - return Relation.CELL_INSIDE_QUERY; - } - })); + final int numLeaves = (int) Long.highestOneBit( ((points.size() - 1) / actualMaxPointsInLeafNode)) << 1; + + assertEquals(numLeaves * actualMaxPointsInLeafNode, points.estimatePointCount(allPointsVisitor)); + assertEquals(numDocs, points.estimateDocCount(allPointsVisitor)); + + IntersectVisitor noPointsVisitor = new IntersectVisitor() { 
+ @Override + public void visit(int docID, byte[] packedValue) throws IOException {} + + @Override + public void visit(int docID) throws IOException {} + + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + return Relation.CELL_OUTSIDE_QUERY; + } + }; // Return 0 if no points match - assertEquals(0, - points.estimatePointCount(new IntersectVisitor() { - @Override - public void visit(int docID, byte[] packedValue) throws IOException {} - - @Override - public void visit(int docID) throws IOException {} - - @Override - public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + assertEquals(0, points.estimatePointCount(noPointsVisitor)); + assertEquals(0, points.estimateDocCount(noPointsVisitor)); + + IntersectVisitor onePointMatchVisitor = new IntersectVisitor() { + @Override + public void visit(int docID, byte[] packedValue) throws IOException {} + + @Override + public void visit(int docID) throws IOException {} + + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + for (int dim = 0; dim < 2; ++dim) { + if (FutureArrays.compareUnsigned(uniquePointValue[dim], 0, 3, maxPackedValue, dim * 3, dim * 3 + 3) > 0 || + FutureArrays.compareUnsigned(uniquePointValue[dim], 0, 3, minPackedValue, dim * 3, dim * 3 + 3) < 0) { return Relation.CELL_OUTSIDE_QUERY; } - })); - + } + return Relation.CELL_CROSSES_QUERY; + } + }; // If only one point matches, then the point count is (actualMaxPointsInLeafNode + 1) / 2 // in general, or maybe 2x that if the point is a split value - final long pointCount = points.estimatePointCount(new IntersectVisitor() { - @Override - public void visit(int docID, byte[] packedValue) throws IOException {} - - @Override - public void visit(int docID) throws IOException {} - - @Override - public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - for (int dim = 0; dim < 2; ++dim) { - if (FutureArrays.compareUnsigned(uniquePointValue[dim], 0, 3, maxPackedValue, 
dim * 3, dim * 3 + 3) > 0 || - FutureArrays.compareUnsigned(uniquePointValue[dim], 0, 3, minPackedValue, dim * 3, dim * 3 + 3) < 0) { - return Relation.CELL_OUTSIDE_QUERY; - } - } - return Relation.CELL_CROSSES_QUERY; - } - }); + final long pointCount = points.estimatePointCount(onePointMatchVisitor); assertTrue(""+pointCount, pointCount == (actualMaxPointsInLeafNode + 1) / 2 || // common case - pointCount == 2*((actualMaxPointsInLeafNode + 1) / 2)); // if the point is a split value + pointCount == 2*((actualMaxPointsInLeafNode + 1) / 2)); // if the point is a split value + final long docCount = points.estimateDocCount(onePointMatchVisitor); + if (multiValues) { + assertEquals(docCount, (long) (docCount * (1d - Math.pow( (numDocs - pointCount) / points.size() , points.size() / docCount)))); + } else { + assertEquals(pointCount, docCount); + } r.close(); dir.close(); } + + public void testDocCountEdgeCases() { + PointValues values = getPointValues(Long.MAX_VALUE, 1, Long.MAX_VALUE); + long docs = values.estimateDocCount(null); + assertEquals(1, docs); + values = getPointValues(Long.MAX_VALUE, 1, 1); + docs = values.estimateDocCount(null); + assertEquals(1, docs); + values = getPointValues(Long.MAX_VALUE, Integer.MAX_VALUE, Long.MAX_VALUE); + docs = values.estimateDocCount(null); + assertEquals(Integer.MAX_VALUE, docs); + values = getPointValues(Long.MAX_VALUE, Integer.MAX_VALUE, Long.MAX_VALUE / 2); + docs = values.estimateDocCount(null); + assertEquals(Integer.MAX_VALUE, docs); + values = getPointValues(Long.MAX_VALUE, Integer.MAX_VALUE, 1); + docs = values.estimateDocCount(null); + assertEquals(1, docs); + } + + public void testRandomDocCount() { + for (int i = 0; i < 100; i++) { + long size = TestUtil.nextLong(random(), 1, Long.MAX_VALUE); + int maxDoc = (size > Integer.MAX_VALUE) ? 
Integer.MAX_VALUE : Math.toIntExact(size); + int docCount = TestUtil.nextInt(random(), 1, maxDoc); + long estimatedPointCount = TestUtil.nextLong(random(), 0, size); + PointValues values = getPointValues(size, docCount, estimatedPointCount); + long docs = values.estimateDocCount(null); + assertTrue(docs <= estimatedPointCount); + assertTrue(docs <= maxDoc); + assertTrue(docs >= estimatedPointCount / (size/docCount)); + } + } + + + private PointValues getPointValues(long size, int docCount, long estimatedPointCount) { + return new PointValues() { + @Override + public void intersect(IntersectVisitor visitor) { + throw new UnsupportedOperationException(); + } + + @Override + public long estimatePointCount(IntersectVisitor visitor) { + return estimatedPointCount; + } + + @Override + public byte[] getMinPackedValue() throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public byte[] getMaxPackedValue() throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int getNumDataDimensions() throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int getNumIndexDimensions() throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int getBytesPerDimension() throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public long size() { + return size; + } + + @Override + public int getDocCount() { + return docCount; + } + }; + } } diff --git a/lucene/core/src/test/org/apache/lucene/document/TestFeatureSort.java b/lucene/core/src/test/org/apache/lucene/document/TestFeatureSort.java index 3c8fef117284..1090e5a5679a 100644 --- a/lucene/core/src/test/org/apache/lucene/document/TestFeatureSort.java +++ b/lucene/core/src/test/org/apache/lucene/document/TestFeatureSort.java @@ -230,7 +230,7 @@ public void testDuelFloat() throws IOException { if (random().nextBoolean()) { float f; do { - int freq = 
TestUtil.nextInt(random(), 1, (1 << 16) - 1); + int freq = TestUtil.nextInt(random(), 1, FeatureField.MAX_FREQ); f = FeatureField.decodeFeatureValue(freq); } while (f < Float.MIN_NORMAL); doc.add(new NumericDocValuesField("float", Float.floatToIntBits(f))); diff --git a/lucene/core/src/test/org/apache/lucene/geo/TestPolygon.java b/lucene/core/src/test/org/apache/lucene/geo/TestPolygon.java index 8ee62718e6d7..e9bf265ffb56 100644 --- a/lucene/core/src/test/org/apache/lucene/geo/TestPolygon.java +++ b/lucene/core/src/test/org/apache/lucene/geo/TestPolygon.java @@ -300,4 +300,21 @@ public void testIllegalGeoJSONMultipleFeatures() throws Exception { Exception e = expectThrows(ParseException.class, () -> Polygon.fromGeoJSON(b.toString())); assertTrue(e.getMessage().contains("can only handle type FeatureCollection (if it has a single polygon geometry), Feature, Polygon or MutiPolygon, but got Point")); } + + public void testPolygonPropertiesCanBeStringArrays() throws Exception { + StringBuilder b = new StringBuilder(); + b.append("{\n"); + b.append(" \"type\": \"Polygon\",\n"); + b.append(" \"coordinates\": [\n"); + b.append(" [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0],\n"); + b.append(" [100.0, 1.0], [100.0, 0.0] ]\n"); + b.append(" ],\n"); + b.append(" \"properties\": {\n"); + b.append(" \"array\": [ \"value\" ]\n"); + b.append(" }\n"); + b.append("}\n"); + + Polygon[] polygons = Polygon.fromGeoJSON(b.toString()); + assertEquals(1, polygons.length); + } } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java index d784b120e9a5..de5d94780222 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java @@ -22,6 +22,7 @@ import org.apache.lucene.document.Field.Store; import org.apache.lucene.document.LongPoint; import 
org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.StringField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; @@ -86,4 +87,70 @@ public void testUseIndexForSelectiveQueries() throws IOException { dir.close(); } + public void testUseIndexForSelectiveMultiValueQueries() throws IOException { + Directory dir = newDirectory(); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig() + // relies on costs and PointValues.estimateCost so we need the default codec + .setCodec(TestUtil.getDefaultCodec())); + for (int i = 0; i < 2000; ++i) { + Document doc = new Document(); + if (i < 1000) { + doc.add(new StringField("f1", "bar", Store.NO)); + for (int j =0; j < 500; j++) { + doc.add(new LongPoint("f2", 42L)); + doc.add(new SortedNumericDocValuesField("f2", 42L)); + } + } else if (i == 1001) { + doc.add(new StringField("f1", "foo", Store.NO)); + doc.add(new LongPoint("f2", 2L)); + doc.add(new SortedNumericDocValuesField("f2", 42L)); + } else { + doc.add(new StringField("f1", "bar", Store.NO)); + for (int j =0; j < 100; j++) { + doc.add(new LongPoint("f2", 2L)); + doc.add(new SortedNumericDocValuesField("f2", 2L)); + } + } + w.addDocument(doc); + } + w.forceMerge(1); + IndexReader reader = DirectoryReader.open(w); + IndexSearcher searcher = newSearcher(reader); + searcher.setQueryCache(null); + + // The term query is less selective, so the IndexOrDocValuesQuery should use points + final Query q1 = new BooleanQuery.Builder() + .add(new TermQuery(new Term("f1", "bar")), Occur.MUST) + .add(new IndexOrDocValuesQuery(LongPoint.newExactQuery("f2", 2), SortedNumericDocValuesField.newSlowRangeQuery("f2", 2L, 2L)), Occur.MUST) + .build(); + + final Weight w1 = searcher.createWeight(searcher.rewrite(q1), ScoreMode.COMPLETE, 1); + final Scorer s1 = w1.scorer(searcher.getIndexReader().leaves().get(0)); + assertNull(s1.twoPhaseIterator()); // 
means we use points + + // The term query is less selective, so the IndexOrDocValuesQuery should use points + final Query q2 = new BooleanQuery.Builder() + .add(new TermQuery(new Term("f1", "bar")), Occur.MUST) + .add(new IndexOrDocValuesQuery(LongPoint.newExactQuery("f2", 42), SortedNumericDocValuesField.newSlowRangeQuery("f2", 42, 42L)), Occur.MUST) + .build(); + + final Weight w2 = searcher.createWeight(searcher.rewrite(q2), ScoreMode.COMPLETE, 1); + final Scorer s2 = w2.scorer(searcher.getIndexReader().leaves().get(0)); + assertNull(s2.twoPhaseIterator()); // means we use points + + // The term query is more selective, so the IndexOrDocValuesQuery should use doc values + final Query q3 = new BooleanQuery.Builder() + .add(new TermQuery(new Term("f1", "foo")), Occur.MUST) + .add(new IndexOrDocValuesQuery(LongPoint.newExactQuery("f2", 42), SortedNumericDocValuesField.newSlowRangeQuery("f2", 42, 42L)), Occur.MUST) + .build(); + + final Weight w3 = searcher.createWeight(searcher.rewrite(q3), ScoreMode.COMPLETE, 1); + final Scorer s3 = w3.scorer(searcher.getIndexReader().leaves().get(0)); + assertNotNull(s3.twoPhaseIterator()); // means we use doc values + + reader.close(); + w.close(); + dir.close(); + } + } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java b/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java index ea414e7d25f6..e6debf20f0cf 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java @@ -329,10 +329,10 @@ static class SlowMinShouldMatchScorer extends Scorer { if (ord >= 0) { boolean success = ords.add(ord); assert success; // no dups - TermStates context = TermStates.build(reader.getContext(), term, true); + TermStates ts = TermStates.build(reader.getContext(), term, true); SimScorer w = weight.similarity.scorer(1f, searcher.collectionStatistics("field"), - searcher.termStatistics(term, context)); 
+ searcher.termStatistics(term, ts.docFreq(), ts.totalTermFreq())); sims[(int)ord] = new LeafSimScorer(w, reader, "field", true); } } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestQueryVisitor.java b/lucene/core/src/test/org/apache/lucene/search/TestQueryVisitor.java index f1a431054741..27254d8579f2 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestQueryVisitor.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestQueryVisitor.java @@ -328,6 +328,24 @@ public void testExtractMatchingTermSet() { minimumTermSet.clear(); extractor.collectTerms(minimumTermSet); assertThat(minimumTermSet, equalTo(expected2)); + + BooleanQuery bq = new BooleanQuery.Builder() + .add(new BooleanQuery.Builder() + .add(new TermQuery(new Term("f", "1")), BooleanClause.Occur.MUST) + .add(new TermQuery(new Term("f", "61")), BooleanClause.Occur.MUST) + .add(new TermQuery(new Term("f", "211")), BooleanClause.Occur.FILTER) + .add(new TermQuery(new Term("f", "5")), BooleanClause.Occur.SHOULD) + .build(), BooleanClause.Occur.SHOULD) + .add(new PhraseQuery("f", "3333", "44444"), BooleanClause.Occur.SHOULD) + .build(); + QueryNode ex2 = new ConjunctionNode(); + bq.visit(ex2); + Set expected3 = new HashSet<>(Arrays.asList(new Term("f", "1"), new Term("f", "3333"))); + minimumTermSet.clear(); + ex2.collectTerms(minimumTermSet); + assertThat(minimumTermSet, equalTo(expected3)); + ex2.getWeight(); // force sort order + assertThat(ex2.toString(), equalTo("AND(AND(OR(AND(TERM(f:3333),TERM(f:44444)),AND(TERM(f:1),TERM(f:61),AND(TERM(f:211))))))")); } } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTopDocsCollector.java b/lucene/core/src/test/org/apache/lucene/search/TestTopDocsCollector.java index 809d83580dc5..608108c2a02b 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestTopDocsCollector.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestTopDocsCollector.java @@ -19,6 +19,10 @@ import java.io.IOException; import java.util.Arrays; 
+import java.util.concurrent.ExecutorService; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; @@ -29,6 +33,7 @@ import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.store.Directory; import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.util.NamedThreadFactory; public class TestTopDocsCollector extends LuceneTestCase { @@ -96,6 +101,31 @@ private TopDocsCollector doSearch(int numResults) throws IOException { searcher.search(q, tdc); return tdc; } + + private TopDocsCollector doSearchWithThreshold(int numResults, int thresHold) throws IOException { + Query q = new MatchAllDocsQuery(); + IndexSearcher searcher = newSearcher(reader); + TopDocsCollector tdc = TopScoreDocCollector.create(numResults, thresHold); + searcher.search(q, tdc); + return tdc; + } + + private TopDocs doConcurrentSearchWithThreshold(int numResults, int thresHold) throws IOException { + Query q = new MatchAllDocsQuery(); + ExecutorService service = new ThreadPoolExecutor(4, 4, 0L, TimeUnit.MILLISECONDS, + new LinkedBlockingQueue(), + new NamedThreadFactory("TestTopDocsCollector")); + IndexSearcher searcher = new IndexSearcher(reader, service); + + CollectorManager collectorManager = TopScoreDocCollector.createSharedManager(numResults, + null, Integer.MAX_VALUE); + + TopDocs tdc = (TopDocs) searcher.search(q, collectorManager); + + service.shutdown(); + + return tdc; + } @Override public void setUp() throws Exception { @@ -274,6 +304,29 @@ public void testSetMinCompetitiveScore() throws Exception { dir.close(); } + public void testSharedCountCollectorManager() throws Exception { + Query q = new MatchAllDocsQuery(); + Directory dir = newDirectory(); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE)); + Document doc = new Document(); 
+ w.addDocuments(Arrays.asList(doc, doc, doc, doc)); + w.flush(); + w.addDocuments(Arrays.asList(doc, doc)); + w.flush(); + IndexReader reader = DirectoryReader.open(w); + assertEquals(2, reader.leaves().size()); + w.close(); + + TopDocsCollector collector = doSearchWithThreshold(5, 10); + TopDocs tdc = doConcurrentSearchWithThreshold(5, 10); + TopDocs tdc2 = collector.topDocs(); + + CheckHits.checkEqual(q, tdc.scoreDocs, tdc2.scoreDocs); + + reader.close(); + dir.close(); + } + public void testTotalHits() throws Exception { Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE)); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollector.java b/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollector.java index 5846733cdf48..2f5599d94ab1 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollector.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollector.java @@ -20,6 +20,10 @@ import java.io.IOException; import java.util.Arrays; import java.util.Comparator; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field.Store; @@ -37,6 +41,7 @@ import org.apache.lucene.search.FieldValueHitQueue.Entry; import org.apache.lucene.store.Directory; import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.util.NamedThreadFactory; import org.apache.lucene.util.TestUtil; import static org.apache.lucene.search.SortField.FIELD_SCORE; @@ -108,6 +113,37 @@ public void testSort() throws Exception { } } + public void testSharedHitcountCollector() throws Exception { + + ExecutorService service = new ThreadPoolExecutor(4, 4, 0L, TimeUnit.MILLISECONDS, + new LinkedBlockingQueue(), + new 
NamedThreadFactory("TestTopFieldCollector")); + + IndexSearcher concurrentSearcher = new IndexSearcher(ir, service); + + // Two Sort criteria to instantiate the multi/single comparators. + Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC), new Sort() }; + for(int i = 0; i < sort.length; i++) { + Query q = new MatchAllDocsQuery(); + TopDocsCollector tdc = TopFieldCollector.create(sort[i], 10, Integer.MAX_VALUE); + + is.search(q, tdc); + + CollectorManager tsdc = TopFieldCollector.createSharedManager(sort[i], 10, null, Integer.MAX_VALUE); + + TopDocs td = tdc.topDocs(); + TopDocs td2 = (TopDocs) concurrentSearcher.search(q, tsdc); + ScoreDoc[] sd = td.scoreDocs; + for(int j = 0; j < sd.length; j++) { + assertTrue(Float.isNaN(sd[j].score)); + } + + CheckHits.checkEqual(q, td.scoreDocs, td2.scoreDocs); + } + + service.shutdown(); + } + public void testSortWithoutTotalHitTracking() throws Exception { Sort sort = new Sort(SortField.FIELD_DOC); for(int i = 0; i < 2; i++) { diff --git a/lucene/default-nested-ivy-settings.xml b/lucene/default-nested-ivy-settings.xml index cfee7c4f2a64..c9fe95be0ce3 100644 --- a/lucene/default-nested-ivy-settings.xml +++ b/lucene/default-nested-ivy-settings.xml @@ -34,7 +34,7 @@ - + TopGroup.merge method + * using a narrative approach. Use of a creative narrative may seem unusual + * or even silly but the idea behind it is to make it hopefully easier to + * reason about the documents and groups and scores in the test whilst testing + * several scenario permutations. + * + * Imagine: + * + * Each document represents (say) a picture book of an animal. + * We are searching for two books and wish to draw a picture of our own, inspired by the books. + * We think that large animals are easier to draw and therefore order the books by the featured animal's size. + * We think that different colors would make for a good drawing and therefore group the books by the featured animal's color. 
+ * + * Index content: + * + * The documents are in 2 groups ("blue" and "red") and there are 4 documents across 2 shards: + * shard 1 (blue whale, red ant) and shard 2 (blue dragonfly, red squirrel). + * + * If all documents are present the "blue whale" and the "red squirrel" documents would be returned + * for our drawing since they are the largest animals in their respective groups. + * + * Test permutations (haveBlueWhale, haveRedAnt, haveBlueDragonfly, haveRedSquirrel) arise because + * in the first pass of the search all documents can be present, but + * in the second pass of the search some documents could be missing + * if they have been deleted 'just so' between the two phases. + * + * Additionally a haveAnimal == false condition also represents scenarios where a given + * group has documents on some but not all shards in the collection. + */ + private void narrativeMergeTestImplementation( + boolean haveBlueWhale, + boolean haveRedAnt, + boolean haveBlueDragonfly, + boolean haveRedSquirrel) { + + final String blueGroupValue = "blue"; + final String redGroupValue = "red"; + + final Integer redAntSize = 1; + final Integer blueDragonflySize = 10; + final Integer redSquirrelSize = 100; + final Integer blueWhaleSize = 1000; + + final float redAntScore = redAntSize; + final float blueDragonflyScore = blueDragonflySize; + final float redSquirrelScore = redSquirrelSize; + final float blueWhaleScore = blueWhaleSize; + + final Sort sort = Sort.RELEVANCE; + + final TopGroups shard1TopGroups; + { + final GroupDocs group1 = haveBlueWhale + ? createSingletonGroupDocs(blueGroupValue, new Object[] { blueWhaleSize }, 1 /* docId */, blueWhaleScore, 0 /* shardIndex */) + : createEmptyGroupDocs(blueGroupValue, new Object[] { blueWhaleSize }); + + final GroupDocs group2 = haveRedAnt + ? 
createSingletonGroupDocs(redGroupValue, new Object[] { redAntSize }, 2 /* docId */, redAntScore, 0 /* shardIndex */) + : createEmptyGroupDocs(redGroupValue, new Object[] { redAntSize }); + + shard1TopGroups = new TopGroups( + sort.getSort() /* groupSort */, + sort.getSort() /* withinGroupSort */, + group1.scoreDocs.length + group2.scoreDocs.length /* totalHitCount */, + group1.scoreDocs.length + group2.scoreDocs.length /* totalGroupedHitCount */, + combineGroupDocs(group1, group2) /* groups */, + (haveBlueWhale ? blueWhaleScore : (haveRedAnt ? redAntScore : Float.NaN)) /* maxScore */); + } + + final TopGroups shard2TopGroups; + { + final GroupDocs group1 = haveBlueDragonfly + ? createSingletonGroupDocs(blueGroupValue, new Object[] { blueDragonflySize }, 3 /* docId */, blueDragonflyScore, 1 /* shardIndex */) + : createEmptyGroupDocs(blueGroupValue, new Object[] { blueDragonflySize }); + + final GroupDocs group2 = haveRedSquirrel + ? createSingletonGroupDocs(redGroupValue, new Object[] { redSquirrelSize }, 4 /* docId */, redSquirrelScore, 1 /* shardIndex */) + : createEmptyGroupDocs(redGroupValue, new Object[] { redSquirrelSize }); + + shard2TopGroups = new TopGroups( + sort.getSort() /* groupSort */, + sort.getSort() /* withinGroupSort */, + group1.scoreDocs.length + group2.scoreDocs.length /* totalHitCount */, + group1.scoreDocs.length + group2.scoreDocs.length /* totalGroupedHitCount */, + combineGroupDocs(group1, group2) /* groups */, + (haveRedSquirrel ? redSquirrelScore : (haveBlueDragonfly ? blueDragonflyScore : Float.NaN)) /* maxScore */); + } + + final TopGroups mergedTopGroups = TopGroups.merge( + combineTopGroups(shard1TopGroups, shard2TopGroups), + sort /* groupSort */, + sort /* docSort */, + 0 /* docOffset */, + 2 /* docTopN */, + TopGroups.ScoreMergeMode.None); + assertNotNull(mergedTopGroups); + + final int expectedCount = + (haveBlueWhale ? 1 : 0) + + (haveRedAnt ? 1 : 0) + + (haveBlueDragonfly ? 1 : 0) + + (haveRedSquirrel ? 
1 : 0); + + assertEquals(expectedCount, mergedTopGroups.totalHitCount); + assertEquals(expectedCount, mergedTopGroups.totalGroupedHitCount); + + assertEquals(2, mergedTopGroups.groups.length); + { + assertEquals(blueGroupValue, mergedTopGroups.groups[0].groupValue); + final float expectedBlueMaxScore = + (haveBlueWhale ? blueWhaleScore : (haveBlueDragonfly ? blueDragonflyScore : Float.MIN_VALUE)); + checkMaxScore(expectedBlueMaxScore, mergedTopGroups.groups[0].maxScore); + } + { + assertEquals(redGroupValue, mergedTopGroups.groups[1].groupValue); + final float expectedRedMaxScore = + (haveRedSquirrel ? redSquirrelScore : (haveRedAnt ? redAntScore : Float.MIN_VALUE)); + checkMaxScore(expectedRedMaxScore, mergedTopGroups.groups[1].maxScore); + } + + final float expectedMaxScore = + (haveBlueWhale ? blueWhaleScore + : (haveRedSquirrel ? redSquirrelScore + : (haveBlueDragonfly ? blueDragonflyScore + : (haveRedAnt ? redAntScore + : Float.MIN_VALUE)))); + checkMaxScore(expectedMaxScore, mergedTopGroups.maxScore); + } + + private static void checkMaxScore(float expected, float actual) { + if (Float.isNaN(expected)) { + assertTrue(Float.isNaN(actual)); + } else { + assertEquals(expected, actual, 0.0); + } + } + + // helper methods + + private static GroupDocs createEmptyGroupDocs(String groupValue, Object[] groupSortValues) { + return new GroupDocs( + Float.NaN /* score */, + Float.NaN /* maxScore */, + new TotalHits(0, TotalHits.Relation.EQUAL_TO), + new ScoreDoc[0], + groupValue, + groupSortValues); + } + + private static GroupDocs createSingletonGroupDocs(String groupValue, Object[] groupSortValues, + int docId, float docScore, int shardIndex) { + return new GroupDocs( + Float.NaN /* score */, + docScore /* maxScore */, + new TotalHits(1, TotalHits.Relation.EQUAL_TO), + new ScoreDoc[] { new ScoreDoc(docId, docScore, shardIndex) }, + groupValue, + groupSortValues); + } + + private static GroupDocs[] combineGroupDocs(GroupDocs group0, GroupDocs group1) { + 
@SuppressWarnings({"unchecked","rawtypes"}) + final GroupDocs[] groups = new GroupDocs[2]; + groups[0] = group0; + groups[1] = group1; + return groups; + } + + private static TopGroups[] combineTopGroups(TopGroups group0, TopGroups group1) { + @SuppressWarnings({"unchecked","rawtypes"}) + final TopGroups[] groups = new TopGroups[2]; + groups[0] = group0; + groups[1] = group1; + return groups; + } + +} diff --git a/lucene/ivy-versions.properties b/lucene/ivy-versions.properties index 926ef8b7b21e..c61284fc6cb7 100644 --- a/lucene/ivy-versions.properties +++ b/lucene/ivy-versions.properties @@ -8,6 +8,8 @@ com.carrotsearch.randomizedtesting.version = 2.7.2 /com.carrotsearch.randomizedtesting/junit4-ant = ${com.carrotsearch.randomizedtesting.version} /com.carrotsearch.randomizedtesting/randomizedtesting-runner = ${com.carrotsearch.randomizedtesting.version} +/com.carrotsearch.thirdparty/simple-xml-safe = 2.7.1 + /com.carrotsearch/hppc = 0.8.1 /com.cybozu.labs/langdetect = 1.1-20120112 @@ -15,14 +17,14 @@ com.carrotsearch.randomizedtesting.version = 2.7.2 /com.epam/parso = 2.0.9 -com.fasterxml.jackson.core.version = 2.9.8 +com.fasterxml.jackson.core.version = 2.9.9 /com.fasterxml.jackson.core/jackson-annotations = ${com.fasterxml.jackson.core.version} /com.fasterxml.jackson.core/jackson-core = ${com.fasterxml.jackson.core.version} -/com.fasterxml.jackson.core/jackson-databind = ${com.fasterxml.jackson.core.version} +/com.fasterxml.jackson.core/jackson-databind = 2.9.9.3 /com.fasterxml.jackson.dataformat/jackson-dataformat-smile = ${com.fasterxml.jackson.core.version} /com.fasterxml.jackson.dataformat/jackson-dataformat-cbor = ${com.fasterxml.jackson.core.version} -/com.github.ben-manes.caffeine/caffeine = 2.4.0 +/com.github.ben-manes.caffeine/caffeine = 2.8.0 /com.github.virtuald/curvesapi = 1.04 /com.google.guava/guava = 25.1-jre @@ -52,7 +54,6 @@ com.sun.jersey.version = 1.19 /com.tdunning/t-digest = 3.1 /com.vaadin.external.google/android-json = 0.0.20131108.vaadin1 
-/commons-beanutils/commons-beanutils = 1.9.3 /commons-cli/commons-cli = 1.2 /commons-codec/commons-codec = 1.11 /commons-collections/commons-collections = 3.2.2 @@ -72,8 +73,16 @@ io.jaegertracing.version = 0.35.5 /io.jaegertracing/jaeger-core = ${io.jaegertracing.version} /io.jaegertracing/jaeger-thrift = ${io.jaegertracing.version} -io.netty.netty-all.version = 4.0.52.Final -/io.netty/netty-all = ${io.netty.netty-all.version} +io.netty.netty.version = 4.1.29.Final +/io.netty/netty-all = ${io.netty.netty.version} +/io.netty/netty-buffer = ${io.netty.netty.version} +/io.netty/netty-codec = ${io.netty.netty.version} +/io.netty/netty-common = ${io.netty.netty.version} +/io.netty/netty-handler = ${io.netty.netty.version} +/io.netty/netty-resolver = ${io.netty.netty.version} +/io.netty/netty-transport = ${io.netty.netty.version} +/io.netty/netty-transport-native-epoll = ${io.netty.netty.version} +/io.netty/netty-transport-native-unix-common = ${io.netty.netty.version} io.opentracing.version = 0.33.0 /io.opentracing/opentracing-api = ${io.opentracing.version} @@ -216,9 +225,10 @@ org.apache.zookeeper.version = 3.5.5 /org.apache.zookeeper/zookeeper = ${org.apache.zookeeper.version} /org.apache.zookeeper/zookeeper-jute = ${org.apache.zookeeper.version} -# v1.6.0-alpha.5 of asciidoctor-ant includes asciidoctorj-pdf 1.5.0-alpha.16, -# which is the same as asciidoctor-pdf 1.5.0-alpha.16 -/org.asciidoctor/asciidoctor-ant = 1.6.0-alpha.5 +# v1.6.2 of asciidoctor-ant includes asciidoctorj 1.6.2, which uses +# asciidoctor 1.5.8, and asciidoctorj-pdf 1.5.0-alpha.16, which is the same +# as asciidoctor-pdf 1.5.0-alpha.16 +/org.asciidoctor/asciidoctor-ant = 1.6.2 /org.aspectj/aspectjrt = 1.8.0 @@ -305,8 +315,6 @@ org.restlet.jee.version = 2.3.0 /org.rrd4j/rrd4j = 3.5 -/org.simpleframework/simple-xml = 2.7.1 - org.slf4j.version = 1.7.24 /org.slf4j/jcl-over-slf4j = ${org.slf4j.version} /org.slf4j/jul-to-slf4j = ${org.slf4j.version} diff --git 
a/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java index cd1f1f0ee09a..183bca19ca98 100644 --- a/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java +++ b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java @@ -190,6 +190,13 @@ public Scorer scorer(LeafReaderContext context) throws IOException { } } + @Override + public boolean isCacheable(LeafReaderContext ctx) { + // disable caching because this query relies on a top reader context + // and holds a bitset of matching ordinals that cannot be accounted in + // the memory used by the cache + return false; + } } final static class OrdinalMapScorer extends BaseGlobalOrdinalScorer { diff --git a/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/MenuBarProvider.java b/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/MenuBarProvider.java index 3090283868e1..90b2d4fb5853 100644 --- a/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/MenuBarProvider.java +++ b/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/MenuBarProvider.java @@ -269,7 +269,7 @@ void showAboutDialog(ActionEvent e) { } void showExportTermsDialog(ActionEvent e) { - new DialogOpener<>(exportTermsDialogFactory).open("Export terms", 600, 400, + new DialogOpener<>(exportTermsDialogFactory).open("Export terms", 600, 450, factory -> { }); } diff --git a/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/menubar/ExportTermsDialogFactory.java b/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/menubar/ExportTermsDialogFactory.java index 07fe3cf4ce9c..471094223c1e 100644 --- a/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/menubar/ExportTermsDialogFactory.java +++ 
b/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/menubar/ExportTermsDialogFactory.java @@ -38,8 +38,10 @@ import java.io.File; import java.io.IOException; import java.lang.invoke.MethodHandles; +import java.util.Arrays; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; +import java.util.stream.Stream; import org.apache.logging.log4j.Logger; import org.apache.lucene.luke.app.IndexHandler; @@ -76,6 +78,8 @@ public final class ExportTermsDialogFactory implements DialogOpener.DialogFactor private final JComboBox fieldCombo = new JComboBox(); + private final JComboBox delimiterCombo = new JComboBox(); + private final JTextField destDir = new JTextField(); private final JLabel statusLbl = new JLabel(); @@ -88,6 +92,8 @@ public final class ExportTermsDialogFactory implements DialogOpener.DialogFactor private IndexTools toolsModel; + private String selectedDelimiter; + public synchronized static ExportTermsDialogFactory getInstance() throws IOException { if (instance == null) { instance = new ExportTermsDialogFactory(); @@ -99,6 +105,8 @@ private ExportTermsDialogFactory() throws IOException { this.prefs = PreferencesFactory.getInstance(); this.indexHandler = IndexHandler.getInstance(); indexHandler.addObserver(new Observer()); + Stream.of(Delimiter.values()).forEachOrdered(delimiterVal -> delimiterCombo.addItem(delimiterVal.getDescription())); + delimiterCombo.setSelectedItem(Delimiter.COMMA.getDescription());//Set default delimiter } @Override @@ -120,6 +128,7 @@ private JPanel content() { panel.add(currentOpenIndexPanel()); panel.add(fieldComboPanel()); panel.add(destinationDirPanel()); + panel.add(delimiterComboPanel()); panel.add(statusPanel()); panel.add(actionButtonsPanel()); @@ -138,6 +147,14 @@ private JPanel currentOpenIndexPanel() { return panel; } + private JPanel delimiterComboPanel() { + JPanel panel = new JPanel(new GridLayout(2, 1)); + panel.setOpaque(false); + panel.add(new JLabel("Select 
Delimiter: ")); + panel.add(delimiterCombo); + return panel; + } + private JPanel fieldComboPanel() { JPanel panel = new JPanel(new GridLayout(2, 1)); panel.setOpaque(false); @@ -225,9 +242,11 @@ protected Void doInBackground() { statusLbl.setText("Exporting..."); indicatorLbl.setVisible(true); String field = (String) fieldCombo.getSelectedItem(); + selectedDelimiter = Delimiter.getSelectedDelimiterValue((String) delimiterCombo.getSelectedItem()); + String directory = destDir.getText(); try { - filename = toolsModel.exportTerms(directory, field); + filename = toolsModel.exportTerms(directory, field, selectedDelimiter); } catch (LukeException e) { log.error("Error while exporting terms from field " + field, e); statusLbl.setText(MessageUtils.getLocalizedMessage("export.terms.label.error", e.getMessage())); @@ -245,7 +264,7 @@ protected Void doInBackground() { protected void done() { indicatorLbl.setVisible(false); if (filename != null) { - statusLbl.setText(MessageUtils.getLocalizedMessage("export.terms.label.success", filename, "[term],[doc frequency]")); + statusLbl.setText(MessageUtils.getLocalizedMessage("export.terms.label.success", filename, "[term]" + selectedDelimiter + "[doc frequency]")); } } }; @@ -272,4 +291,35 @@ public void closeIndex() { } + /** + * Delimiters that can be selected + */ + private enum Delimiter { + COMMA("Comma", ","), WHITESPACE("Whitespace", " "), TAB("Tab", "\t"); + + private final String description; + private final String separator; + + private Delimiter(final String description, final String separator) { + this.description = description; + this.separator = separator; + } + + String getDescription() { + return this.description; + } + + String getSeparator() { + return this.separator; + } + + static String getSelectedDelimiterValue(String delimiter) { + return Arrays.stream(Delimiter.values()) + .filter(e -> e.description.equals(delimiter)) + .findFirst() + .orElse(COMMA) + .getSeparator(); + } + } + } diff --git 
a/lucene/luke/src/java/org/apache/lucene/luke/models/tools/IndexTools.java b/lucene/luke/src/java/org/apache/lucene/luke/models/tools/IndexTools.java index 72d5384c2e05..a4f4d12052e1 100644 --- a/lucene/luke/src/java/org/apache/lucene/luke/models/tools/IndexTools.java +++ b/lucene/luke/src/java/org/apache/lucene/luke/models/tools/IndexTools.java @@ -100,7 +100,8 @@ public interface IndexTools { * Export terms from given field into a new file on the destination directory * @param destDir - destination directory * @param field - field name + * @param delimiter - delimiter to separate terms and their frequency * @return The file containing the export */ - String exportTerms(String destDir, String field); + String exportTerms(String destDir, String field, String delimiter); } diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/tools/IndexToolsImpl.java b/lucene/luke/src/java/org/apache/lucene/luke/models/tools/IndexToolsImpl.java index f4ca89ed8115..4fdd6e3f96a7 100644 --- a/lucene/luke/src/java/org/apache/lucene/luke/models/tools/IndexToolsImpl.java +++ b/lucene/luke/src/java/org/apache/lucene/luke/models/tools/IndexToolsImpl.java @@ -193,7 +193,7 @@ public void createNewIndex(String dataDir) { } } - public String exportTerms(String destDir, String field) { + public String exportTerms(String destDir, String field, String delimiter) { String filename = "terms_" + field + "_" + System.currentTimeMillis() + ".out"; Path path = Paths.get(destDir, filename); try { @@ -205,7 +205,7 @@ public String exportTerms(String destDir, String field) { TermsEnum termsEnum = terms.iterator(); BytesRef term; while (!Thread.currentThread().isInterrupted() && (term = termsEnum.next()) != null) { - writer.write(String.format(Locale.US, "%s,%d\n", term.utf8ToString(), +termsEnum.docFreq())); + writer.write(String.format(Locale.US, "%s%s%d\n", term.utf8ToString(), delimiter, +termsEnum.docFreq())); } return path.toString(); } diff --git 
a/lucene/luke/src/test/org/apache/lucene/luke/models/overview/OverviewTestBase.java b/lucene/luke/src/test/org/apache/lucene/luke/models/overview/OverviewTestBase.java index 5554d7099417..f15161742505 100644 --- a/lucene/luke/src/test/org/apache/lucene/luke/models/overview/OverviewTestBase.java +++ b/lucene/luke/src/test/org/apache/lucene/luke/models/overview/OverviewTestBase.java @@ -28,6 +28,8 @@ import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.store.Directory; import org.apache.lucene.util.LuceneTestCase; @@ -55,7 +57,9 @@ private Path createIndex() throws IOException { Path indexDir = createTempDir(); Directory dir = newFSDirectory(indexDir); - RandomIndexWriter writer = new RandomIndexWriter(random(), dir, new MockAnalyzer(random())); + IndexWriterConfig config = new IndexWriterConfig(new MockAnalyzer(random())); + config.setMergePolicy(NoMergePolicy.INSTANCE); // see LUCENE-8998 + RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc1 = new Document(); doc1.add(newStringField("f1", "1", Field.Store.NO)); diff --git a/lucene/queries/src/java/org/apache/lucene/queries/mlt/MoreLikeThis.java b/lucene/queries/src/java/org/apache/lucene/queries/mlt/MoreLikeThis.java index 7c077e53fe21..37643b61c6b7 100644 --- a/lucene/queries/src/java/org/apache/lucene/queries/mlt/MoreLikeThis.java +++ b/lucene/queries/src/java/org/apache/lucene/queries/mlt/MoreLikeThis.java @@ -649,13 +649,17 @@ private Query createQuery(PriorityQueue q) { */ private PriorityQueue createQueue(Map> perFieldTermFrequencies) throws IOException { // have collected all words in doc and their freqs - int numDocs = ir.numDocs(); final int limit = Math.min(maxQueryTerms, this.getTermsCount(perFieldTermFrequencies)); 
FreqQ queue = new FreqQ(limit); // will order words by score for (Map.Entry> entry : perFieldTermFrequencies.entrySet()) { Map perWordTermFrequencies = entry.getValue(); String fieldName = entry.getKey(); + long numDocs = ir.getDocCount(fieldName); + if(numDocs == -1) { + numDocs = ir.numDocs(); + } + for (Map.Entry tfEntry : perWordTermFrequencies.entrySet()) { // for every word String word = tfEntry.getKey(); int tf = tfEntry.getValue().x; // term freq in the source doc diff --git a/lucene/queries/src/test/org/apache/lucene/queries/mlt/TestMoreLikeThis.java b/lucene/queries/src/test/org/apache/lucene/queries/mlt/TestMoreLikeThis.java index 2061068bc72f..1c246f5c211e 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/mlt/TestMoreLikeThis.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/mlt/TestMoreLikeThis.java @@ -23,6 +23,7 @@ import java.util.Collection; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Locale; import java.util.Map; @@ -130,6 +131,64 @@ private void addDoc(RandomIndexWriter writer, String fieldName, String[] texts) writer.addDocument(doc); } + public void testSmallSampleFromCorpus() throws Throwable { + // add series of docs with terms of decreasing df + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir); + for (int i = 0; i < 1980; i++) { + Document doc = new Document(); + doc.add(newTextField("text", "filler", Field.Store.YES)); + writer.addDocument(doc); + } + for (int i = 0; i < 18; i++) { + Document doc = new Document(); + doc.add(newTextField("one_percent", "all", Field.Store.YES)); + writer.addDocument(doc); + } + for (int i = 0; i < 2; i++) { + Document doc = new Document(); + doc.add(newTextField("one_percent", "all", Field.Store.YES)); + doc.add(newTextField("one_percent", "tenth", Field.Store.YES)); + writer.addDocument(doc); + } + IndexReader reader = writer.getReader(); + writer.close(); + + // setup MLT query + 
MoreLikeThis mlt = new MoreLikeThis(reader); + Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false); + mlt.setAnalyzer(analyzer); + mlt.setMaxQueryTerms(3); + mlt.setMinDocFreq(1); + mlt.setMinTermFreq(1); + mlt.setMinWordLen(1); + mlt.setFieldNames(new String[]{"one_percent"}); + + BooleanQuery query = (BooleanQuery) mlt.like("one_percent", new StringReader("tenth tenth all")); + Collection clauses = query.clauses(); + + assertTrue(clauses.size() == 2); + Term term = ((TermQuery) ((List) clauses).get(0).getQuery()).getTerm(); + assertTrue(term.text().equals("all")); + term = ((TermQuery) ((List) clauses).get(1).getQuery()).getTerm(); + assertTrue(term.text().equals("tenth")); + + + query = (BooleanQuery) mlt.like("one_percent", new StringReader("tenth all all")); + clauses = query.clauses(); + + assertTrue(clauses.size() == 2); + term = ((TermQuery) ((List) clauses).get(0).getQuery()).getTerm(); + assertTrue(term.text().equals("all")); + term = ((TermQuery) ((List) clauses).get(1).getQuery()).getTerm(); + assertTrue(term.text().equals("tenth")); + + // clean up + reader.close(); + dir.close(); + analyzer.close(); + } + public void testBoostFactor() throws Throwable { Map originalValues = getOriginalValues(); mlt.setFieldNames(new String[] {"text"}); diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java index 77680cc56f8f..b4703120dd5e 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java @@ -16,34 +16,9 @@ */ package org.apache.lucene.queryparser.xml; -import javax.xml.XMLConstants; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import java.io.InputStream; -import java.util.Locale; - import 
org.apache.lucene.analysis.Analyzer; import org.apache.lucene.queryparser.classic.QueryParser; -import org.apache.lucene.queryparser.xml.builders.BooleanQueryBuilder; -import org.apache.lucene.queryparser.xml.builders.BoostingTermBuilder; -import org.apache.lucene.queryparser.xml.builders.ConstantScoreQueryBuilder; -import org.apache.lucene.queryparser.xml.builders.DisjunctionMaxQueryBuilder; -import org.apache.lucene.queryparser.xml.builders.MatchAllDocsQueryBuilder; -import org.apache.lucene.queryparser.xml.builders.PointRangeQueryBuilder; -import org.apache.lucene.queryparser.xml.builders.RangeQueryBuilder; -import org.apache.lucene.queryparser.xml.builders.SpanFirstBuilder; -import org.apache.lucene.queryparser.xml.builders.SpanNearBuilder; -import org.apache.lucene.queryparser.xml.builders.SpanNotBuilder; -import org.apache.lucene.queryparser.xml.builders.SpanOrBuilder; -import org.apache.lucene.queryparser.xml.builders.SpanOrTermsBuilder; -import org.apache.lucene.queryparser.xml.builders.SpanPositionRangeBuilder; -import org.apache.lucene.queryparser.xml.builders.SpanQueryBuilder; -import org.apache.lucene.queryparser.xml.builders.SpanQueryBuilderFactory; -import org.apache.lucene.queryparser.xml.builders.SpanTermBuilder; -import org.apache.lucene.queryparser.xml.builders.TermQueryBuilder; -import org.apache.lucene.queryparser.xml.builders.TermsQueryBuilder; -import org.apache.lucene.queryparser.xml.builders.UserInputQueryBuilder; +import org.apache.lucene.queryparser.xml.builders.*; import org.apache.lucene.search.Query; import org.apache.lucene.search.spans.SpanQuery; import org.w3c.dom.Document; @@ -52,6 +27,14 @@ import org.xml.sax.ErrorHandler; import org.xml.sax.SAXException; +import javax.xml.XMLConstants; +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; + +import java.io.InputStream; +import java.util.Locale; + /** * Assembles a QueryBuilder which uses 
only core Lucene Query objects */ diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java index cb45dc90993b..4faf6e84b1f5 100644 --- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java +++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java @@ -16,9 +16,6 @@ */ package org.apache.lucene.queryparser.xml; -import java.io.IOException; -import java.io.InputStream; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.analysis.MockTokenFilter; @@ -35,6 +32,9 @@ import org.junit.AfterClass; import org.xml.sax.SAXException; +import java.io.IOException; +import java.io.InputStream; + public class TestCoreParser extends LuceneTestCase { final private static String defaultField = "contents"; diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/FloatPointNearestNeighbor.java b/lucene/sandbox/src/java/org/apache/lucene/document/FloatPointNearestNeighbor.java index eb3db1aa5930..789e01a84d50 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/document/FloatPointNearestNeighbor.java +++ b/lucene/sandbox/src/java/org/apache/lucene/document/FloatPointNearestNeighbor.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import java.util.PriorityQueue; @@ -46,7 +45,6 @@ static class Cell implements Comparable { final byte[] minPacked; final byte[] maxPacked; final BKDReader.IndexTree index; - /** The closest possible distance^2 of all points in this cell */ final double distanceSquared; @@ -75,21 +73,15 @@ private static class NearestVisitor implements PointValues.IntersectVisitor { final int topN; final PriorityQueue hitQueue; final float[] origin; - private int dims; - private int updateMinMaxCounter; - private float[] min; - private float[] max; 
- + final private int dims; + double bottomNearestDistanceSquared = Double.POSITIVE_INFINITY; + int bottomNearestDistanceDoc = Integer.MAX_VALUE; public NearestVisitor(PriorityQueue hitQueue, int topN, float[] origin) { this.hitQueue = hitQueue; this.topN = topN; this.origin = origin; - dims = origin.length; - min = new float[dims]; - max = new float[dims]; - Arrays.fill(min, Float.NEGATIVE_INFINITY); - Arrays.fill(max, Float.POSITIVE_INFINITY); + this.dims = origin.length; } @Override @@ -97,110 +89,59 @@ public void visit(int docID) { throw new AssertionError(); } - private static final int MANTISSA_BITS = 23; - - /** - * Returns the minimum value that will change the given distance when added to it. - * - * This value is calculated from the distance exponent reduced by (at most) 23, - * the number of bits in a float mantissa. This is necessary when the result of - * subtracting/adding the distance in a single dimension has an exponent that - * differs significantly from that of the distance value. Without this fudge - * factor (i.e. only subtracting/adding the distance), cells and values can be - * inappropriately judged as outside the search radius. - */ - private float getMinDelta(float distance) { - int exponent = Float.floatToIntBits(distance) >> MANTISSA_BITS; // extract biased exponent (distance is positive) - if (exponent == 0) { - return Float.MIN_VALUE; - } else { - exponent = exponent <= MANTISSA_BITS ? 
1 : exponent - MANTISSA_BITS; // Avoid underflow - return Float.intBitsToFloat(exponent << MANTISSA_BITS); - } - } - - private void maybeUpdateMinMax() { - if (updateMinMaxCounter < 1024 || (updateMinMaxCounter & 0x3F) == 0x3F) { - NearestHit hit = hitQueue.peek(); - float distance = (float)Math.sqrt(hit.distanceSquared); - float minDelta = getMinDelta(distance); - // String oldMin = Arrays.toString(min); - // String oldMax = Arrays.toString(max); - for (int d = 0 ; d < dims ; ++d) { - min[d] = (origin[d] - distance) - minDelta; - max[d] = (origin[d] + distance) + minDelta; - // System.out.println("origin[" + d + "] (" + origin[d] + ") - distance (" + distance + ") - minDelta (" + minDelta + ") = min[" + d + "] (" + min[d] + ")"); - // System.out.println("origin[" + d + "] (" + origin[d] + ") + distance (" + distance + ") + minDelta (" + minDelta + ") = max[" + d + "] (" + max[d] + ")"); - } - // System.out.println("maybeUpdateMinMax: min: " + oldMin + " -> " + Arrays.toString(min) + " max: " + oldMax + " -> " + Arrays.toString(max)); - } - ++updateMinMaxCounter; - } - @Override public void visit(int docID, byte[] packedValue) { - // System.out.println("visit docID=" + docID + " liveDocs=" + curLiveDocs); - + // System.out.println("visit docID=" + docID + " liveDocs=" + curLiveDocs);; if (curLiveDocs != null && curLiveDocs.get(docID) == false) { return; } - float[] docPoint = new float[dims]; + double distanceSquared = 0.0d; for (int d = 0, offset = 0 ; d < dims ; ++d, offset += Float.BYTES) { - docPoint[d] = FloatPoint.decodeDimension(packedValue, offset); - if (docPoint[d] > max[d] || docPoint[d] < min[d]) { - - // if (docPoint[d] > max[d]) { - // System.out.println(" skipped because docPoint[" + d + "] (" + docPoint[d] + ") > max[" + d + "] (" + max[d] + ")"); - // } else { - // System.out.println(" skipped because docPoint[" + d + "] (" + docPoint[d] + ") < min[" + d + "] (" + min[d] + ")"); - // } - + double diff = (double) 
FloatPoint.decodeDimension(packedValue, offset) - (double) origin[d]; + distanceSquared += diff * diff; + if (distanceSquared > bottomNearestDistanceSquared) { return; } } - - double distanceSquared = euclideanDistanceSquared(origin, docPoint); // System.out.println(" visit docID=" + docID + " distanceSquared=" + distanceSquared + " value: " + Arrays.toString(docPoint)); int fullDocID = curDocBase + docID; if (hitQueue.size() == topN) { // queue already full - NearestHit bottom = hitQueue.peek(); + if (distanceSquared == bottomNearestDistanceSquared && fullDocID > bottomNearestDistanceDoc) { + return; + } + NearestHit bottom = hitQueue.poll(); // System.out.println(" bottom distanceSquared=" + bottom.distanceSquared); - if (distanceSquared < bottom.distanceSquared - // we don't collect docs in order here, so we must also test the tie-break case ourselves: - || (distanceSquared == bottom.distanceSquared && fullDocID < bottom.docID)) { - hitQueue.poll(); - bottom.docID = fullDocID; - bottom.distanceSquared = distanceSquared; - hitQueue.offer(bottom); + bottom.docID = fullDocID; + bottom.distanceSquared = distanceSquared; + hitQueue.offer(bottom); + updateBottomNearestDistance(); // System.out.println(" ** keep1, now bottom=" + bottom); - maybeUpdateMinMax(); - } } else { NearestHit hit = new NearestHit(); hit.docID = fullDocID; hit.distanceSquared = distanceSquared; hitQueue.offer(hit); + if (hitQueue.size() == topN) { + updateBottomNearestDistance(); + } // System.out.println(" ** keep2, new addition=" + hit); } } + private void updateBottomNearestDistance() { + NearestHit newBottom = hitQueue.peek(); + bottomNearestDistanceSquared = newBottom.distanceSquared; + bottomNearestDistanceDoc = newBottom.docID; + } + @Override public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - for (int d = 0, offset = 0; d < dims; ++d, offset += Float.BYTES) { - float cellMaxAtDim = FloatPoint.decodeDimension(maxPackedValue, offset); - if (cellMaxAtDim < 
min[d]) { - // System.out.println(" skipped because cell max at " + d + " (" + cellMaxAtDim + ") < visitor.min[" + d + "] (" + min[d] + ")"); - return PointValues.Relation.CELL_OUTSIDE_QUERY; - } - float cellMinAtDim = FloatPoint.decodeDimension(minPackedValue, offset); - if (cellMinAtDim > max[d]) { - // System.out.println(" skipped because cell min at " + d + " (" + cellMinAtDim + ") > visitor.max[" + d + "] (" + max[d] + ")"); - return PointValues.Relation.CELL_OUTSIDE_QUERY; - } + if (hitQueue.size() == topN && pointToRectangleDistanceSquared(minPackedValue, maxPackedValue, origin) > bottomNearestDistanceSquared) { + return PointValues.Relation.CELL_OUTSIDE_QUERY; } return PointValues.Relation.CELL_CROSSES_QUERY; } @@ -252,33 +193,31 @@ private static NearestHit[] nearest(List readers, List liveDocs states.add(state); cellQueue.offer(new Cell(state.index, i, reader.getMinPackedValue(), reader.getMaxPackedValue(), - approxBestDistanceSquared(minPackedValue, maxPackedValue, origin))); + pointToRectangleDistanceSquared(minPackedValue, maxPackedValue, origin))); } while (cellQueue.size() > 0) { Cell cell = cellQueue.poll(); // System.out.println(" visit " + cell); - // TODO: if we replace approxBestDistance with actualBestDistance, we can put an opto here to break once this "best" cell is fully outside of the hitQueue bottom's radius: - BKDReader reader = readers.get(cell.readerIndex); + if (cell.distanceSquared > visitor.bottomNearestDistanceSquared) { + break; + } + BKDReader reader = readers.get(cell.readerIndex); if (cell.index.isLeafNode()) { // System.out.println(" leaf"); // Leaf block: visit all points and possibly collect them: visitor.curDocBase = docBases.get(cell.readerIndex); visitor.curLiveDocs = liveDocs.get(cell.readerIndex); reader.visitLeafBlockValues(cell.index, states.get(cell.readerIndex)); + + //assert hitQueue.peek().distanceSquared >= cell.distanceSquared; // System.out.println(" now " + hitQueue.size() + " hits"); } else { // 
System.out.println(" non-leaf"); // Non-leaf block: split into two cells and put them back into the queue: - if (hitQueue.size() == topN) { - if (visitor.compare(cell.minPacked, cell.maxPacked) == PointValues.Relation.CELL_OUTSIDE_QUERY) { - // this cell is outside our search radius; don't bother exploring any more - continue; - } - } BytesRef splitValue = BytesRef.deepCopyOf(cell.index.getSplitDimValue()); int splitDim = cell.index.getSplitDim(); @@ -288,15 +227,19 @@ private static NearestHit[] nearest(List readers, List liveDocs System.arraycopy(splitValue.bytes, splitValue.offset, splitPackedValue, splitDim * bytesPerDim, bytesPerDim); cell.index.pushLeft(); - cellQueue.offer(new Cell(cell.index, cell.readerIndex, cell.minPacked, splitPackedValue, - approxBestDistanceSquared(cell.minPacked, splitPackedValue, origin))); + double distanceLeft = pointToRectangleDistanceSquared(cell.minPacked, splitPackedValue, origin); + if (distanceLeft <= visitor.bottomNearestDistanceSquared) { + cellQueue.offer(new Cell(cell.index, cell.readerIndex, cell.minPacked, splitPackedValue, distanceLeft)); + } splitPackedValue = cell.minPacked.clone(); System.arraycopy(splitValue.bytes, splitValue.offset, splitPackedValue, splitDim * bytesPerDim, bytesPerDim); newIndex.pushRight(); - cellQueue.offer(new Cell(newIndex, cell.readerIndex, splitPackedValue, cell.maxPacked, - approxBestDistanceSquared(splitPackedValue, cell.maxPacked, origin))); + double distanceRight = pointToRectangleDistanceSquared(splitPackedValue, cell.maxPacked, origin); + if (distanceRight <= visitor.bottomNearestDistanceSquared) { + cellQueue.offer(new Cell(newIndex, cell.readerIndex, splitPackedValue, cell.maxPacked, distanceRight)); + } } } @@ -306,44 +249,27 @@ private static NearestHit[] nearest(List readers, List liveDocs hits[downTo] = hitQueue.poll(); downTo--; } + //System.out.println(visitor.comp); return hits; } - private static double approxBestDistanceSquared(byte[] minPackedValue, byte[] maxPackedValue, 
float[] value) { - boolean insideCell = true; - float[] min = new float[value.length]; - float[] max = new float[value.length]; - double[] closest = new double[value.length]; + private static double pointToRectangleDistanceSquared(byte[] minPackedValue, byte[] maxPackedValue, float[] value) { + double sumOfSquaredDiffs = 0.0d; for (int i = 0, offset = 0 ; i < value.length ; ++i, offset += Float.BYTES) { - min[i] = FloatPoint.decodeDimension(minPackedValue, offset); - max[i] = FloatPoint.decodeDimension(maxPackedValue, offset); - if (insideCell) { - if (value[i] < min[i] || value[i] > max[i]) { - insideCell = false; - } + double min = FloatPoint.decodeDimension(minPackedValue, offset); + if (value[i] < min) { + double diff = min - (double)value[i]; + sumOfSquaredDiffs += diff * diff; + continue; + } + double max = FloatPoint.decodeDimension(maxPackedValue, offset); + if (value[i] > max) { + double diff = max - (double)value[i]; + sumOfSquaredDiffs += diff * diff; } - double minDiff = Math.abs((double)value[i] - (double)min[i]); - double maxDiff = Math.abs((double)value[i] - (double)max[i]); - closest[i] = minDiff < maxDiff ? minDiff : maxDiff; - } - if (insideCell) { - return 0.0f; - } - double sumOfSquaredDiffs = 0.0d; - for (int d = 0 ; d < value.length ; ++d) { - sumOfSquaredDiffs += closest[d] * closest[d]; } return sumOfSquaredDiffs; } - - static double euclideanDistanceSquared(float[] a, float[] b) { - double sumOfSquaredDifferences = 0.0d; - for (int d = 0 ; d < a.length ; ++d) { - double diff = (double)a[d] - (double)b[d]; - sumOfSquaredDifferences += diff * diff; - } - return sumOfSquaredDifferences; - } public static TopFieldDocs nearest(IndexSearcher searcher, String field, int topN, float... 
origin) throws IOException { if (topN < 1) { diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeBoundingBoxQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeBoundingBoxQuery.java index 5645645da26a..aa1f93d3ecca 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeBoundingBoxQuery.java +++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeBoundingBoxQuery.java @@ -42,26 +42,31 @@ public LatLonShapeBoundingBoxQuery(String field, QueryRelation queryRelation, do @Override protected Relation relateRangeBBoxToQuery(int minXOffset, int minYOffset, byte[] minTriangle, int maxXOffset, int maxYOffset, byte[] maxTriangle) { + if (queryRelation == QueryRelation.INTERSECTS || queryRelation == QueryRelation.DISJOINT) { + return rectangle2D.intersectRangeBBox(minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle); + } return rectangle2D.relateRangeBBox(minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle); } /** returns true if the query matches the encoded triangle */ @Override - protected boolean queryMatches(byte[] t, int[] scratchTriangle, QueryRelation queryRelation) { + protected boolean queryMatches(byte[] t, ShapeField.DecodedTriangle scratchTriangle, QueryRelation queryRelation) { // decode indexed triangle ShapeField.decodeTriangle(t, scratchTriangle); - int aY = scratchTriangle[0]; - int aX = scratchTriangle[1]; - int bY = scratchTriangle[2]; - int bX = scratchTriangle[3]; - int cY = scratchTriangle[4]; - int cX = scratchTriangle[5]; + int aY = scratchTriangle.aY; + int aX = scratchTriangle.aX; + int bY = scratchTriangle.bY; + int bX = scratchTriangle.bX; + int cY = scratchTriangle.cY; + int cX = scratchTriangle.cX; - if (queryRelation == QueryRelation.WITHIN) { - return rectangle2D.containsTriangle(aX, aY, bX, bY, cX, cY); + switch (queryRelation) { + case INTERSECTS: return rectangle2D.intersectsTriangle(aX, aY, bX, bY, cX, cY); + case WITHIN: 
return rectangle2D.containsTriangle(aX, aY, bX, bY, cX, cY); + case DISJOINT: return rectangle2D.intersectsTriangle(aX, aY, bX, bY, cX, cY) == false; + default: throw new IllegalArgumentException("Unsupported query type :[" + queryRelation + "]"); } - return rectangle2D.intersectsTriangle(aX, aY, bX, bY, cX, cY); } @Override diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeLineQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeLineQuery.java index 93705650e3d2..b6c300fd9425 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeLineQuery.java +++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeLineQuery.java @@ -84,21 +84,22 @@ protected Relation relateRangeBBoxToQuery(int minXOffset, int minYOffset, byte[] } @Override - protected boolean queryMatches(byte[] t, int[] scratchTriangle, QueryRelation queryRelation) { + protected boolean queryMatches(byte[] t, ShapeField.DecodedTriangle scratchTriangle, QueryRelation queryRelation) { ShapeField.decodeTriangle(t, scratchTriangle); - double alat = GeoEncodingUtils.decodeLatitude(scratchTriangle[0]); - double alon = GeoEncodingUtils.decodeLongitude(scratchTriangle[1]); - double blat = GeoEncodingUtils.decodeLatitude(scratchTriangle[2]); - double blon = GeoEncodingUtils.decodeLongitude(scratchTriangle[3]); - double clat = GeoEncodingUtils.decodeLatitude(scratchTriangle[4]); - double clon = GeoEncodingUtils.decodeLongitude(scratchTriangle[5]); + double alat = GeoEncodingUtils.decodeLatitude(scratchTriangle.aY); + double alon = GeoEncodingUtils.decodeLongitude(scratchTriangle.aX); + double blat = GeoEncodingUtils.decodeLatitude(scratchTriangle.bY); + double blon = GeoEncodingUtils.decodeLongitude(scratchTriangle.bX); + double clat = GeoEncodingUtils.decodeLatitude(scratchTriangle.cY); + double clon = GeoEncodingUtils.decodeLongitude(scratchTriangle.cX); - if (queryRelation == QueryRelation.WITHIN) { - return line2D.relateTriangle(alon, 
alat, blon, blat, clon, clat) == Relation.CELL_INSIDE_QUERY; + switch (queryRelation) { + case INTERSECTS: return line2D.relateTriangle(alon, alat, blon, blat, clon, clat) != Relation.CELL_OUTSIDE_QUERY; + case WITHIN: return line2D.relateTriangle(alon, alat, blon, blat, clon, clat) == Relation.CELL_INSIDE_QUERY; + case DISJOINT: return line2D.relateTriangle(alon, alat, blon, blat, clon, clat) == Relation.CELL_OUTSIDE_QUERY; + default: throw new IllegalArgumentException("Unsupported query type :[" + queryRelation + "]"); } - // INTERSECTS - return line2D.relateTriangle(alon, alat, blon, blat, clon, clat) != Relation.CELL_OUTSIDE_QUERY; } @Override diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapePolygonQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapePolygonQuery.java index bcdd3ae5e454..5ba47fa0f9ca 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapePolygonQuery.java +++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapePolygonQuery.java @@ -78,21 +78,22 @@ protected Relation relateRangeBBoxToQuery(int minXOffset, int minYOffset, byte[] } @Override - protected boolean queryMatches(byte[] t, int[] scratchTriangle, QueryRelation queryRelation) { + protected boolean queryMatches(byte[] t, ShapeField.DecodedTriangle scratchTriangle, QueryRelation queryRelation) { ShapeField.decodeTriangle(t, scratchTriangle); - double alat = GeoEncodingUtils.decodeLatitude(scratchTriangle[0]); - double alon = GeoEncodingUtils.decodeLongitude(scratchTriangle[1]); - double blat = GeoEncodingUtils.decodeLatitude(scratchTriangle[2]); - double blon = GeoEncodingUtils.decodeLongitude(scratchTriangle[3]); - double clat = GeoEncodingUtils.decodeLatitude(scratchTriangle[4]); - double clon = GeoEncodingUtils.decodeLongitude(scratchTriangle[5]); + double alat = GeoEncodingUtils.decodeLatitude(scratchTriangle.aY); + double alon = GeoEncodingUtils.decodeLongitude(scratchTriangle.aX); + double blat = 
GeoEncodingUtils.decodeLatitude(scratchTriangle.bY); + double blon = GeoEncodingUtils.decodeLongitude(scratchTriangle.bX); + double clat = GeoEncodingUtils.decodeLatitude(scratchTriangle.cY); + double clon = GeoEncodingUtils.decodeLongitude(scratchTriangle.cX); - if (queryRelation == QueryRelation.WITHIN) { - return poly2D.relateTriangle(alon, alat, blon, blat, clon, clat) == Relation.CELL_INSIDE_QUERY; + switch (queryRelation) { + case INTERSECTS: return poly2D.relateTriangle(alon, alat, blon, blat, clon, clat) != Relation.CELL_OUTSIDE_QUERY; + case WITHIN: return poly2D.relateTriangle(alon, alat, blon, blat, clon, clat) == Relation.CELL_INSIDE_QUERY; + case DISJOINT: return poly2D.relateTriangle(alon, alat, blon, blat, clon, clat) == Relation.CELL_OUTSIDE_QUERY; + default: throw new IllegalArgumentException("Unsupported query type :[" + queryRelation + "]"); } - // INTERSECTS - return poly2D.relateTriangle(alon, alat, blon, blat, clon, clat) != Relation.CELL_OUTSIDE_QUERY; } @Override diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/ShapeField.java b/lucene/sandbox/src/java/org/apache/lucene/document/ShapeField.java index d73b9bcf5e9a..efbe17c62be8 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/document/ShapeField.java +++ b/lucene/sandbox/src/java/org/apache/lucene/document/ShapeField.java @@ -16,6 +16,8 @@ */ package org.apache.lucene.document; +import java.util.Objects; + import org.apache.lucene.geo.GeoUtils; import org.apache.lucene.geo.Line; import org.apache.lucene.geo.Polygon; @@ -56,18 +58,22 @@ private ShapeField() { */ public static class Triangle extends Field { + // constructor for points and lines Triangle(String name, int aXencoded, int aYencoded, int bXencoded, int bYencoded, int cXencoded, int cYencoded) { super(name, TYPE); - setTriangleValue(aXencoded, aYencoded, bXencoded, bYencoded, cXencoded, cYencoded); + setTriangleValue(aXencoded, aYencoded, true, bXencoded, bYencoded, true, cXencoded, cYencoded, true); } + 
Triangle(String name, Tessellator.Triangle t) { super(name, TYPE); - setTriangleValue(t.getEncodedX(0), t.getEncodedY(0), t.getEncodedX(1), t.getEncodedY(1), t.getEncodedX(2), t.getEncodedY(2)); + setTriangleValue(t.getEncodedX(0), t.getEncodedY(0), t.isEdgefromPolygon(0), + t.getEncodedX(1), t.getEncodedY(1), t.isEdgefromPolygon(1), + t.getEncodedX(2), t.getEncodedY(2), t.isEdgefromPolygon(2)); } /** sets the vertices of the triangle as integer encoded values */ - protected void setTriangleValue(int aX, int aY, int bX, int bY, int cX, int cY) { + protected void setTriangleValue(int aX, int aY, boolean abFromShape, int bX, int bY, boolean bcFromShape, int cX, int cY, boolean caFromShape) { final byte[] bytes; if (fieldsData == null) { @@ -76,7 +82,7 @@ protected void setTriangleValue(int aX, int aY, int bX, int bY, int cX, int cY) } else { bytes = ((BytesRef) fieldsData).bytes; } - encodeTriangle(bytes, aY, aX, bY, bX, cY, cX); + encodeTriangle(bytes, aY, aX, abFromShape, bY, bX, bcFromShape, cY, cX, caFromShape); } } @@ -99,7 +105,7 @@ public enum QueryRelation { * Triangles are encoded with CCW orientation and might be rotated to limit the number of possible reconstructions to 2^3. * Reconstruction always happens from west to east. 
*/ - public static void encodeTriangle(byte[] bytes, int aLat, int aLon, int bLat, int bLon, int cLat, int cLon) { + public static void encodeTriangle(byte[] bytes, int aLat, int aLon, boolean abFromShape, int bLat, int bLon, boolean bcFromShape, int cLat, int cLon, boolean caFromShape) { assert bytes.length == 7 * BYTES; int aX; int bX; @@ -107,6 +113,7 @@ public static void encodeTriangle(byte[] bytes, int aLat, int aLon, int bLat, in int aY; int bY; int cY; + boolean ab, bc, ca; //change orientation if CW if (GeoUtils.orient(aLon, aLat, bLon, bLat, cLon, cLat) == -1) { aX = cLon; @@ -115,6 +122,9 @@ public static void encodeTriangle(byte[] bytes, int aLat, int aLon, int bLat, in aY = cLat; bY = bLat; cY = aLat; + ab = bcFromShape; + bc = abFromShape; + ca = caFromShape; } else { aX = aLon; bX = bLon; @@ -122,27 +132,38 @@ public static void encodeTriangle(byte[] bytes, int aLat, int aLon, int bLat, in aY = aLat; bY = bLat; cY = cLat; + ab = abFromShape; + bc = bcFromShape; + ca = caFromShape; } //rotate edges and place minX at the beginning if (bX < aX || cX < aX) { if (bX < cX) { int tempX = aX; int tempY = aY; + boolean tempBool = ab; aX = bX; aY = bY; + ab = bc; bX = cX; bY = cY; + bc = ca; cX = tempX; cY = tempY; + ca = tempBool; } else if (cX < aX) { int tempX = aX; int tempY = aY; + boolean tempBool = ab; aX = cX; aY = cY; + ab = ca; cX = bX; cY = bY; + ca = bc; bX = tempX; bY = tempY; + bc = tempBool; } } else if (aX == bX && aX == cX) { //degenerated case, all points with same longitude @@ -151,21 +172,29 @@ public static void encodeTriangle(byte[] bytes, int aLat, int aLon, int bLat, in if (bY < cY) { int tempX = aX; int tempY = aY; + boolean tempBool = ab; aX = bX; aY = bY; + ab = bc; bX = cX; bY = cY; + bc = ca; cX = tempX; cY = tempY; + ca = tempBool; } else if (cY < aY) { int tempX = aX; int tempY = aY; + boolean tempBool = ab; aX = cX; aY = cY; + ab = ca; cX = bX; cY = bY; + ca = bc; bX = tempX; bY = tempY; + bc = tempBool; } } } @@ -215,6 +244,9 
@@ public static void encodeTriangle(byte[] bytes, int aLat, int aLon, int bLat, in } else { throw new IllegalArgumentException("Could not encode the provided triangle"); } + bits |= (ab) ? (1 << 3) : 0; + bits |= (bc) ? (1 << 4) : 0; + bits |= (ca) ? (1 << 5) : 0; NumericUtils.intToSortableBytes(minY, bytes, 0); NumericUtils.intToSortableBytes(minX, bytes, BYTES); NumericUtils.intToSortableBytes(maxY, bytes, 2 * BYTES); @@ -224,83 +256,133 @@ public static void encodeTriangle(byte[] bytes, int aLat, int aLon, int bLat, in NumericUtils.intToSortableBytes(bits, bytes, 6 * BYTES); } - /** - * Decode a triangle encoded by {@link ShapeField#encodeTriangle(byte[], int, int, int, int, int, int)}. + /** Decode a triangle encoded by {@link ShapeField#encodeTriangle(byte[], int, int, boolean, int, int, boolean, int, int, boolean)}. */ - public static void decodeTriangle(byte[] t, int[] triangle) { - assert triangle.length == 6; + public static void decodeTriangle(byte[] t, DecodedTriangle triangle) { + final int aX, aY, bX, bY, cX, cY; + final boolean ab, bc, ca; int bits = NumericUtils.sortableBytesToInt(t, 6 * BYTES); //extract the first three bits int tCode = (((1 << 3) - 1) & (bits >> 0)); switch (tCode) { case MINY_MINX_MAXY_MAXX_Y_X: - triangle[0] = NumericUtils.sortableBytesToInt(t, 0 * BYTES); - triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * BYTES); - triangle[2] = NumericUtils.sortableBytesToInt(t, 2 * BYTES); - triangle[3] = NumericUtils.sortableBytesToInt(t, 3 * BYTES); - triangle[4] = NumericUtils.sortableBytesToInt(t, 4 * BYTES); - triangle[5] = NumericUtils.sortableBytesToInt(t, 5 * BYTES); + aY = NumericUtils.sortableBytesToInt(t, 0 * BYTES); + aX = NumericUtils.sortableBytesToInt(t, 1 * BYTES); + bY = NumericUtils.sortableBytesToInt(t, 2 * BYTES); + bX = NumericUtils.sortableBytesToInt(t, 3 * BYTES); + cY = NumericUtils.sortableBytesToInt(t, 4 * BYTES); + cX = NumericUtils.sortableBytesToInt(t, 5 * BYTES); break; case MINY_MINX_Y_X_MAXY_MAXX: - 
triangle[0] = NumericUtils.sortableBytesToInt(t, 0 * BYTES); - triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * BYTES); - triangle[2] = NumericUtils.sortableBytesToInt(t, 4 * BYTES); - triangle[3] = NumericUtils.sortableBytesToInt(t, 5 * BYTES); - triangle[4] = NumericUtils.sortableBytesToInt(t, 2 * BYTES); - triangle[5] = NumericUtils.sortableBytesToInt(t, 3 * BYTES); + aY = NumericUtils.sortableBytesToInt(t, 0 * BYTES); + aX = NumericUtils.sortableBytesToInt(t, 1 * BYTES); + bY = NumericUtils.sortableBytesToInt(t, 4 * BYTES); + bX = NumericUtils.sortableBytesToInt(t, 5 * BYTES); + cY = NumericUtils.sortableBytesToInt(t, 2 * BYTES); + cX = NumericUtils.sortableBytesToInt(t, 3 * BYTES); break; case MAXY_MINX_Y_X_MINY_MAXX: - triangle[0] = NumericUtils.sortableBytesToInt(t, 2 * BYTES); - triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * BYTES); - triangle[2] = NumericUtils.sortableBytesToInt(t, 4 * BYTES); - triangle[3] = NumericUtils.sortableBytesToInt(t, 5 * BYTES); - triangle[4] = NumericUtils.sortableBytesToInt(t, 0 * BYTES); - triangle[5] = NumericUtils.sortableBytesToInt(t, 3 * BYTES); + aY = NumericUtils.sortableBytesToInt(t, 2 * BYTES); + aX = NumericUtils.sortableBytesToInt(t, 1 * BYTES); + bY = NumericUtils.sortableBytesToInt(t, 4 * BYTES); + bX = NumericUtils.sortableBytesToInt(t, 5 * BYTES); + cY = NumericUtils.sortableBytesToInt(t, 0 * BYTES); + cX = NumericUtils.sortableBytesToInt(t, 3 * BYTES); break; case MAXY_MINX_MINY_MAXX_Y_X: - triangle[0] = NumericUtils.sortableBytesToInt(t, 2 * BYTES); - triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * BYTES); - triangle[2] = NumericUtils.sortableBytesToInt(t, 0 * BYTES); - triangle[3] = NumericUtils.sortableBytesToInt(t, 3 * BYTES); - triangle[4] = NumericUtils.sortableBytesToInt(t, 4 * BYTES); - triangle[5] = NumericUtils.sortableBytesToInt(t, 5 * BYTES); + aY = NumericUtils.sortableBytesToInt(t, 2 * BYTES); + aX = NumericUtils.sortableBytesToInt(t, 1 * BYTES); + bY = 
NumericUtils.sortableBytesToInt(t, 0 * BYTES); + bX = NumericUtils.sortableBytesToInt(t, 3 * BYTES); + cY = NumericUtils.sortableBytesToInt(t, 4 * BYTES); + cX = NumericUtils.sortableBytesToInt(t, 5 * BYTES); break; case Y_MINX_MINY_X_MAXY_MAXX: - triangle[0] = NumericUtils.sortableBytesToInt(t, 4 * BYTES); - triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * BYTES); - triangle[2] = NumericUtils.sortableBytesToInt(t, 0 * BYTES); - triangle[3] = NumericUtils.sortableBytesToInt(t, 5 * BYTES); - triangle[4] = NumericUtils.sortableBytesToInt(t, 2 * BYTES); - triangle[5] = NumericUtils.sortableBytesToInt(t, 3 * BYTES); + aY = NumericUtils.sortableBytesToInt(t, 4 * BYTES); + aX = NumericUtils.sortableBytesToInt(t, 1 * BYTES); + bY = NumericUtils.sortableBytesToInt(t, 0 * BYTES); + bX = NumericUtils.sortableBytesToInt(t, 5 * BYTES); + cY = NumericUtils.sortableBytesToInt(t, 2 * BYTES); + cX = NumericUtils.sortableBytesToInt(t, 3 * BYTES); break; case Y_MINX_MINY_MAXX_MAXY_X: - triangle[0] = NumericUtils.sortableBytesToInt(t, 4 * BYTES); - triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * BYTES); - triangle[2] = NumericUtils.sortableBytesToInt(t, 0 * BYTES); - triangle[3] = NumericUtils.sortableBytesToInt(t, 3 * BYTES); - triangle[4] = NumericUtils.sortableBytesToInt(t, 2 * BYTES); - triangle[5] = NumericUtils.sortableBytesToInt(t, 5 * BYTES); + aY = NumericUtils.sortableBytesToInt(t, 4 * BYTES); + aX = NumericUtils.sortableBytesToInt(t, 1 * BYTES); + bY = NumericUtils.sortableBytesToInt(t, 0 * BYTES); + bX = NumericUtils.sortableBytesToInt(t, 3 * BYTES); + cY = NumericUtils.sortableBytesToInt(t, 2 * BYTES); + cX = NumericUtils.sortableBytesToInt(t, 5 * BYTES); break; case MAXY_MINX_MINY_X_Y_MAXX: - triangle[0] = NumericUtils.sortableBytesToInt(t, 2 * BYTES); - triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * BYTES); - triangle[2] = NumericUtils.sortableBytesToInt(t, 0 * BYTES); - triangle[3] = NumericUtils.sortableBytesToInt(t, 5 * BYTES); - triangle[4] = 
NumericUtils.sortableBytesToInt(t, 4 * BYTES); - triangle[5] = NumericUtils.sortableBytesToInt(t, 3 * BYTES); + aY = NumericUtils.sortableBytesToInt(t, 2 * BYTES); + aX = NumericUtils.sortableBytesToInt(t, 1 * BYTES); + bY = NumericUtils.sortableBytesToInt(t, 0 * BYTES); + bX = NumericUtils.sortableBytesToInt(t, 5 * BYTES); + cY = NumericUtils.sortableBytesToInt(t, 4 * BYTES); + cX = NumericUtils.sortableBytesToInt(t, 3 * BYTES); break; case MINY_MINX_Y_MAXX_MAXY_X: - triangle[0] = NumericUtils.sortableBytesToInt(t, 0 * BYTES); - triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * BYTES); - triangle[2] = NumericUtils.sortableBytesToInt(t, 4 * BYTES); - triangle[3] = NumericUtils.sortableBytesToInt(t, 3 * BYTES); - triangle[4] = NumericUtils.sortableBytesToInt(t, 2 * BYTES); - triangle[5] = NumericUtils.sortableBytesToInt(t, 5 * BYTES); + aY = NumericUtils.sortableBytesToInt(t, 0 * BYTES); + aX = NumericUtils.sortableBytesToInt(t, 1 * BYTES); + bY = NumericUtils.sortableBytesToInt(t, 4 * BYTES); + bX = NumericUtils.sortableBytesToInt(t, 3 * BYTES); + cY = NumericUtils.sortableBytesToInt(t, 2 * BYTES); + cX = NumericUtils.sortableBytesToInt(t, 5 * BYTES); break; default: throw new IllegalArgumentException("Could not decode the provided triangle"); } //Points of the decoded triangle must be co-planar or CCW oriented - assert GeoUtils.orient(triangle[1], triangle[0], triangle[3], triangle[2], triangle[5], triangle[4]) >= 0; + assert GeoUtils.orient(aX, aY, bX, bY, cX, cY) >= 0; + ab = (bits & 1 << 3) == 1 << 3; + bc = (bits & 1 << 4) == 1 << 4; + ca = (bits & 1 << 5) == 1 << 5; + triangle.setValues(aX, aY, ab, bX, bY, bc, cX, cY, ca); + } + + /** + * Represents a encoded triangle using {@link ShapeField#decodeTriangle(byte[], DecodedTriangle)}. 
+ */ + public static class DecodedTriangle { + //Triangle vertices + public int aX, aY, bX, bY, cX, cY; + //Represent if edges belongs to original shape + public boolean ab, bc, ca; + + public DecodedTriangle() { + } + + private void setValues(int aX, int aY, boolean ab, int bX, int bY, boolean bc, int cX, int cY, boolean ca) { + this.aX = aX; + this.aY = aY; + this.ab = ab; + this.bX = bX; + this.bY = bY; + this.bc = bc; + this.cX = cX; + this.cY = cY; + this.ca = ca; + } + + @Override + public int hashCode() { + return Objects.hash(aX, aY, bX, bY, cX, cY, ab, bc, ca); + } + + @Override + public boolean equals(Object o) { + DecodedTriangle other = (DecodedTriangle) o; + return aX == other.aX && bX == other.bX && cX == other.cX + && aY == other.aY && bY == other.bY && cY == other.cY + && ab == other.ab && bc == other.bc && ca == other.ca; + } + + /** pretty print the triangle vertices */ + public String toString() { + String result = aX + ", " + aY + " " + + bX + ", " + bY + " " + + cX + ", " + cY + " " + "[" + ab + "," +bc + "," + ca + "]"; + return result; + } } } diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/ShapeQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/ShapeQuery.java index 7234ed5900a9..d27a3eaf5d81 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/document/ShapeQuery.java +++ b/lucene/sandbox/src/java/org/apache/lucene/document/ShapeQuery.java @@ -26,6 +26,7 @@ import org.apache.lucene.index.PointValues; import org.apache.lucene.index.PointValues.IntersectVisitor; import org.apache.lucene.index.PointValues.Relation; +import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.ConstantScoreScorer; import org.apache.lucene.search.ConstantScoreWeight; import org.apache.lucene.search.DocIdSetIterator; @@ -82,12 +83,12 @@ protected abstract Relation relateRangeBBoxToQuery(int minXOffset, int minYOffse int maxXOffset, int maxYOffset, byte[] maxTriangle); /** returns true if the 
provided triangle matches the query */ - protected abstract boolean queryMatches(byte[] triangle, int[] scratchTriangle, ShapeField.QueryRelation queryRelation); + protected abstract boolean queryMatches(byte[] triangle, ShapeField.DecodedTriangle scratchTriangle, ShapeField.QueryRelation queryRelation); /** relates a range of triangles (internal node) to the query */ protected Relation relateRangeToQuery(byte[] minTriangle, byte[] maxTriangle, QueryRelation queryRelation) { // compute bounding box of internal node - Relation r = relateRangeBBoxToQuery(ShapeField.BYTES, 0, minTriangle, 3 * ShapeField.BYTES, 2 * ShapeField.BYTES, maxTriangle); + final Relation r = relateRangeBBoxToQuery(ShapeField.BYTES, 0, minTriangle, 3 * ShapeField.BYTES, 2 * ShapeField.BYTES, maxTriangle); if (queryRelation == QueryRelation.DISJOINT) { return transposeRelation(r); } @@ -102,149 +103,44 @@ public void visit(QueryVisitor visitor) { } @Override - public final Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { + public final Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { + final ShapeQuery query = this; + return new ConstantScoreWeight(query, boost) { - return new ConstantScoreWeight(this, boost) { - - /** create a visitor that adds documents that match the query using a sparse bitset. 
(Used by INTERSECT) */ - protected IntersectVisitor getSparseIntersectVisitor(DocIdSetBuilder result) { - return new IntersectVisitor() { - final int[] scratchTriangle = new int[6]; - DocIdSetBuilder.BulkAdder adder; - - @Override - public void grow(int count) { - adder = result.grow(count); - } - - @Override - public void visit(int docID) throws IOException { - adder.add(docID); - } - - @Override - public void visit(int docID, byte[] t) throws IOException { - if (queryMatches(t, scratchTriangle, QueryRelation.INTERSECTS)) { - visit(docID); - } - } - - @Override - public void visit(DocIdSetIterator iterator, byte[] t) throws IOException { - if (queryMatches(t, scratchTriangle, QueryRelation.INTERSECTS)) { - int docID; - while ((docID = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { - visit(docID); - } - } - } - - @Override - public Relation compare(byte[] minTriangle, byte[] maxTriangle) { - return relateRangeToQuery(minTriangle, maxTriangle, ShapeField.QueryRelation.INTERSECTS); - } - }; - } - - /** create a visitor that adds documents that match the query using a dense bitset. 
(Used by WITHIN, DISJOINT) */ - protected IntersectVisitor getDenseIntersectVisitor(FixedBitSet intersect, FixedBitSet disjoint, ShapeField.QueryRelation queryRelation) { - return new IntersectVisitor() { - final int[] scratchTriangle = new int[6]; - @Override - public void visit(int docID) throws IOException { - if (queryRelation == ShapeField.QueryRelation.DISJOINT) { - // if DISJOINT query set the doc in the disjoint bitset - disjoint.set(docID); - } else { - // for INTERSECT, and WITHIN queries we set the intersect bitset - intersect.set(docID); - } - } - - @Override - public void visit(int docID, byte[] t) throws IOException { - if (queryMatches(t, scratchTriangle, queryRelation)) { - intersect.set(docID); - } else { - disjoint.set(docID); - } - } - - @Override - public void visit(DocIdSetIterator iterator, byte[] t) throws IOException { - boolean queryMatches = queryMatches(t, scratchTriangle, queryRelation); - int docID; - while ((docID = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { - if (queryMatches) { - intersect.set(docID); - } else { - disjoint.set(docID); - } - } - } - - @Override - public Relation compare(byte[] minTriangle, byte[] maxTriangle) { - return relateRangeToQuery(minTriangle, maxTriangle, queryRelation); - } - }; - } - - /** get a scorer supplier for INTERSECT queries */ - protected ScorerSupplier getIntersectScorerSupplier(LeafReader reader, PointValues values, Weight weight, ScoreMode scoreMode) throws IOException { - DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc(), values, field); - IntersectVisitor visitor = getSparseIntersectVisitor(result); - return new RelationScorerSupplier(values, visitor, null, queryRelation) { - @Override - public Scorer get(long leadCost) throws IOException { - return getIntersectsScorer(ShapeQuery.this, reader, weight, result, score(), scoreMode); - } - }; - } - - /** get a scorer supplier for all other queries (DISJOINT, WITHIN) */ - protected ScorerSupplier getScorerSupplier(LeafReader 
reader, PointValues values, Weight weight, ScoreMode scoreMode) throws IOException { - if (queryRelation == ShapeField.QueryRelation.INTERSECTS) { - return getIntersectScorerSupplier(reader, values, weight, scoreMode); + @Override + public Scorer scorer(LeafReaderContext context) throws IOException { + final ScorerSupplier scorerSupplier = scorerSupplier(context); + if (scorerSupplier == null) { + return null; } - //For within and disjoint we need two passes to remove false positives in case of multi-shapes. - FixedBitSet within = new FixedBitSet(reader.maxDoc()); - FixedBitSet disjoint = new FixedBitSet(reader.maxDoc()); - IntersectVisitor withinVisitor = getDenseIntersectVisitor(within, disjoint, ShapeField.QueryRelation.WITHIN); - IntersectVisitor disjointVisitor = getDenseIntersectVisitor(within, disjoint, ShapeField.QueryRelation.DISJOINT); - return new RelationScorerSupplier(values, withinVisitor, disjointVisitor, queryRelation) { - @Override - public Scorer get(long leadCost) throws IOException { - return getScorer(ShapeQuery.this, weight, within, disjoint, score(), scoreMode); - } - }; + return scorerSupplier.get(Long.MAX_VALUE); } @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { - LeafReader reader = context.reader(); - PointValues values = reader.getPointValues(field); + final LeafReader reader = context.reader(); + final PointValues values = reader.getPointValues(field); if (values == null) { // No docs in this segment had any points fields return null; } - FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field); + final FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field); if (fieldInfo == null) { // No docs in this segment indexed this field at all return null; } - boolean allDocsMatch = true; - if (values.getDocCount() != reader.maxDoc() || - relateRangeToQuery(values.getMinPackedValue(), values.getMaxPackedValue(), queryRelation) != Relation.CELL_INSIDE_QUERY) { - allDocsMatch = false; - } 
- final Weight weight = this; - if (allDocsMatch) { + final Relation rel = relateRangeToQuery(values.getMinPackedValue(), values.getMaxPackedValue(), queryRelation); + if (rel == Relation.CELL_OUTSIDE_QUERY) { + // no documents match the query + return null; + } + else if (values.getDocCount() == reader.maxDoc() && rel == Relation.CELL_INSIDE_QUERY) { + // all documents match the query return new ScorerSupplier() { @Override - public Scorer get(long leadCost) throws IOException { + public Scorer get(long leadCost) { return new ConstantScoreScorer(weight, score(), scoreMode, DocIdSetIterator.all(reader.maxDoc())); } @@ -254,17 +150,20 @@ public long cost() { } }; } else { - return getScorerSupplier(reader, values, weight, scoreMode); - } - } - - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier scorerSupplier = scorerSupplier(context); - if (scorerSupplier == null) { - return null; + if (queryRelation != QueryRelation.INTERSECTS + && hasAnyHits(query, values) == false) { + // First we check if we have any hits so we are fast in the adversarial case where + // the shape does not match any documents and we are in the dense case + return null; + } + // walk the tree to get matching documents + return new RelationScorerSupplier(values, ShapeQuery.this) { + @Override + public Scorer get(long leadCost) throws IOException { + return getScorer(reader, weight, score(), scoreMode); + } + }; } - return scorerSupplier.get(Long.MAX_VALUE); } @Override @@ -313,57 +212,26 @@ private static Relation transposeRelation(Relation r) { /** utility class for implementing constant score logic specific to INTERSECT, WITHIN, and DISJOINT */ private static abstract class RelationScorerSupplier extends ScorerSupplier { - PointValues values; - IntersectVisitor visitor; - IntersectVisitor disjointVisitor;//it can be null - ShapeField.QueryRelation queryRelation; - long cost = -1; + final private PointValues values; + final private ShapeQuery 
query; + private long cost = -1; - RelationScorerSupplier(PointValues values, IntersectVisitor visitor, IntersectVisitor disjointVisitor, QueryRelation queryRelation) { + RelationScorerSupplier(final PointValues values, final ShapeQuery query) { this.values = values; - this.visitor = visitor; - this.disjointVisitor = disjointVisitor; - this.queryRelation = queryRelation; + this.query = query; } - /** create a visitor that clears documents that do NOT match the polygon query; used with INTERSECTS */ - private IntersectVisitor getInverseIntersectVisitor(ShapeQuery query, FixedBitSet result, int[] cost) { - return new IntersectVisitor() { - int[] scratchTriangle = new int[6]; - @Override - public void visit(int docID) { - result.clear(docID); - cost[0]--; - } - - @Override - public void visit(int docID, byte[] packedTriangle) { - if (query.queryMatches(packedTriangle, scratchTriangle, QueryRelation.INTERSECTS) == false) { - visit(docID); - } - } - - @Override - public void visit(DocIdSetIterator iterator, byte[] t) throws IOException { - if (query.queryMatches(t, scratchTriangle, QueryRelation.INTERSECTS) == false) { - int docID; - while ((docID = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { - visit(docID); - } - } - } - - - @Override - public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - return transposeRelation(query.relateRangeToQuery(minPackedValue, maxPackedValue, QueryRelation.INTERSECTS)); - } - }; + protected Scorer getScorer(final LeafReader reader, final Weight weight, final float boost, final ScoreMode scoreMode) throws IOException { + switch (query.getQueryRelation()) { + case INTERSECTS: return getSparseScorer(reader, weight, boost, scoreMode); + case WITHIN: + case DISJOINT: return getDenseScorer(reader, weight, boost, scoreMode); + default: throw new IllegalArgumentException("Unsupported query type :[" + query.getQueryRelation() + "]"); + } } - /** returns a Scorer for INTERSECT queries that uses a sparse bitset */ - 
protected Scorer getIntersectsScorer(ShapeQuery query, LeafReader reader, Weight weight, - DocIdSetBuilder docIdSetBuilder, final float boost, ScoreMode scoreMode) throws IOException { + /** Scorer used for INTERSECTS **/ + private Scorer getSparseScorer(final LeafReader reader, final Weight weight, final float boost, final ScoreMode scoreMode) throws IOException { if (values.getDocCount() == reader.maxDoc() && values.getDocCount() == values.size() && cost() > reader.maxDoc() / 2) { @@ -372,34 +240,41 @@ && cost() > reader.maxDoc() / 2) { // by computing the set of documents that do NOT match the query final FixedBitSet result = new FixedBitSet(reader.maxDoc()); result.set(0, reader.maxDoc()); - int[] cost = new int[]{reader.maxDoc()}; - values.intersect(getInverseIntersectVisitor(query, result, cost)); + final long[] cost = new long[]{reader.maxDoc()}; + values.intersect(getInverseDenseVisitor(query, result, cost)); final DocIdSetIterator iterator = new BitSetIterator(result, cost[0]); return new ConstantScoreScorer(weight, boost, scoreMode, iterator); } - - values.intersect(visitor); - DocIdSetIterator iterator = docIdSetBuilder.build().iterator(); + final DocIdSetBuilder docIdSetBuilder = new DocIdSetBuilder(reader.maxDoc(), values, query.getField()); + values.intersect(getSparseVisitor(query, docIdSetBuilder)); + final DocIdSetIterator iterator = docIdSetBuilder.build().iterator(); return new ConstantScoreScorer(weight, boost, scoreMode, iterator); } - /** returns a Scorer for all other (non INTERSECT) queries */ - protected Scorer getScorer(ShapeQuery query, Weight weight, - FixedBitSet intersect, FixedBitSet disjoint, final float boost, ScoreMode scoreMode) throws IOException { - values.intersect(visitor); - if (disjointVisitor != null) { - values.intersect(disjointVisitor); - } - DocIdSetIterator iterator; - if (query.queryRelation == ShapeField.QueryRelation.DISJOINT) { - disjoint.andNot(intersect); - iterator = new BitSetIterator(disjoint, cost()); - } 
else if (query.queryRelation == ShapeField.QueryRelation.WITHIN) { - intersect.andNot(disjoint); - iterator = new BitSetIterator(intersect, cost()); + /** Scorer used for WITHIN and DISJOINT **/ + private Scorer getDenseScorer(LeafReader reader, Weight weight, final float boost, ScoreMode scoreMode) throws IOException { + final FixedBitSet result = new FixedBitSet(reader.maxDoc()); + final long[] cost; + if (values.getDocCount() == reader.maxDoc()) { + cost = new long[]{values.size()}; + // In this case we can spare one visit to the tree, all documents + // are potential matches + result.set(0, reader.maxDoc()); + // Remove false positives + values.intersect(getInverseDenseVisitor(query, result, cost)); } else { - iterator = new BitSetIterator(intersect, cost()); + cost = new long[]{0}; + // Get potential documents. + final FixedBitSet excluded = new FixedBitSet(reader.maxDoc()); + values.intersect(getDenseVisitor(query, result, excluded, cost)); + result.andNot(excluded); + // Remove false positives, we only care about the inner nodes as intersecting + // leaf nodes have been already taken into account. Unfortunately this + // process still reads the leaf nodes. 
+ values.intersect(getShallowInverseDenseVisitor(query, result)); } + assert cost[0] > 0; + final DocIdSetIterator iterator = new BitSetIterator(result, cost[0]); return new ConstantScoreScorer(weight, boost, scoreMode, iterator); } @@ -407,14 +282,213 @@ protected Scorer getScorer(ShapeQuery query, Weight weight, public long cost() { if (cost == -1) { // Computing the cost may be expensive, so only do it if necessary - if (queryRelation == ShapeField.QueryRelation.DISJOINT) { - cost = values.estimatePointCount(disjointVisitor); - } else { - cost = values.estimatePointCount(visitor); - } + cost = values.estimateDocCount(getEstimateVisitor(query)); assert cost >= 0; } return cost; } } + + /** create a visitor for calculating point count estimates for the provided relation */ + private static IntersectVisitor getEstimateVisitor(final ShapeQuery query) { + return new IntersectVisitor() { + @Override + public void visit(int docID) { + throw new UnsupportedOperationException(); + } + + @Override + public void visit(int docID, byte[] t) { + throw new UnsupportedOperationException(); + } + + @Override + public Relation compare(byte[] minTriangle, byte[] maxTriangle) { + return query.relateRangeToQuery(minTriangle, maxTriangle, query.getQueryRelation()); + } + }; + } + + /** create a visitor that adds documents that match the query using a sparse bitset. 
(Used by INTERSECT) */ + private static IntersectVisitor getSparseVisitor(final ShapeQuery query, final DocIdSetBuilder result) { + return new IntersectVisitor() { + final ShapeField.DecodedTriangle scratchTriangle = new ShapeField.DecodedTriangle(); + DocIdSetBuilder.BulkAdder adder; + + @Override + public void grow(int count) { + adder = result.grow(count); + } + + @Override + public void visit(int docID) { + adder.add(docID); + } + + @Override + public void visit(int docID, byte[] t) { + if (query.queryMatches(t, scratchTriangle, query.getQueryRelation())) { + visit(docID); + } + } + + @Override + public void visit(DocIdSetIterator iterator, byte[] t) throws IOException { + if (query.queryMatches(t, scratchTriangle, query.getQueryRelation())) { + int docID; + while ((docID = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { + visit(docID); + } + } + } + + @Override + public Relation compare(byte[] minTriangle, byte[] maxTriangle) { + return query.relateRangeToQuery(minTriangle, maxTriangle, query.getQueryRelation()); + } + }; + } + + /** create a visitor that adds documents that match the query using a dense bitset; used with WITHIN & DISJOINT */ + private static IntersectVisitor getDenseVisitor(final ShapeQuery query, final FixedBitSet result, final FixedBitSet excluded, final long[] cost) { + return new IntersectVisitor() { + final ShapeField.DecodedTriangle scratchTriangle = new ShapeField.DecodedTriangle(); + + @Override + public void visit(int docID) { + result.set(docID); + cost[0]++; + } + + @Override + public void visit(int docID, byte[] t) { + if (query.queryMatches(t, scratchTriangle, query.getQueryRelation())) { + visit(docID); + } else { + excluded.set(docID); + } + } + + @Override + public void visit(DocIdSetIterator iterator, byte[] t) throws IOException { + boolean matches = query.queryMatches(t, scratchTriangle, query.getQueryRelation()); + int docID; + while ((docID = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { + if (matches) { 
+ visit(docID); + } else { + excluded.set(docID); + } + } + } + + @Override + public Relation compare(byte[] minTriangle, byte[] maxTriangle) { + return query.relateRangeToQuery(minTriangle, maxTriangle, query.getQueryRelation()); + } + }; + } + + /** create a visitor that clears documents that do not match the polygon query using a dense bitset; used with WITHIN & DISJOINT */ + private static IntersectVisitor getInverseDenseVisitor(final ShapeQuery query, final FixedBitSet result, final long[] cost) { + return new IntersectVisitor() { + final ShapeField.DecodedTriangle scratchTriangle = new ShapeField.DecodedTriangle(); + + @Override + public void visit(int docID) { + result.clear(docID); + cost[0]--; + } + + @Override + public void visit(int docID, byte[] packedTriangle) { + if (query.queryMatches(packedTriangle, scratchTriangle, query.getQueryRelation()) == false) { + visit(docID); + } + } + + @Override + public void visit(DocIdSetIterator iterator, byte[] t) throws IOException { + if (query.queryMatches(t, scratchTriangle, query.getQueryRelation()) == false) { + int docID; + while ((docID = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { + visit(docID); + } + } + } + + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + return transposeRelation(query.relateRangeToQuery(minPackedValue, maxPackedValue, query.getQueryRelation())); + } + }; + } + + /** create a visitor that clears documents that do not match the polygon query using a dense bitset; used with WITHIN & DISJOINT. 
+ * This visitor only takes into account inner nodes */ + private static IntersectVisitor getShallowInverseDenseVisitor(final ShapeQuery query, final FixedBitSet result) { + return new IntersectVisitor() { + + @Override + public void visit(int docID) { + result.clear(docID); + } + + @Override + public void visit(int docID, byte[] packedTriangle) { + //NO-OP + } + + @Override + public void visit(DocIdSetIterator iterator, byte[] t) { + //NO-OP + } + + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + return transposeRelation(query.relateRangeToQuery(minPackedValue, maxPackedValue, query.getQueryRelation())); + } + }; + } + + /** Return true if the query matches at least one document. It creates a visitor that terminates as soon as one or more docs + * are matched. */ + private static boolean hasAnyHits(final ShapeQuery query, final PointValues values) throws IOException { + try { + values.intersect(new IntersectVisitor() { + final ShapeField.DecodedTriangle scratchTriangle = new ShapeField.DecodedTriangle(); + + @Override + public void visit(int docID) { + throw new CollectionTerminatedException(); + } + + @Override + public void visit(int docID, byte[] t) { + if (query.queryMatches(t, scratchTriangle, query.getQueryRelation())) { + throw new CollectionTerminatedException(); + } + } + + @Override + public void visit(DocIdSetIterator iterator, byte[] t) { + if (query.queryMatches(t, scratchTriangle, query.getQueryRelation())) { + throw new CollectionTerminatedException(); + } + } + + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + Relation rel = query.relateRangeToQuery(minPackedValue, maxPackedValue, query.getQueryRelation()); + if (rel == Relation.CELL_INSIDE_QUERY) { + throw new CollectionTerminatedException(); + } + return rel; + } + }); + } catch (CollectionTerminatedException e) { + return true; + } + return false; + } } diff --git 
a/lucene/sandbox/src/java/org/apache/lucene/document/XYShapeBoundingBoxQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/XYShapeBoundingBoxQuery.java index 21fa5b48d51d..4a9e46525cec 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/document/XYShapeBoundingBoxQuery.java +++ b/lucene/sandbox/src/java/org/apache/lucene/document/XYShapeBoundingBoxQuery.java @@ -41,26 +41,31 @@ public XYShapeBoundingBoxQuery(String field, QueryRelation queryRelation, double @Override protected PointValues.Relation relateRangeBBoxToQuery(int minXOffset, int minYOffset, byte[] minTriangle, int maxXOffset, int maxYOffset, byte[] maxTriangle) { + if (queryRelation == QueryRelation.INTERSECTS || queryRelation == QueryRelation.DISJOINT) { + return rectangle2D.intersectRangeBBox(minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle); + } return rectangle2D.relateRangeBBox(minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle); } /** returns true if the query matches the encoded triangle */ @Override - protected boolean queryMatches(byte[] t, int[] scratchTriangle, QueryRelation queryRelation) { + protected boolean queryMatches(byte[] t, ShapeField.DecodedTriangle scratchTriangle, QueryRelation queryRelation) { // decode indexed triangle ShapeField.decodeTriangle(t, scratchTriangle); - int aY = scratchTriangle[0]; - int aX = scratchTriangle[1]; - int bY = scratchTriangle[2]; - int bX = scratchTriangle[3]; - int cY = scratchTriangle[4]; - int cX = scratchTriangle[5]; + int aY = scratchTriangle.aY; + int aX = scratchTriangle.aX; + int bY = scratchTriangle.bY; + int bX = scratchTriangle.bX; + int cY = scratchTriangle.cY; + int cX = scratchTriangle.cX; - if (queryRelation == QueryRelation.WITHIN) { - return rectangle2D.containsTriangle(aX, aY, bX, bY, cX, cY); + switch (queryRelation) { + case INTERSECTS: return rectangle2D.intersectsTriangle(aX, aY, bX, bY, cX, cY); + case WITHIN: return rectangle2D.containsTriangle(aX, aY, bX, bY, cX, cY); 
+ case DISJOINT: return rectangle2D.intersectsTriangle(aX, aY, bX, bY, cX, cY) == false; + default: throw new IllegalArgumentException("Unsupported query type :[" + queryRelation + "]"); } - return rectangle2D.intersectsTriangle(aX, aY, bX, bY, cX, cY); } @Override diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/XYShapeLineQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/XYShapeLineQuery.java index b8ec71094d7f..5f200daca1a4 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/document/XYShapeLineQuery.java +++ b/lucene/sandbox/src/java/org/apache/lucene/document/XYShapeLineQuery.java @@ -86,21 +86,22 @@ protected Relation relateRangeBBoxToQuery(int minXOffset, int minYOffset, byte[] } @Override - protected boolean queryMatches(byte[] t, int[] scratchTriangle, QueryRelation queryRelation) { + protected boolean queryMatches(byte[] t, ShapeField.DecodedTriangle scratchTriangle, QueryRelation queryRelation) { ShapeField.decodeTriangle(t, scratchTriangle); - double alat = decode(scratchTriangle[0]); - double alon = decode(scratchTriangle[1]); - double blat = decode(scratchTriangle[2]); - double blon = decode(scratchTriangle[3]); - double clat = decode(scratchTriangle[4]); - double clon = decode(scratchTriangle[5]); + double alat = decode(scratchTriangle.aY); + double alon = decode(scratchTriangle.aX); + double blat = decode(scratchTriangle.bY); + double blon = decode(scratchTriangle.bX); + double clat = decode(scratchTriangle.cY); + double clon = decode(scratchTriangle.cX); - if (queryRelation == QueryRelation.WITHIN) { - return line2D.relateTriangle(alon, alat, blon, blat, clon, clat) == Relation.CELL_INSIDE_QUERY; + switch (queryRelation) { + case INTERSECTS: return line2D.relateTriangle(alon, alat, blon, blat, clon, clat) != Relation.CELL_OUTSIDE_QUERY; + case WITHIN: return line2D.relateTriangle(alon, alat, blon, blat, clon, clat) == Relation.CELL_INSIDE_QUERY; + case DISJOINT: return line2D.relateTriangle(alon, alat, blon, blat, 
clon, clat) == Relation.CELL_OUTSIDE_QUERY; + default: throw new IllegalArgumentException("Unsupported query type :[" + queryRelation + "]"); } - // INTERSECTS - return line2D.relateTriangle(alon, alat, blon, blat, clon, clat) != Relation.CELL_OUTSIDE_QUERY; } @Override diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/XYShapePolygonQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/XYShapePolygonQuery.java index e1b4e9916b37..49835bdd6786 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/document/XYShapePolygonQuery.java +++ b/lucene/sandbox/src/java/org/apache/lucene/document/XYShapePolygonQuery.java @@ -26,6 +26,8 @@ import org.apache.lucene.index.PointValues.Relation; import org.apache.lucene.util.NumericUtils; +import static org.apache.lucene.geo.XYEncodingUtils.decode; + /** * Finds all previously indexed cartesian shapes that intersect the specified arbitrary cartesian {@link XYPolygon}. * @@ -76,21 +78,22 @@ protected Relation relateRangeBBoxToQuery(int minXOffset, int minYOffset, byte[] } @Override - protected boolean queryMatches(byte[] t, int[] scratchTriangle, QueryRelation queryRelation) { + protected boolean queryMatches(byte[] t, ShapeField.DecodedTriangle scratchTriangle, QueryRelation queryRelation) { ShapeField.decodeTriangle(t, scratchTriangle); - double alat = XYEncodingUtils.decode(scratchTriangle[0]); - double alon = XYEncodingUtils.decode(scratchTriangle[1]); - double blat = XYEncodingUtils.decode(scratchTriangle[2]); - double blon = XYEncodingUtils.decode(scratchTriangle[3]); - double clat = XYEncodingUtils.decode(scratchTriangle[4]); - double clon = XYEncodingUtils.decode(scratchTriangle[5]); + double alat = decode(scratchTriangle.aY); + double alon = decode(scratchTriangle.aX); + double blat = decode(scratchTriangle.bY); + double blon = decode(scratchTriangle.bX); + double clat = decode(scratchTriangle.cY); + double clon = decode(scratchTriangle.cX); - if (queryRelation == QueryRelation.WITHIN) { - return 
poly2D.relateTriangle(alon, alat, blon, blat, clon, clat) == Relation.CELL_INSIDE_QUERY; + switch (queryRelation) { + case INTERSECTS: return poly2D.relateTriangle(alon, alat, blon, blat, clon, clat) != Relation.CELL_OUTSIDE_QUERY; + case WITHIN: return poly2D.relateTriangle(alon, alat, blon, blat, clon, clat) == Relation.CELL_INSIDE_QUERY; + case DISJOINT: return poly2D.relateTriangle(alon, alat, blon, blat, clon, clat) == Relation.CELL_OUTSIDE_QUERY; + default: throw new IllegalArgumentException("Unsupported query type :[" + queryRelation + "]"); } - // INTERSECTS - return poly2D.relateTriangle(alon, alat, blon, blat, clon, clat) != Relation.CELL_OUTSIDE_QUERY; } @Override diff --git a/lucene/sandbox/src/java/org/apache/lucene/geo/Rectangle2D.java b/lucene/sandbox/src/java/org/apache/lucene/geo/Rectangle2D.java index 38fe4e1ade4b..da33337e495b 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/geo/Rectangle2D.java +++ b/lucene/sandbox/src/java/org/apache/lucene/geo/Rectangle2D.java @@ -17,7 +17,6 @@ package org.apache.lucene.geo; -import org.apache.lucene.index.PointValues; import org.apache.lucene.util.FutureArrays; import java.util.Arrays; @@ -108,16 +107,28 @@ public boolean queryContainsPoint(int x, int y) { return bboxContainsPoint(x, y, this.minX, this.maxX, this.minY, this.maxY); } - /** compare this to a provided rangle bounding box **/ + /** compare this to a provided range bounding box **/ public Relation relateRangeBBox(int minXOffset, int minYOffset, byte[] minTriangle, int maxXOffset, int maxYOffset, byte[] maxTriangle) { - Relation eastRelation = compareBBoxToRangeBBox(this.bbox, minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle); + Relation eastRelation = compareBBoxToRangeBBox(this.bbox, + minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle); if (this.crossesDateline() && eastRelation == Relation.CELL_OUTSIDE_QUERY) { return compareBBoxToRangeBBox(this.west, minXOffset, minYOffset, minTriangle, 
maxXOffset, maxYOffset, maxTriangle); } return eastRelation; } + /** intersects this to a provided range bounding box **/ + public Relation intersectRangeBBox(int minXOffset, int minYOffset, byte[] minTriangle, + int maxXOffset, int maxYOffset, byte[] maxTriangle) { + Relation eastRelation = intersectBBoxWithRangeBBox(this.bbox, + minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle); + if (this.crossesDateline() && eastRelation == Relation.CELL_OUTSIDE_QUERY) { + return intersectBBoxWithRangeBBox(this.west, minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle); + } + return eastRelation; + } + /** Checks if the rectangle intersects the provided triangle **/ public boolean intersectsTriangle(int aX, int aY, int bX, int bY, int cX, int cY) { // 1. query contains any triangle points @@ -168,27 +179,78 @@ public boolean containsTriangle(int ax, int ay, int bx, int by, int cx, int cy) return bboxContainsTriangle(ax, ay, bx, by, cx, cy, minX, maxX, minY, maxY); } - /** static utility method to compare a bbox with a range of triangles (just the bbox of the triangle collection) */ + /** + * static utility method to compare a bbox with a range of triangles (just the bbox of the triangle collection) + **/ private static Relation compareBBoxToRangeBBox(final byte[] bbox, int minXOffset, int minYOffset, byte[] minTriangle, int maxXOffset, int maxYOffset, byte[] maxTriangle) { // check bounding box (DISJOINT) - if (FutureArrays.compareUnsigned(minTriangle, minXOffset, minXOffset + BYTES, bbox, 3 * BYTES, 4 * BYTES) > 0 || - FutureArrays.compareUnsigned(maxTriangle, maxXOffset, maxXOffset + BYTES, bbox, BYTES, 2 * BYTES) < 0 || - FutureArrays.compareUnsigned(minTriangle, minYOffset, minYOffset + BYTES, bbox, 2 * BYTES, 3 * BYTES) > 0 || - FutureArrays.compareUnsigned(maxTriangle, maxYOffset, maxYOffset + BYTES, bbox, 0, BYTES) < 0) { - return PointValues.Relation.CELL_OUTSIDE_QUERY; + if (disjoint(bbox, minXOffset, minYOffset, 
minTriangle, maxXOffset, maxYOffset, maxTriangle)) { + return Relation.CELL_OUTSIDE_QUERY; } if (FutureArrays.compareUnsigned(minTriangle, minXOffset, minXOffset + BYTES, bbox, BYTES, 2 * BYTES) >= 0 && FutureArrays.compareUnsigned(maxTriangle, maxXOffset, maxXOffset + BYTES, bbox, 3 * BYTES, 4 * BYTES) <= 0 && FutureArrays.compareUnsigned(minTriangle, minYOffset, minYOffset + BYTES, bbox, 0, BYTES) >= 0 && FutureArrays.compareUnsigned(maxTriangle, maxYOffset, maxYOffset + BYTES, bbox, 2 * BYTES, 3 * BYTES) <= 0) { - return PointValues.Relation.CELL_INSIDE_QUERY; + return Relation.CELL_INSIDE_QUERY; } + return Relation.CELL_CROSSES_QUERY; } + /** + * static utility method to compare a bbox with a range of triangles (just the bbox of the triangle collection) + * for intersection + **/ + private static Relation intersectBBoxWithRangeBBox(final byte[] bbox, + int minXOffset, int minYOffset, byte[] minTriangle, + int maxXOffset, int maxYOffset, byte[] maxTriangle) { + // check bounding box (DISJOINT) + if (disjoint(bbox, minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle)) { + return Relation.CELL_OUTSIDE_QUERY; + } + + if (FutureArrays.compareUnsigned(minTriangle, minXOffset, minXOffset + BYTES, bbox, BYTES, 2 * BYTES) >= 0 && + FutureArrays.compareUnsigned(minTriangle, minYOffset, minYOffset + BYTES, bbox, 0, BYTES) >= 0 ) { + if (FutureArrays.compareUnsigned(maxTriangle, minXOffset, minXOffset + BYTES, bbox, 3 * BYTES, 4 * BYTES) <= 0 && + FutureArrays.compareUnsigned(maxTriangle, maxYOffset, maxYOffset + BYTES, bbox, 2 * BYTES, 3 * BYTES) <= 0) { + return Relation.CELL_INSIDE_QUERY; + } + if (FutureArrays.compareUnsigned(maxTriangle, maxXOffset, maxXOffset + BYTES, bbox, 3 * BYTES, 4 * BYTES) <= 0 && + FutureArrays.compareUnsigned(maxTriangle, minYOffset, minYOffset + BYTES, bbox, 2 * BYTES, 3 * BYTES) <= 0) { + return Relation.CELL_INSIDE_QUERY; + } + } + + if (FutureArrays.compareUnsigned(maxTriangle, maxXOffset, maxXOffset + BYTES, bbox, 3 
* BYTES, 4 * BYTES) <= 0 && + FutureArrays.compareUnsigned(maxTriangle, maxYOffset, maxYOffset + BYTES, bbox, 2 * BYTES, 3 * BYTES) <= 0 ) { + if (FutureArrays.compareUnsigned(minTriangle, minXOffset, minXOffset + BYTES, bbox, BYTES, 2 * BYTES) >= 0 && + FutureArrays.compareUnsigned(minTriangle, maxYOffset, maxYOffset + BYTES, bbox, 0, BYTES) >= 0) { + return Relation.CELL_INSIDE_QUERY; + } + if (FutureArrays.compareUnsigned(minTriangle, maxXOffset, maxXOffset + BYTES, bbox, BYTES, 2 * BYTES) >= 0 && + FutureArrays.compareUnsigned(minTriangle, minYOffset, minYOffset + BYTES, bbox, 0, BYTES) >= 0) { + return Relation.CELL_INSIDE_QUERY; + } + } + + return Relation.CELL_CROSSES_QUERY; + } + + /** + * static utility method to check a bbox is disjoint with a range of triangles + **/ + private static boolean disjoint(final byte[] bbox, + int minXOffset, int minYOffset, byte[] minTriangle, + int maxXOffset, int maxYOffset, byte[] maxTriangle) { + return FutureArrays.compareUnsigned(minTriangle, minXOffset, minXOffset + BYTES, bbox, 3 * BYTES, 4 * BYTES) > 0 || + FutureArrays.compareUnsigned(maxTriangle, maxXOffset, maxXOffset + BYTES, bbox, BYTES, 2 * BYTES) < 0 || + FutureArrays.compareUnsigned(minTriangle, minYOffset, minYOffset + BYTES, bbox, 2 * BYTES, 3 * BYTES) > 0 || + FutureArrays.compareUnsigned(maxTriangle, maxYOffset, maxYOffset + BYTES, bbox, 0, BYTES) < 0; + } + /** * encodes a bounding box into the provided byte array */ diff --git a/lucene/sandbox/src/java/org/apache/lucene/geo/Tessellator.java b/lucene/sandbox/src/java/org/apache/lucene/geo/Tessellator.java index e46df18af1ae..bd9547575f4d 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/geo/Tessellator.java +++ b/lucene/sandbox/src/java/org/apache/lucene/geo/Tessellator.java @@ -175,7 +175,7 @@ private static final Node createDoublyLinkedList(final double[] x, final double[ } // if first and last node are the same then remove the end node and set lastNode to the start if (lastNode != null && 
isVertexEquals(lastNode, lastNode.next)) { - removeNode(lastNode); + removeNode(lastNode, true); lastNode = lastNode.next; } @@ -286,7 +286,7 @@ private static final void eliminateHole(final Node holeNode, Node outerNode, dou Node sharedVertex = getSharedVertex(holeNode, next); if (sharedVertex != null) { // Split the resulting polygon. - Node node = splitPolygon(next, sharedVertex); + Node node = splitPolygon(next, sharedVertex, true); // Filter the split nodes. filterPoints(node, node.next); return; @@ -300,8 +300,10 @@ private static final void eliminateHole(final Node holeNode, Node outerNode, dou // Determine whether a hole bridge could be fetched. if(outerNode != null) { + // compute if the bridge overlaps with a polygon edge. + boolean fromPolygon = isPointInLine(outerNode, outerNode.next, holeNode) || isPointInLine(holeNode, holeNode.next, outerNode); // Split the resulting polygon. - Node node = splitPolygon(outerNode, holeNode); + Node node = splitPolygon(outerNode, holeNode, fromPolygon); // Filter the split nodes. filterPoints(node, node.next); } @@ -369,7 +371,7 @@ && pointInEar(p.getX(), p.getY(), hy < my ? hx : qx, hy, mx, my, hy < my ? qx : } /** Check if the provided vertex is in the polygon and return it **/ - private static Node getSharedVertex(Node polygon, Node vertex) { + private static Node getSharedVertex(final Node polygon, final Node vertex) { Node next = polygon; do { if (isVertexEquals(next, vertex)) { @@ -417,10 +419,14 @@ private static final List earcutLinkedList(Object polygon, Node currEa // Determine whether the current triangle must be cut off. 
final boolean isReflex = area(prevNode.getX(), prevNode.getY(), currEar.getX(), currEar.getY(), nextNode.getX(), nextNode.getY()) >= 0; if (isReflex == false && isEar(currEar, mortonOptimized) == true) { + // Compute if edges belong to the polygon + boolean abFromPolygon = prevNode.isNextEdgeFromPolygon; + boolean bcFromPolygon = currEar.isNextEdgeFromPolygon; + boolean caFromPolygon = isEdgeFromPolygon(prevNode, nextNode, mortonOptimized); // Return the triangulated data - tessellation.add(new Triangle(prevNode, currEar, nextNode)); + tessellation.add(new Triangle(prevNode, abFromPolygon, currEar, bcFromPolygon, nextNode, caFromPolygon)); // Remove the ear node. - removeNode(currEar); + removeNode(currEar, caFromPolygon); // Skipping to the next node leaves fewer slither triangles. currEar = nextNode.next; @@ -439,7 +445,7 @@ private static final List earcutLinkedList(Object polygon, Node currEa continue earcut; case CURE: // if this didn't work, try curing all small self-intersections locally - currEar = cureLocalIntersections(currEar, tessellation); + currEar = cureLocalIntersections(currEar, tessellation, mortonOptimized); state = State.SPLIT; continue earcut; case SPLIT: @@ -531,7 +537,7 @@ && area(n.previous.getX(), n.previous.getY(), n.getX(), n.getY(), n.next.getX(), } /** Iterate through all polygon nodes and remove small local self-intersections **/ - private static final Node cureLocalIntersections(Node startNode, final List tessellation) { + private static final Node cureLocalIntersections(Node startNode, final List tessellation, final boolean mortonOptimized) { Node node = startNode; Node nextNode; do { @@ -544,12 +550,17 @@ private static final Node cureLocalIntersections(Node startNode, final List tessellation, final boolean mortonIndexed) { + private static final boolean splitEarcut(final Object polygon, final Node start, final List tessellation, final boolean mortonOptimized) { // Search for a valid diagonal that divides the polygon into two. 
Node searchNode = start; Node nextNode; @@ -569,17 +580,17 @@ private static final boolean splitEarcut(Object polygon, final Node start, final while (diagonal != searchNode.previous) { if(searchNode.idx != diagonal.idx && isValidDiagonal(searchNode, diagonal)) { // Split the polygon into two at the point of the diagonal - Node splitNode = splitPolygon(searchNode, diagonal); + Node splitNode = splitPolygon(searchNode, diagonal, isEdgeFromPolygon(searchNode, diagonal, mortonOptimized)); // Filter the resulting polygon. searchNode = filterPoints(searchNode, searchNode.next); splitNode = filterPoints(splitNode, splitNode.next); // Attempt to earcut both of the resulting polygons - if (mortonIndexed) { + if (mortonOptimized) { sortByMortonWithReset(searchNode); sortByMortonWithReset(splitNode); } - earcutLinkedList(polygon, searchNode, tessellation, State.INIT, mortonIndexed); - earcutLinkedList(polygon, splitNode, tessellation, State.INIT, mortonIndexed); + earcutLinkedList(polygon, searchNode, tessellation, State.INIT, mortonOptimized); + earcutLinkedList(polygon, splitNode, tessellation, State.INIT, mortonOptimized); // Finish the iterative search return true; } @@ -590,14 +601,120 @@ private static final boolean splitEarcut(Object polygon, final Node start, final return false; } + /** Computes if edge defined by a and b overlaps with a polygon edge **/ + private static boolean isEdgeFromPolygon(final Node a, final Node b, final boolean isMorton) { + if (isMorton) { + return isMortonEdgeFromPolygon(a, b); + } + Node next = a; + do { + if (isPointInLine(next, next.next, a) && isPointInLine(next, next.next, b)) { + return next.isNextEdgeFromPolygon; + } + if (isPointInLine(next, next.previous, a) && isPointInLine(next, next.previous, b)) { + return next.previous.isNextEdgeFromPolygon; + } + next = next.next; + } while(next != a); + return false; + } + + /** Uses morton code for speed to determine whether or not and edge defined by a and b overlaps with a polygon edge 
*/ + private static final boolean isMortonEdgeFromPolygon(final Node a, final Node b) { + // edge bbox (flip the bits so negative encoded values are < positive encoded values) + final int minTX = StrictMath.min(a.x, b.x) ^ 0x80000000; + final int minTY = StrictMath.min(a.y, b.y) ^ 0x80000000; + final int maxTX = StrictMath.max(a.x, b.x) ^ 0x80000000; + final int maxTY = StrictMath.max(a.y, b.y) ^ 0x80000000; + + // z-order range for the current edge; + final long minZ = BitUtil.interleave(minTX, minTY); + final long maxZ = BitUtil.interleave(maxTX, maxTY); + + // now make sure we don't have other points inside the potential ear; + + // look for points inside edge in both directions + Node p = a.previousZ; + Node n = a.nextZ; + while (p != null && Long.compareUnsigned(p.morton, minZ) >= 0 + && n != null && Long.compareUnsigned(n.morton, maxZ) <= 0) { + if (isPointInLine(p, p.next, a) && isPointInLine(p, p.next, b)) { + return p.isNextEdgeFromPolygon; + } + if (isPointInLine(p, p.previous, a) && isPointInLine(p, p.previous, b)) { + return p.previous.isNextEdgeFromPolygon; + } + + p = p.previousZ; + + if (isPointInLine(n, n.next, a) && isPointInLine(n, n.next, b)) { + return n.isNextEdgeFromPolygon; + } + if (isPointInLine(n, n.previous, a) && isPointInLine(n, n.previous, b)) { + return n.previous.isNextEdgeFromPolygon; + } + + n = n.nextZ; + } + + // first look for points inside the edge in decreasing z-order + while (p != null && Long.compareUnsigned(p.morton, minZ) >= 0) { + if (isPointInLine(p, p.next, a) && isPointInLine(p, p.next, b)) { + return p.isNextEdgeFromPolygon; + } + if (isPointInLine(p, p.previous, a) && isPointInLine(p, p.previous, b)) { + return p.previous.isNextEdgeFromPolygon; + } + p = p.previousZ; + } + // then look for points in increasing z-order + while (n != null && + Long.compareUnsigned(n.morton, maxZ) <= 0) { + if (isPointInLine(n, n.next, a) && isPointInLine(n, n.next, b)) { + return n.isNextEdgeFromPolygon; + } + if (isPointInLine(n, 
n.previous, a) && isPointInLine(n, n.previous, b)) { + return n.previous.isNextEdgeFromPolygon; + } + n = n.nextZ; + } + return false; + } + + private static boolean isPointInLine(final Node a, final Node b, final Node point) { + return isPointInLine(a, b, point.getX(), point.getY()); + } + + private static boolean isPointInLine(final Node a, final Node b, final double lon, final double lat) { + final double dxc = lon - a.getX(); + final double dyc = lat - a.getY(); + + final double dxl = b.getX() - a.getX(); + final double dyl = b.getY() - a.getY(); + + if (dxc * dyl - dyc * dxl == 0) { + if (Math.abs(dxl) >= Math.abs(dyl)) { + return dxl > 0 ? + a.getX() <= lon && lon <= b.getX() : + b.getX() <= lon && lon <= a.getX(); + } else { + return dyl > 0 ? + a.getY() <= lat && lat <= b.getY() : + b.getY() <= lat && lat <= a.getY(); + } + } + return false; + } + /** Links two polygon vertices using a bridge. **/ - private static final Node splitPolygon(final Node a, final Node b) { + private static final Node splitPolygon(final Node a, final Node b, boolean edgeFromPolygon) { final Node a2 = new Node(a); final Node b2 = new Node(b); final Node an = a.next; final Node bp = b.previous; a.next = b; + a.isNextEdgeFromPolygon = edgeFromPolygon; a.nextZ = b; b.previous = a; b.previousZ = a; @@ -606,6 +723,7 @@ private static final Node splitPolygon(final Node a, final Node b) { an.previous = a2; an.previousZ = a2; b2.next = a2; + b2.isNextEdgeFromPolygon = edgeFromPolygon; b2.nextZ = a2; a2.previous = b2; a2.previousZ = b2; @@ -628,7 +746,7 @@ && isLocallyInside(a, b) && isLocallyInside(b, a) } /** Determine whether the polygon defined between node start and node end is CW */ - private static boolean isCWPolygon(Node start, Node end) { + private static boolean isCWPolygon(final Node start, final Node end) { Node next = start; double windingSum = 0; do { @@ -796,10 +914,13 @@ private static final Node filterPoints(final Node start, Node end) { continueIteration = false; nextNode 
= node.next; prevNode = node.previous; - if (isVertexEquals(node, nextNode) - || area(prevNode.getX(), prevNode.getY(), node.getX(), node.getY(), nextNode.getX(), nextNode.getY()) == 0) { + //We can filter points when they are the same, if not and they are co-linear we can only + //remove it if both edges have the same value in .isNextEdgeFromPolygon + if (isVertexEquals(node, nextNode) || + (prevNode.isNextEdgeFromPolygon == node.isNextEdgeFromPolygon && + area(prevNode.getX(), prevNode.getY(), node.getX(), node.getY(), nextNode.getX(), nextNode.getY()) == 0)) { // Remove the node - removeNode(node); + removeNode(node, prevNode.isNextEdgeFromPolygon); node = end = prevNode; if (node == nextNode) { @@ -835,9 +956,10 @@ private static final Node insertNode(final double[] x, final double[] y, int ind } /** Removes a node from the doubly linked list */ - private static final void removeNode(Node node) { + private static final void removeNode(Node node, boolean edgeFromPolygon) { node.next.previous = node.previous; node.previous.next = node.next; + node.previous.isNextEdgeFromPolygon = edgeFromPolygon; if (node.previousZ != null) { node.previousZ.nextZ = node.nextZ; @@ -873,13 +995,23 @@ private static boolean pointInEar(final double x, final double y, final double a /** compute whether the given x, y point is in a triangle; uses the winding order method */ public static boolean pointInTriangle (double x, double y, double ax, double ay, double bx, double by, double cx, double cy) { - int a = orient(x, y, ax, ay, bx, by); - int b = orient(x, y, bx, by, cx, cy); - if (a == 0 || b == 0 || a < 0 == b < 0) { - int c = orient(x, y, cx, cy, ax, ay); - return c == 0 || (c < 0 == (b < 0 || a < 0)); + double minX = StrictMath.min(ax, StrictMath.min(bx, cx)); + double minY = StrictMath.min(ay, StrictMath.min(by, cy)); + double maxX = StrictMath.max(ax, StrictMath.max(bx, cx)); + double maxY = StrictMath.max(ay, StrictMath.max(by, cy)); + //check the bounding box because if the 
triangle is degenerated, e.g points and lines, we need to filter out + //coplanar points that are not part of the triangle. + if (x >= minX && x <= maxX && y >= minY && y <= maxY ) { + int a = orient(x, y, ax, ay, bx, by); + int b = orient(x, y, bx, by, cx, cy); + if (a == 0 || b == 0 || a < 0 == b < 0) { + int c = orient(x, y, cx, cy, ax, ay); + return c == 0 || (c < 0 == (b < 0 || a < 0)); + } + return false; + } else { + return false; } - return false; } /** Brute force compute if a point is in the polygon by traversing entire triangulation @@ -901,8 +1033,7 @@ protected static class Node { private final int idx; // vertex index in the polygon private final int vrtxIdx; - // reference to the polygon for lat/lon values -// private final Polygon polygon; + // reference to the polygon for lat/lon values; private final double[] polyX; private final double[] polyY; // encoded x value @@ -920,6 +1051,8 @@ protected static class Node { private Node previousZ; // next z node private Node nextZ; + // if the edge from this node to the next node is part of the polygon edges + private boolean isNextEdgeFromPolygon; protected Node(final double[] x, final double[] y, final int index, final int vertexIndex, final boolean isGeo) { this.idx = index; @@ -933,6 +1066,7 @@ protected Node(final double[] x, final double[] y, final int index, final int ve this.next = null; this.previousZ = null; this.nextZ = null; + this.isNextEdgeFromPolygon = true; } /** simple deep copy constructor */ @@ -948,6 +1082,7 @@ protected Node(Node other) { this.next = other.next; this.previousZ = other.previousZ; this.nextZ = other.nextZ; + this.isNextEdgeFromPolygon = other.isNextEdgeFromPolygon; } /** get the x value */ @@ -979,9 +1114,11 @@ public String toString() { /** Triangle in the tessellated mesh */ public final static class Triangle { Node[] vertex; + boolean[] edgeFromPolygon; - protected Triangle(Node a, Node b, Node c) { + protected Triangle(Node a, boolean isABfromPolygon, Node b, boolean 
isBCfromPolygon, Node c, boolean isCAfromPolygon) { this.vertex = new Node[] {a, b, c}; + this.edgeFromPolygon = new boolean[] {isABfromPolygon, isBCfromPolygon, isCAfromPolygon}; } /** get quantized x value for the given vertex */ @@ -1004,6 +1141,11 @@ public double getX(int vertex) { return this.vertex[vertex].getX(); } + /** get if edge is shared with the polygon for the given edge */ + public boolean isEdgefromPolygon(int startVertex) { + return edgeFromPolygon[startVertex]; + } + /** utility method to compute whether the point is in the triangle */ protected boolean containsPoint(double lat, double lon) { return pointInTriangle(lon, lat, @@ -1014,9 +1156,9 @@ protected boolean containsPoint(double lat, double lon) { /** pretty print the triangle vertices */ public String toString() { - String result = vertex[0].x + ", " + vertex[0].y + " " + - vertex[1].x + ", " + vertex[1].y + " " + - vertex[2].x + ", " + vertex[2].y; + String result = vertex[0].x + ", " + vertex[0].y + " [" + edgeFromPolygon[0] + "] " + + vertex[1].x + ", " + vertex[1].y + " [" + edgeFromPolygon[1] + "] " + + vertex[2].x + ", " + vertex[2].y + " [" + edgeFromPolygon[2] + "]"; return result; } } diff --git a/lucene/sandbox/src/java/org/apache/lucene/search/BM25FQuery.java b/lucene/sandbox/src/java/org/apache/lucene/search/BM25FQuery.java index 86757daa0889..ebaf343a4519 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/search/BM25FQuery.java +++ b/lucene/sandbox/src/java/org/apache/lucene/search/BM25FQuery.java @@ -281,9 +281,10 @@ class BM25FWeight extends Weight { termStates = new TermStates[fieldTerms.length]; for (int i = 0; i < termStates.length; i++) { FieldAndWeight field = fieldAndWeights.get(fieldTerms[i].field()); - termStates[i] = TermStates.build(searcher.getTopReaderContext(), fieldTerms[i], true); - TermStatistics termStats = searcher.termStatistics(fieldTerms[i], termStates[i]); - if (termStats != null) { + TermStates ts = TermStates.build(searcher.getTopReaderContext(), 
fieldTerms[i], true); + termStates[i] = ts; + if (ts.docFreq() > 0) { + TermStatistics termStats = searcher.termStatistics(fieldTerms[i], ts.docFreq(), ts.totalTermFreq()); docFreq = Math.max(termStats.docFreq(), docFreq); totalTermFreq += (double) field.weight * termStats.totalTermFreq(); } diff --git a/lucene/sandbox/src/java/org/apache/lucene/search/MultiRangeQuery.java b/lucene/sandbox/src/java/org/apache/lucene/search/MultiRangeQuery.java index 1bb4f4b1bf95..988c20b15cae 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/search/MultiRangeQuery.java +++ b/lucene/sandbox/src/java/org/apache/lucene/search/MultiRangeQuery.java @@ -279,7 +279,7 @@ public Scorer get(long leadCost) throws IOException { public long cost() { if (cost == -1) { // Computing the cost may be expensive, so only do it if necessary - cost = values.estimatePointCount(visitor) * rangeClauses.size(); + cost = values.estimateDocCount(visitor) * rangeClauses.size(); assert cost >= 0; } return cost; diff --git a/lucene/sandbox/src/java/org/apache/lucene/search/TermAutomatonQuery.java b/lucene/sandbox/src/java/org/apache/lucene/search/TermAutomatonQuery.java index 95a364e43a40..3e18e9b51af0 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/search/TermAutomatonQuery.java +++ b/lucene/sandbox/src/java/org/apache/lucene/search/TermAutomatonQuery.java @@ -361,9 +361,9 @@ public TermAutomatonWeight(Automaton automaton, IndexSearcher searcher, Map ent : idToTerm.entrySet()) { Integer termID = ent.getKey(); if (ent.getValue() != null) { - TermStatistics stats = searcher.termStatistics(new Term(field, ent.getValue()), termStates.get(termID)); - if (stats != null) { - allTermStats.add(stats); + TermStates ts = termStates.get(termID); + if (ts.docFreq() > 0) { + allTermStats.add(searcher.termStatistics(new Term(field, ent.getValue()), ts.docFreq(), ts.totalTermFreq())); } } } diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/BaseLatLonShapeTestCase.java 
b/lucene/sandbox/src/test/org/apache/lucene/document/BaseLatLonShapeTestCase.java index 0ca3563bb40b..5095bdc0403c 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/BaseLatLonShapeTestCase.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/BaseLatLonShapeTestCase.java @@ -264,18 +264,18 @@ protected double quantizeLonCeil(double rawLon) { } @Override - double[] quantizeTriangle(double ax, double ay, double bx, double by, double cx, double cy) { - int[] decoded = encodeDecodeTriangle(ax, ay, bx, by, cx, cy); - return new double[]{decodeLatitude(decoded[0]), decodeLongitude(decoded[1]), decodeLatitude(decoded[2]), decodeLongitude(decoded[3]), decodeLatitude(decoded[4]), decodeLongitude(decoded[5])}; + double[] quantizeTriangle(double ax, double ay, boolean ab, double bx, double by, boolean bc, double cx, double cy, boolean ca) { + ShapeField.DecodedTriangle decoded = encodeDecodeTriangle(ax, ay, ab, bx, by, bc, cx, cy, ca); + return new double[]{decodeLatitude(decoded.aY), decodeLongitude(decoded.aX), decodeLatitude(decoded.bY), decodeLongitude(decoded.bX), decodeLatitude(decoded.cY), decodeLongitude(decoded.cX)}; } @Override - int[] encodeDecodeTriangle(double ax, double ay, double bx, double by, double cx, double cy) { + ShapeField.DecodedTriangle encodeDecodeTriangle(double ax, double ay, boolean ab, double bx, double by, boolean bc, double cx, double cy, boolean ca) { byte[] encoded = new byte[7 * ShapeField.BYTES]; - ShapeField.encodeTriangle(encoded, encodeLatitude(ay), encodeLongitude(ax), encodeLatitude(by), encodeLongitude(bx), encodeLatitude(cy), encodeLongitude(cx)); - int[] decoded = new int[6]; - ShapeField.decodeTriangle(encoded, decoded); - return decoded; + ShapeField.encodeTriangle(encoded, encodeLatitude(ay), encodeLongitude(ax), ab, encodeLatitude(by), encodeLongitude(bx), bc, encodeLatitude(cy), encodeLongitude(cx), ca); + ShapeField.DecodedTriangle triangle = new ShapeField.DecodedTriangle(); + 
ShapeField.decodeTriangle(encoded, triangle); + return triangle; } }; } diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/BaseShapeEncodingTestCase.java b/lucene/sandbox/src/test/org/apache/lucene/document/BaseShapeEncodingTestCase.java index daa9bacb3800..5d7579f5f1bc 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/BaseShapeEncodingTestCase.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/BaseShapeEncodingTestCase.java @@ -16,8 +16,6 @@ */ package org.apache.lucene.document; -import java.util.Arrays; - import org.apache.lucene.geo.GeoUtils; import org.apache.lucene.geo.Polygon2D; import org.apache.lucene.index.PointValues; @@ -55,15 +53,15 @@ public void testPolygonEncodingMinLatMinLon() { int cxEnc = encodeX(cx); verifyEncodingPermutations(ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); byte[] b = new byte[7 * ShapeField.BYTES]; - ShapeField.encodeTriangle(b, ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); - int[] encoded = new int[6]; + ShapeField.encodeTriangle(b, ayEnc, axEnc, true, byEnc, bxEnc, true, cyEnc, cxEnc, true); + ShapeField.DecodedTriangle encoded = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == ayEnc); - assertTrue(encoded[1] == axEnc); - assertTrue(encoded[2] == byEnc); - assertTrue(encoded[3] == bxEnc); - assertTrue(encoded[4] == cyEnc); - assertTrue(encoded[5] == cxEnc); + assertEquals(encoded.aY, ayEnc); + assertEquals(encoded.aX, axEnc); + assertEquals(encoded.bY, byEnc); + assertEquals(encoded.bX, bxEnc); + assertEquals(encoded.cY, cyEnc); + assertEquals(encoded.cX, cxEnc); } //One shared point with MBR -> MinLat, MaxLon @@ -82,15 +80,15 @@ public void testPolygonEncodingMinLatMaxLon() { int cxEnc = encodeX(cx); verifyEncodingPermutations(ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); byte[] b = new byte[7 * ShapeField.BYTES]; - ShapeField.encodeTriangle(b, ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); - int[] encoded = new int[6]; + ShapeField.encodeTriangle(b, 
ayEnc, axEnc, true, byEnc, bxEnc, true, cyEnc, cxEnc, true); + ShapeField.DecodedTriangle encoded = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == ayEnc); - assertTrue(encoded[1] == axEnc); - assertTrue(encoded[2] == byEnc); - assertTrue(encoded[3] == bxEnc); - assertTrue(encoded[4] == cyEnc); - assertTrue(encoded[5] == cxEnc); + assertEquals(encoded.aY, ayEnc); + assertEquals(encoded.aX, axEnc); + assertEquals(encoded.bY, byEnc); + assertEquals(encoded.bX, bxEnc); + assertEquals(encoded.cY, cyEnc); + assertEquals(encoded.cX, cxEnc); } //One shared point with MBR -> MaxLat, MaxLon @@ -109,15 +107,15 @@ public void testPolygonEncodingMaxLatMaxLon() { int cxEnc = encodeX(blon); verifyEncodingPermutations(ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); byte[] b = new byte[7 * ShapeField.BYTES]; - ShapeField.encodeTriangle(b, ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); - int[] encoded = new int[6]; + ShapeField.encodeTriangle(b, ayEnc, axEnc, true, byEnc, bxEnc, true, cyEnc, cxEnc, true); + ShapeField.DecodedTriangle encoded = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == ayEnc); - assertTrue(encoded[1] == axEnc); - assertTrue(encoded[2] == byEnc); - assertTrue(encoded[3] == bxEnc); - assertTrue(encoded[4] == cyEnc); - assertTrue(encoded[5] == cxEnc); + assertEquals(encoded.aY, ayEnc); + assertEquals(encoded.aX, axEnc); + assertEquals(encoded.bY, byEnc); + assertEquals(encoded.bX, bxEnc); + assertEquals(encoded.cY, cyEnc); + assertEquals(encoded.cX, cxEnc); } //One shared point with MBR -> MaxLat, MinLon @@ -136,15 +134,15 @@ public void testPolygonEncodingMaxLatMinLon() { int cxEnc = encodeX(blon); verifyEncodingPermutations(ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); byte[] b = new byte[7 * ShapeField.BYTES]; - ShapeField.encodeTriangle(b, ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); - int[] encoded = new int[6]; + ShapeField.encodeTriangle(b, ayEnc, axEnc, true, byEnc, 
bxEnc, true, cyEnc, cxEnc, true); + ShapeField.DecodedTriangle encoded = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == ayEnc); - assertTrue(encoded[1] == axEnc); - assertTrue(encoded[2] == byEnc); - assertTrue(encoded[3] == bxEnc); - assertTrue(encoded[4] == cyEnc); - assertTrue(encoded[5] == cxEnc); + assertEquals(encoded.aY, ayEnc); + assertEquals(encoded.aX, axEnc); + assertEquals(encoded.bY, byEnc); + assertEquals(encoded.bX, bxEnc); + assertEquals(encoded.cY, cyEnc); + assertEquals(encoded.cX, cxEnc); } //Two shared point with MBR -> [MinLat, MinLon], [MaxLat, MaxLon], third point below @@ -163,15 +161,15 @@ public void testPolygonEncodingMinLatMinLonMaxLatMaxLonBelow() { int cxEnc = encodeX(cx); verifyEncodingPermutations(ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); byte[] b = new byte[7 * ShapeField.BYTES]; - ShapeField.encodeTriangle(b, ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); - int[] encoded = new int[6]; + ShapeField.encodeTriangle(b, ayEnc, axEnc, true, byEnc, bxEnc, true, cyEnc, cxEnc, true); + ShapeField.DecodedTriangle encoded = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == ayEnc); - assertTrue(encoded[1] == axEnc); - assertTrue(encoded[2] == byEnc); - assertTrue(encoded[3] == bxEnc); - assertTrue(encoded[4] == cyEnc); - assertTrue(encoded[5] == cxEnc); + assertEquals(encoded.aY, ayEnc); + assertEquals(encoded.aX, axEnc); + assertEquals(encoded.bY, byEnc); + assertEquals(encoded.bX, bxEnc); + assertEquals(encoded.cY, cyEnc); + assertEquals(encoded.cX, cxEnc); } //Two shared point with MBR -> [MinLat, MinLon], [MaxLat, MaxLon], third point above @@ -190,15 +188,15 @@ public void testPolygonEncodingMinLatMinLonMaxLatMaxLonAbove() { int cxEnc = encodeX(cx); verifyEncodingPermutations(ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); byte[] b = new byte[7 * ShapeField.BYTES]; - ShapeField.encodeTriangle(b, ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); - int[] 
encoded = new int[6]; + ShapeField.encodeTriangle(b, ayEnc, axEnc, true, byEnc, bxEnc, true, cyEnc, cxEnc, true); + ShapeField.DecodedTriangle encoded = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == ayEnc); - assertTrue(encoded[1] == axEnc); - assertTrue(encoded[2] == byEnc); - assertTrue(encoded[3] == bxEnc); - assertTrue(encoded[4] == cyEnc); - assertTrue(encoded[5] == cxEnc); + assertEquals(encoded.aY, ayEnc); + assertEquals(encoded.aX, axEnc); + assertEquals(encoded.bY, byEnc); + assertEquals(encoded.bX, bxEnc); + assertEquals(encoded.cY, cyEnc); + assertEquals(encoded.cX, cxEnc); } //Two shared point with MBR -> [MinLat, MaxLon], [MaxLat, MinLon], third point below @@ -217,15 +215,15 @@ public void testPolygonEncodingMinLatMaxLonMaxLatMinLonBelow() { int cxEnc = encodeX(cx); verifyEncodingPermutations(ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); byte[] b = new byte[7 * ShapeField.BYTES]; - ShapeField.encodeTriangle(b, ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); - int[] encoded = new int[6]; + ShapeField.encodeTriangle(b, ayEnc, axEnc, true, byEnc, bxEnc, true, cyEnc, cxEnc, true); + ShapeField.DecodedTriangle encoded = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == ayEnc); - assertTrue(encoded[1] == axEnc); - assertTrue(encoded[2] == byEnc); - assertTrue(encoded[3] == bxEnc); - assertTrue(encoded[4] == cyEnc); - assertTrue(encoded[5] == cxEnc); + assertEquals(encoded.aY, ayEnc); + assertEquals(encoded.aX, axEnc); + assertEquals(encoded.bY, byEnc); + assertEquals(encoded.bX, bxEnc); + assertEquals(encoded.cY, cyEnc); + assertEquals(encoded.cX, cxEnc); } //Two shared point with MBR -> [MinLat, MaxLon], [MaxLat, MinLon], third point above @@ -244,15 +242,15 @@ public void testPolygonEncodingMinLatMaxLonMaxLatMinLonAbove() { int cxEnc = encodeX(cx); verifyEncodingPermutations(ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); byte[] b = new byte[7 * ShapeField.BYTES]; - 
ShapeField.encodeTriangle(b, ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); - int[] encoded = new int[6]; + ShapeField.encodeTriangle(b, ayEnc, axEnc, true, byEnc, bxEnc, true, cyEnc, cxEnc, true); + ShapeField.DecodedTriangle encoded = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == ayEnc); - assertTrue(encoded[1] == axEnc); - assertTrue(encoded[2] == byEnc); - assertTrue(encoded[3] == bxEnc); - assertTrue(encoded[4] == cyEnc); - assertTrue(encoded[5] == cxEnc); + assertEquals(encoded.aY, ayEnc); + assertEquals(encoded.aX, axEnc); + assertEquals(encoded.bY, byEnc); + assertEquals(encoded.bX, bxEnc); + assertEquals(encoded.cY, cyEnc); + assertEquals(encoded.cX, cxEnc); } //all points shared with MBR @@ -271,15 +269,15 @@ public void testPolygonEncodingAllSharedAbove() { int cxEnc = encodeX(cx); verifyEncodingPermutations(ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); byte[] b = new byte[7 * ShapeField.BYTES]; - ShapeField.encodeTriangle(b, ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); - int[] encoded = new int[6]; + ShapeField.encodeTriangle(b, ayEnc, axEnc, true, byEnc, bxEnc, true, cyEnc, cxEnc, true); + ShapeField.DecodedTriangle encoded = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == ayEnc); - assertTrue(encoded[1] == axEnc); - assertTrue(encoded[2] == byEnc); - assertTrue(encoded[3] == bxEnc); - assertTrue(encoded[4] == cyEnc); - assertTrue(encoded[5] == cxEnc); + assertEquals(encoded.aY, ayEnc); + assertEquals(encoded.aX, axEnc); + assertEquals(encoded.bY, byEnc); + assertEquals(encoded.bX, bxEnc); + assertEquals(encoded.cY, cyEnc); + assertEquals(encoded.cX, cxEnc); } //all points shared with MBR @@ -297,15 +295,15 @@ public void testPolygonEncodingAllSharedBelow() { int cyEnc = encodeY(cy); int cxEnc = encodeX(cx); byte[] b = new byte[7 * ShapeField.BYTES]; - ShapeField.encodeTriangle(b, ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); - int[] encoded = new int[6]; + 
ShapeField.encodeTriangle(b, ayEnc, axEnc, true, byEnc, bxEnc, true, cyEnc, cxEnc, true); + ShapeField.DecodedTriangle encoded = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == ayEnc); - assertTrue(encoded[1] == axEnc); - assertTrue(encoded[2] == byEnc); - assertTrue(encoded[3] == bxEnc); - assertTrue(encoded[4] == cyEnc); - assertTrue(encoded[5] == cxEnc); + assertEquals(encoded.aY, ayEnc); + assertEquals(encoded.aX, axEnc); + assertEquals(encoded.bY, byEnc); + assertEquals(encoded.bX, bxEnc); + assertEquals(encoded.cY, cyEnc); + assertEquals(encoded.cX, cxEnc); } //[a,b,c] == [c,a,b] == [b,c,a] == [c,b,a] == [b,a,c] == [a,c,b] @@ -314,34 +312,34 @@ public void verifyEncodingPermutations(int ayEnc, int axEnc, int byEnc, int bxEn assertTrue(GeoUtils.orient(ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc) != 0); byte[] b = new byte[7 * ShapeField.BYTES]; //[a,b,c] - ShapeField.encodeTriangle(b, ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); - int[] encodedABC = new int[6]; + ShapeField.encodeTriangle(b, ayEnc, axEnc, true, byEnc, bxEnc, true, cyEnc, cxEnc, false); + ShapeField.DecodedTriangle encodedABC = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encodedABC); //[c,a,b] - ShapeField.encodeTriangle(b, cyEnc, cxEnc, ayEnc, axEnc, byEnc, bxEnc); - int[] encodedCAB = new int[6]; + ShapeField.encodeTriangle(b, cyEnc, cxEnc, false, ayEnc, axEnc, true, byEnc, bxEnc, true); + ShapeField.DecodedTriangle encodedCAB = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encodedCAB); - assertTrue(Arrays.equals(encodedABC, encodedCAB)); + assertEquals(encodedABC, encodedCAB); //[b,c,a] - ShapeField.encodeTriangle(b, byEnc, bxEnc, cyEnc, cxEnc, ayEnc, axEnc); - int[] encodedBCA = new int[6]; + ShapeField.encodeTriangle(b, byEnc, bxEnc, true, cyEnc, cxEnc, false, ayEnc, axEnc, true); + ShapeField.DecodedTriangle encodedBCA = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encodedBCA); - 
assertTrue(Arrays.equals(encodedABC, encodedBCA)); + assertEquals(encodedABC, encodedBCA); //[c,b,a] - ShapeField.encodeTriangle(b, cyEnc, cxEnc, byEnc, bxEnc, ayEnc, axEnc); - int[] encodedCBA= new int[6]; + ShapeField.encodeTriangle(b, cyEnc, cxEnc, true, byEnc, bxEnc, true, ayEnc, axEnc, false); + ShapeField.DecodedTriangle encodedCBA= new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encodedCBA); - assertTrue(Arrays.equals(encodedABC, encodedCBA)); + assertEquals(encodedABC, encodedCBA); //[b,a,c] - ShapeField.encodeTriangle(b, byEnc, bxEnc, ayEnc, axEnc, cyEnc, cxEnc); - int[] encodedBAC= new int[6]; + ShapeField.encodeTriangle(b, byEnc, bxEnc, true, ayEnc, axEnc, false, cyEnc, cxEnc, true); + ShapeField.DecodedTriangle encodedBAC= new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encodedBAC); - assertTrue(Arrays.equals(encodedABC, encodedBAC)); + assertEquals(encodedABC, encodedBAC); //[a,c,b] - ShapeField.encodeTriangle(b, ayEnc, axEnc, cyEnc, cxEnc, byEnc, bxEnc); - int[] encodedACB= new int[6]; + ShapeField.encodeTriangle(b, ayEnc, axEnc, false, cyEnc, cxEnc, true, byEnc, bxEnc, true); + ShapeField.DecodedTriangle encodedACB= new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encodedACB); - assertTrue(Arrays.equals(encodedABC, encodedACB)); + assertEquals(encodedABC, encodedACB); } public void testPointEncoding() { @@ -350,11 +348,15 @@ public void testPointEncoding() { int latEnc = encodeY(lat); int lonEnc = encodeX(lon); byte[] b = new byte[7 * ShapeField.BYTES]; - ShapeField.encodeTriangle(b, latEnc, lonEnc, latEnc, lonEnc, latEnc, lonEnc); - int[] encoded = new int[6]; + ShapeField.encodeTriangle(b, latEnc, lonEnc, true, latEnc, lonEnc, true, latEnc, lonEnc, true); + ShapeField.DecodedTriangle encoded = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == latEnc && encoded[2] == latEnc && encoded[4] == latEnc); - assertTrue(encoded[1] == lonEnc && encoded[3] == lonEnc 
&& encoded[5] == lonEnc); + assertEquals(encoded.aY, latEnc); + assertEquals(encoded.aX, lonEnc); + assertEquals(encoded.bY, latEnc); + assertEquals(encoded.bX, lonEnc); + assertEquals(encoded.cY, latEnc); + assertEquals(encoded.cX, lonEnc); } public void testLineEncodingSameLat() { @@ -365,33 +367,31 @@ public void testLineEncodingSameLat() { int axEnc = encodeX(ax); int bxEnc = encodeX(bx); byte[] b = new byte[7 * ShapeField.BYTES]; - ShapeField.encodeTriangle(b, latEnc, axEnc, latEnc, bxEnc, latEnc, axEnc); - int[] encoded = new int[6]; + ShapeField.encodeTriangle(b, latEnc, axEnc, true, latEnc, bxEnc, true, latEnc, axEnc, true); + ShapeField.DecodedTriangle encoded = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == latEnc); - assertTrue(encoded[1] == axEnc); - assertTrue(encoded[2] == latEnc); - assertTrue(encoded[3] == bxEnc); - assertTrue(encoded[4] == latEnc); - assertTrue(encoded[5] == axEnc); - ShapeField.encodeTriangle(b, latEnc, axEnc, latEnc, axEnc, latEnc, bxEnc); - encoded = new int[6]; + assertEquals(encoded.aY, latEnc); + assertEquals(encoded.aX, axEnc); + assertEquals(encoded.bY, latEnc); + assertEquals(encoded.bX, bxEnc); + assertEquals(encoded.cY, latEnc); + assertEquals(encoded.cX, axEnc); + ShapeField.encodeTriangle(b, latEnc, axEnc, true, latEnc, axEnc, true, latEnc, bxEnc, true); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == latEnc); - assertTrue(encoded[1] == axEnc); - assertTrue(encoded[2] == latEnc); - assertTrue(encoded[3] == axEnc); - assertTrue(encoded[4] == latEnc); - assertTrue(encoded[5] == bxEnc); - ShapeField.encodeTriangle(b, latEnc, bxEnc, latEnc, axEnc, latEnc, axEnc); - encoded = new int[6]; + assertEquals(encoded.aY, latEnc); + assertEquals(encoded.aX, axEnc); + assertEquals(encoded.bY, latEnc); + assertEquals(encoded.bX, axEnc); + assertEquals(encoded.cY, latEnc); + assertEquals(encoded.cX, bxEnc); + ShapeField.encodeTriangle(b, latEnc, bxEnc, true, 
latEnc, axEnc, true, latEnc, axEnc, true); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == latEnc); - assertTrue(encoded[1] == axEnc); - assertTrue(encoded[2] == latEnc); - assertTrue(encoded[3] == bxEnc); - assertTrue(encoded[4] == latEnc); - assertTrue(encoded[5] == axEnc); + assertEquals(encoded.aY, latEnc); + assertEquals(encoded.aX, axEnc); + assertEquals(encoded.bY, latEnc); + assertEquals(encoded.bX, bxEnc); + assertEquals(encoded.cY, latEnc); + assertEquals(encoded.cX, axEnc); } public void testLineEncodingSameLon() { @@ -402,33 +402,31 @@ public void testLineEncodingSameLon() { int byEnc = encodeY(by); int lonEnc = encodeX(lon); byte[] b = new byte[7 * ShapeField.BYTES]; - ShapeField.encodeTriangle(b, ayEnc, lonEnc, byEnc, lonEnc, ayEnc, lonEnc); - int[] encoded = new int[6]; + ShapeField.encodeTriangle(b, ayEnc, lonEnc, true, byEnc, lonEnc, true, ayEnc, lonEnc, true); + ShapeField.DecodedTriangle encoded = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == ayEnc); - assertTrue(encoded[1] == lonEnc); - assertTrue(encoded[2] == byEnc); - assertTrue(encoded[3] == lonEnc); - assertTrue(encoded[4] == ayEnc); - assertTrue(encoded[5] == lonEnc); - ShapeField.encodeTriangle(b, ayEnc, lonEnc, ayEnc, lonEnc, byEnc, lonEnc); - encoded = new int[6]; + assertEquals(encoded.aY, ayEnc); + assertEquals(encoded.aX, lonEnc); + assertEquals(encoded.bY, byEnc); + assertEquals(encoded.bX, lonEnc); + assertEquals(encoded.cY, ayEnc); + assertEquals(encoded.cX, lonEnc); + ShapeField.encodeTriangle(b, ayEnc, lonEnc, true, ayEnc, lonEnc, true, byEnc, lonEnc, true); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == ayEnc); - assertTrue(encoded[1] == lonEnc); - assertTrue(encoded[2] == ayEnc); - assertTrue(encoded[3] == lonEnc); - assertTrue(encoded[4] == byEnc); - assertTrue(encoded[5] == lonEnc); - ShapeField.encodeTriangle(b, byEnc, lonEnc, ayEnc, lonEnc, ayEnc, lonEnc); - encoded = new int[6]; + 
assertEquals(encoded.aY, ayEnc); + assertEquals(encoded.aX, lonEnc); + assertEquals(encoded.bY, ayEnc); + assertEquals(encoded.bX, lonEnc); + assertEquals(encoded.cY, byEnc); + assertEquals(encoded.cX, lonEnc); + ShapeField.encodeTriangle(b, byEnc, lonEnc, true, ayEnc, lonEnc, true, ayEnc, lonEnc, true); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == ayEnc); - assertTrue(encoded[1] == lonEnc); - assertTrue(encoded[2] == byEnc); - assertTrue(encoded[3] == lonEnc); - assertTrue(encoded[4] == ayEnc); - assertTrue(encoded[5] == lonEnc); + assertEquals(encoded.aY, ayEnc); + assertEquals(encoded.aX, lonEnc); + assertEquals(encoded.bY, byEnc); + assertEquals(encoded.bX, lonEnc); + assertEquals(encoded.cY, ayEnc); + assertEquals(encoded.cX, lonEnc); } public void testLineEncoding() { @@ -441,33 +439,31 @@ public void testLineEncoding() { int axEnc = encodeX(ax); int bxEnc = encodeX(bx); byte[] b = new byte[7 * ShapeField.BYTES]; - ShapeField.encodeTriangle(b, ayEnc, axEnc, byEnc, bxEnc, ayEnc, axEnc); - int[] encoded = new int[6]; + ShapeField.encodeTriangle(b, ayEnc, axEnc, true, byEnc, bxEnc, true, ayEnc, axEnc, true); + ShapeField.DecodedTriangle encoded = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == ayEnc); - assertTrue(encoded[1] == axEnc); - assertTrue(encoded[2] == byEnc); - assertTrue(encoded[3] == bxEnc); - assertTrue(encoded[4] == ayEnc); - assertTrue(encoded[5] == axEnc); - ShapeField.encodeTriangle(b, ayEnc, axEnc, ayEnc, axEnc, byEnc, bxEnc); - encoded = new int[6]; + assertEquals(encoded.aY, ayEnc); + assertEquals(encoded.aX, axEnc); + assertEquals(encoded.bY, byEnc); + assertEquals(encoded.bX, bxEnc); + assertEquals(encoded.cY, ayEnc); + assertEquals(encoded.cX, axEnc); + ShapeField.encodeTriangle(b, ayEnc, axEnc, true, ayEnc, axEnc, true, byEnc, bxEnc, true); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == ayEnc); - assertTrue(encoded[1] == axEnc); - 
assertTrue(encoded[2] == ayEnc); - assertTrue(encoded[3] == axEnc); - assertTrue(encoded[4] == byEnc); - assertTrue(encoded[5] == bxEnc); - ShapeField.encodeTriangle(b, byEnc, bxEnc, ayEnc, axEnc, ayEnc, axEnc); - encoded = new int[6]; + assertEquals(encoded.aY, ayEnc); + assertEquals(encoded.aX, axEnc); + assertEquals(encoded.bY, ayEnc); + assertEquals(encoded.bX, axEnc); + assertEquals(encoded.cY, byEnc); + assertEquals(encoded.cX, bxEnc); + ShapeField.encodeTriangle(b, byEnc, bxEnc, true, ayEnc, axEnc, true, ayEnc, axEnc, true); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == ayEnc); - assertTrue(encoded[1] == axEnc); - assertTrue(encoded[2] == byEnc); - assertTrue(encoded[3] == bxEnc); - assertTrue(encoded[4] == ayEnc); - assertTrue(encoded[5] == axEnc); + assertEquals(encoded.aY, ayEnc); + assertEquals(encoded.aX, axEnc); + assertEquals(encoded.bY, byEnc); + assertEquals(encoded.bX, bxEnc); + assertEquals(encoded.cY, ayEnc); + assertEquals(encoded.cX, axEnc); } public void testRandomPointEncoding() { @@ -505,16 +501,16 @@ private void verifyEncoding(double ay, double ax, double by, double bx, double c //quantize the triangle byte[] b = new byte[7 * ShapeField.BYTES]; - ShapeField.encodeTriangle(b, original[0], original[1], original[2], original[3], original[4], original[5]); - int[] encoded = new int[6]; + ShapeField.encodeTriangle(b, original[0], original[1], true, original[2], original[3], true, original[4], original[5], true); + ShapeField.DecodedTriangle encoded = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encoded); double[] encodedQuantize = new double[] { - decodeY(encoded[0]), - decodeX(encoded[1]), - decodeY(encoded[2]), - decodeX(encoded[3]), - decodeY(encoded[4]), - decodeX(encoded[5])}; + decodeY(encoded.aY), + decodeX(encoded.aX), + decodeY(encoded.bY), + decodeX(encoded.bX), + decodeY(encoded.cY), + decodeX(encoded.cX)}; int orientation = GeoUtils.orient(original[1], original[0], original[3], original[2], 
original[5], original[4]); //quantize original @@ -560,14 +556,14 @@ public void testDegeneratedTriangle() { int cyEnc = encodeY(cy); int cxEnc = encodeX(cx); byte[] b = new byte[7 * ShapeField.BYTES]; - ShapeField.encodeTriangle(b, ayEnc, axEnc, byEnc, bxEnc, cyEnc, cxEnc); - int[] encoded = new int[6]; + ShapeField.encodeTriangle(b, ayEnc, axEnc, true, byEnc, bxEnc, true, cyEnc, cxEnc, true); + ShapeField.DecodedTriangle encoded = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(b, encoded); - assertTrue(encoded[0] == byEnc); - assertTrue(encoded[1] == bxEnc); - assertTrue(encoded[2] == cyEnc); - assertTrue(encoded[3] == cxEnc); - assertTrue(encoded[4] == ayEnc); - assertTrue(encoded[5] == axEnc); + assertTrue(encoded.aY == byEnc); + assertTrue(encoded.aX == bxEnc); + assertTrue(encoded.bY == cyEnc); + assertTrue(encoded.bX == cxEnc); + assertTrue(encoded.cY == ayEnc); + assertTrue(encoded.cX == axEnc); } } diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/BaseShapeTestCase.java b/lucene/sandbox/src/test/org/apache/lucene/document/BaseShapeTestCase.java index 1e4e173dcdd4..a7f53feaa315 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/BaseShapeTestCase.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/BaseShapeTestCase.java @@ -545,8 +545,8 @@ protected static abstract class Encoder { abstract double quantizeXCeil(double raw); abstract double quantizeY(double raw); abstract double quantizeYCeil(double raw); - abstract double[] quantizeTriangle(double ax, double ay, double bx, double by, double cx, double cy); - abstract int[] encodeDecodeTriangle(double ax, double ay, double bx, double by, double cx, double cy); + abstract double[] quantizeTriangle(double ax, double ay, boolean ab, double bx, double by, boolean bc, double cx, double cy, boolean ca); + abstract ShapeField.DecodedTriangle encodeDecodeTriangle(double ax, double ay, boolean ab, double bx, double by, boolean bc, double cx, double cy, boolean ca); } 
private int scaledIterationCount(int shapes) { diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/BaseXYShapeTestCase.java b/lucene/sandbox/src/test/org/apache/lucene/document/BaseXYShapeTestCase.java index b706b5f20cdc..0ef15c5983b2 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/BaseXYShapeTestCase.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/BaseXYShapeTestCase.java @@ -144,18 +144,18 @@ protected Encoder getEncoder() { } @Override - double[] quantizeTriangle(double ax, double ay, double bx, double by, double cx, double cy) { - int[] decoded = encodeDecodeTriangle(ax, ay, bx, by, cx, cy); - return new double[]{decode(decoded[0]), decode(decoded[1]), decode(decoded[2]), decode(decoded[3]), decode(decoded[4]), decode(decoded[5])}; + double[] quantizeTriangle(double ax, double ay, boolean ab, double bx, double by, boolean bc, double cx, double cy, boolean ca) { + ShapeField.DecodedTriangle decoded = encodeDecodeTriangle(ax, ay, ab, bx, by, bc, cx, cy, ca); + return new double[]{decode(decoded.aY), decode(decoded.aX), decode(decoded.bY), decode(decoded.bX), decode(decoded.cY), decode(decoded.cX)}; } @Override - int[] encodeDecodeTriangle(double ax, double ay, double bx, double by, double cx, double cy) { + ShapeField.DecodedTriangle encodeDecodeTriangle(double ax, double ay, boolean ab, double bx, double by, boolean bc, double cx, double cy, boolean ca) { byte[] encoded = new byte[7 * ShapeField.BYTES]; - ShapeField.encodeTriangle(encoded, encode(ay), encode(ax), encode(by), encode(bx), encode(cy), encode(cx)); - int[] decoded = new int[6]; - ShapeField.decodeTriangle(encoded, decoded); - return decoded; + ShapeField.encodeTriangle(encoded, encode(ay), encode(ax), ab, encode(by), encode(bx), bc, encode(cy), encode(cx), ca); + ShapeField.DecodedTriangle triangle = new ShapeField.DecodedTriangle(); + ShapeField.decodeTriangle(encoded, triangle); + return triangle; } }; } diff --git 
a/lucene/sandbox/src/test/org/apache/lucene/document/TestFloatPointNearestNeighbor.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestFloatPointNearestNeighbor.java index 335ad1726ef7..8379326cfae2 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/TestFloatPointNearestNeighbor.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestFloatPointNearestNeighbor.java @@ -188,7 +188,7 @@ public void testNearestNeighborRandom() throws Exception { FloatPointNearestNeighbor.NearestHit[] expectedHits = new FloatPointNearestNeighbor.NearestHit[numPoints]; for (int id = 0 ; id < numPoints ; ++id) { FloatPointNearestNeighbor.NearestHit hit = new FloatPointNearestNeighbor.NearestHit(); - hit.distanceSquared = FloatPointNearestNeighbor.euclideanDistanceSquared(origin, values[id]); + hit.distanceSquared = euclideanDistanceSquared(origin, values[id]); hit.docID = id; expectedHits[id] = hit; } @@ -232,6 +232,15 @@ public void testNearestNeighborRandom() throws Exception { dir.close(); } + private static double euclideanDistanceSquared(float[] a, float[] b) { + double sumOfSquaredDifferences = 0.0d; + for (int d = 0 ; d < a.length ; ++d) { + double diff = (double)a[d] - (double)b[d]; + sumOfSquaredDifferences += diff * diff; + } + return sumOfSquaredDifferences; + } + private IndexWriterConfig getIndexWriterConfig() { IndexWriterConfig iwc = newIndexWriterConfig(); iwc.setCodec(Codec.forName("Lucene80")); diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonLineShapeQueries.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonLineShapeQueries.java index d7ed52946d1d..fa31b00071e9 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonLineShapeQueries.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonLineShapeQueries.java @@ -83,13 +83,13 @@ public boolean testBBoxQuery(double minLat, double maxLat, double minLon, double Line line = (Line)shape; Rectangle2D rectangle2D = 
Rectangle2D.create(new Rectangle(minLat, maxLat, minLon, maxLon)); for (int i = 0, j = 1; j < line.numPoints(); ++i, ++j) { - int[] decoded = encoder.encodeDecodeTriangle(line.getLon(i), line.getLat(i), line.getLon(j), line.getLat(j), line.getLon(i), line.getLat(i)); + ShapeField.DecodedTriangle decoded = encoder.encodeDecodeTriangle(line.getLon(i), line.getLat(i), true, line.getLon(j), line.getLat(j), true, line.getLon(i), line.getLat(i), true); if (queryRelation == QueryRelation.WITHIN) { - if (rectangle2D.containsTriangle(decoded[1], decoded[0], decoded[3], decoded[2], decoded[5], decoded[4]) == false) { + if (rectangle2D.containsTriangle(decoded.aX, decoded.aY, decoded.bX, decoded.bY, decoded.cX, decoded.cY) == false) { return false; } } else { - if (rectangle2D.intersectsTriangle(decoded[1], decoded[0], decoded[3], decoded[2], decoded[5], decoded[4]) == true) { + if (rectangle2D.intersectsTriangle(decoded.aX, decoded.aY, decoded.bX, decoded.bY, decoded.cX, decoded.cY) == true) { return queryRelation == QueryRelation.INTERSECTS; } } @@ -110,7 +110,7 @@ public boolean testPolygonQuery(Object poly2d, Object shape) { private boolean testLine(EdgeTree queryPoly, Line line) { for (int i = 0, j = 1; j < line.numPoints(); ++i, ++j) { - double[] qTriangle = encoder.quantizeTriangle(line.getLon(i), line.getLat(i), line.getLon(j), line.getLat(j), line.getLon(i), line.getLat(i)); + double[] qTriangle = encoder.quantizeTriangle(line.getLon(i), line.getLat(i), true, line.getLon(j), line.getLat(j), true, line.getLon(i), line.getLat(i), true); Relation r = queryPoly.relateTriangle(qTriangle[1], qTriangle[0], qTriangle[3], qTriangle[2], qTriangle[5], qTriangle[4]); if (queryRelation == QueryRelation.DISJOINT) { if (r != Relation.CELL_OUTSIDE_QUERY) return false; diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPolygonShapeQueries.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPolygonShapeQueries.java index 
8b3cab4edda5..8fdbf5c182c3 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPolygonShapeQueries.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPolygonShapeQueries.java @@ -72,13 +72,15 @@ public boolean testBBoxQuery(double minLat, double maxLat, double minLon, double Rectangle2D rectangle2D = Rectangle2D.create(new Rectangle(minLat, maxLat, minLon, maxLon)); List tessellation = Tessellator.tessellate(p); for (Tessellator.Triangle t : tessellation) { - int[] decoded = encoder.encodeDecodeTriangle(t.getX(0), t.getY(0), t.getX(1), t.getY(1), t.getX(2), t.getY(2)); + ShapeField.DecodedTriangle decoded = encoder.encodeDecodeTriangle(t.getX(0), t.getY(0), t.isEdgefromPolygon(0), + t.getX(1), t.getY(1), t.isEdgefromPolygon(1), + t.getX(2), t.getY(2), t.isEdgefromPolygon(2)); if (queryRelation == QueryRelation.WITHIN) { - if (rectangle2D.containsTriangle(decoded[1], decoded[0], decoded[3], decoded[2], decoded[5], decoded[4]) == false) { + if (rectangle2D.containsTriangle(decoded.aX, decoded.aY, decoded.bX, decoded.bY, decoded.cX, decoded.cY) == false) { return false; } } else { - if (rectangle2D.intersectsTriangle(decoded[1], decoded[0], decoded[3], decoded[2], decoded[5], decoded[4]) == true) { + if (rectangle2D.intersectsTriangle(decoded.aX, decoded.aY, decoded.bX, decoded.bY, decoded.cX, decoded.cY) == true) { return queryRelation == QueryRelation.INTERSECTS; } } @@ -99,7 +101,9 @@ public boolean testPolygonQuery(Object query, Object shape) { private boolean testPolygon(EdgeTree tree, Polygon shape) { List tessellation = Tessellator.tessellate(shape); for (Tessellator.Triangle t : tessellation) { - double[] qTriangle = encoder.quantizeTriangle(t.getX(0), t.getY(0), t.getX(1), t.getY(1), t.getX(2), t.getY(2)); + double[] qTriangle = encoder.quantizeTriangle(t.getX(0), t.getY(0), t.isEdgefromPolygon(0), + t.getX(1), t.getY(1), t.isEdgefromPolygon(1), + t.getX(2), t.getY(2), t.isEdgefromPolygon(2)); Relation r = 
tree.relateTriangle(qTriangle[1], qTriangle[0], qTriangle[3], qTriangle[2], qTriangle[5], qTriangle[4]); if (queryRelation == QueryRelation.DISJOINT) { if (r != Relation.CELL_OUTSIDE_QUERY) return false; diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShape.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShape.java index 66948a42ebc7..22ae32c5ea88 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShape.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonShape.java @@ -236,12 +236,13 @@ public void testLUCENE8454() throws Exception { Tessellator.Triangle t = Tessellator.tessellate(poly).get(0); byte[] encoded = new byte[7 * ShapeField.BYTES]; - ShapeField.encodeTriangle(encoded, encodeLatitude(t.getY(0)), encodeLongitude(t.getX(0)), - encodeLatitude(t.getY(1)), encodeLongitude(t.getX(1)), encodeLatitude(t.getY(2)), encodeLongitude(t.getX(2))); - int[] decoded = new int[6]; + ShapeField.encodeTriangle(encoded, encodeLatitude(t.getY(0)), encodeLongitude(t.getX(0)), t.isEdgefromPolygon(0), + encodeLatitude(t.getY(1)), encodeLongitude(t.getX(1)), t.isEdgefromPolygon(1), + encodeLatitude(t.getY(2)), encodeLongitude(t.getX(2)), t.isEdgefromPolygon(2)); + ShapeField.DecodedTriangle decoded = new ShapeField.DecodedTriangle(); ShapeField.decodeTriangle(encoded, decoded); - int expected =rectangle2D.intersectsTriangle(decoded[1], decoded[0], decoded[3], decoded[2], decoded[5], decoded[4]) ? 0 : 1; + int expected =rectangle2D.intersectsTriangle(decoded.aX, decoded.aY, decoded.bX, decoded.bY, decoded.cX, decoded.cY) ? 
0 : 1; Document document = new Document(); addPolygonsToDoc(FIELDNAME, document, poly); diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestXYLineShapeQueries.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestXYLineShapeQueries.java index c66b9d1e0792..5f91175c6049 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/TestXYLineShapeQueries.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestXYLineShapeQueries.java @@ -81,13 +81,13 @@ public boolean testBBoxQuery(double minY, double maxY, double minX, double maxX, XYLine line = (XYLine)shape; XYRectangle2D rectangle2D = XYRectangle2D.create(new XYRectangle(minX, maxX, minY, maxY)); for (int i = 0, j = 1; j < line.numPoints(); ++i, ++j) { - int[] decoded = encoder.encodeDecodeTriangle(line.getX(i), line.getY(i), line.getX(j), line.getY(j), line.getX(i), line.getY(i)); + ShapeField.DecodedTriangle decoded = encoder.encodeDecodeTriangle(line.getX(i), line.getY(i), true, line.getX(j), line.getY(j), true, line.getX(i), line.getY(i), true); if (queryRelation == QueryRelation.WITHIN) { - if (rectangle2D.containsTriangle(decoded[1], decoded[0], decoded[3], decoded[2], decoded[5], decoded[4]) == false) { + if (rectangle2D.containsTriangle(decoded.aX, decoded.aY, decoded.bX, decoded.bY, decoded.cX, decoded.cY) == false) { return false; } } else { - if (rectangle2D.intersectsTriangle(decoded[1], decoded[0], decoded[3], decoded[2], decoded[5], decoded[4]) == true) { + if (rectangle2D.intersectsTriangle(decoded.aX, decoded.aY, decoded.bX, decoded.bY, decoded.cX, decoded.cY) == true) { return queryRelation == QueryRelation.INTERSECTS; } } @@ -108,7 +108,7 @@ public boolean testPolygonQuery(Object poly2d, Object shape) { private boolean testLine(EdgeTree queryPoly, XYLine line) { for (int i = 0, j = 1; j < line.numPoints(); ++i, ++j) { - double[] qTriangle = encoder.quantizeTriangle(line.getX(i), line.getY(i), line.getX(j), line.getY(j), line.getX(i), line.getY(i)); + double[] 
qTriangle = encoder.quantizeTriangle(line.getX(i), line.getY(i), true, line.getX(j), line.getY(j), true, line.getX(i), line.getY(i), true); Relation r = queryPoly.relateTriangle(qTriangle[1], qTriangle[0], qTriangle[3], qTriangle[2], qTriangle[5], qTriangle[4]); if (queryRelation == QueryRelation.DISJOINT) { if (r != Relation.CELL_OUTSIDE_QUERY) return false; diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestXYPolygonShapeQueries.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestXYPolygonShapeQueries.java index 82d887aa2582..bbcc554f26a7 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/TestXYPolygonShapeQueries.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestXYPolygonShapeQueries.java @@ -72,13 +72,15 @@ public boolean testBBoxQuery(double minY, double maxY, double minX, double maxX, XYRectangle2D rectangle2D = XYRectangle2D.create(new XYRectangle(minX, maxX, minY, maxY)); List tessellation = Tessellator.tessellate(p); for (Tessellator.Triangle t : tessellation) { - int[] decoded = encoder.encodeDecodeTriangle(t.getX(0), t.getY(0), t.getX(1), t.getY(1), t.getX(2), t.getY(2)); + ShapeField.DecodedTriangle decoded = encoder.encodeDecodeTriangle(t.getX(0), t.getY(0), t.isEdgefromPolygon(0), + t.getX(1), t.getY(1), t.isEdgefromPolygon(1), + t.getX(2), t.getY(2), t.isEdgefromPolygon(2)); if (queryRelation == QueryRelation.WITHIN) { - if (rectangle2D.containsTriangle(decoded[1], decoded[0], decoded[3], decoded[2], decoded[5], decoded[4]) == false) { + if (rectangle2D.containsTriangle(decoded.aX, decoded.aY, decoded.bX, decoded.bY, decoded.cX, decoded.cY) == false) { return false; } } else { - if (rectangle2D.intersectsTriangle(decoded[1], decoded[0], decoded[3], decoded[2], decoded[5], decoded[4]) == true) { + if (rectangle2D.intersectsTriangle(decoded.aX, decoded.aY, decoded.bX, decoded.bY, decoded.cX, decoded.cY) == true) { return queryRelation == QueryRelation.INTERSECTS; } } @@ -99,7 +101,9 @@ public 
boolean testPolygonQuery(Object query, Object shape) { private boolean testPolygon(EdgeTree tree, XYPolygon shape) { List tessellation = Tessellator.tessellate(shape); for (Tessellator.Triangle t : tessellation) { - double[] qTriangle = encoder.quantizeTriangle(t.getX(0), t.getY(0), t.getX(1), t.getY(1), t.getX(2), t.getY(2)); + double[] qTriangle = encoder.quantizeTriangle(t.getX(0), t.getY(0), t.isEdgefromPolygon(0), + t.getX(1), t.getY(1), t.isEdgefromPolygon(1), + t.getX(2), t.getY(2), t.isEdgefromPolygon(2)); Relation r = tree.relateTriangle(qTriangle[1], qTriangle[0], qTriangle[3], qTriangle[2], qTriangle[5], qTriangle[4]); if (queryRelation == QueryRelation.DISJOINT) { if (r != Relation.CELL_OUTSIDE_QUERY) return false; diff --git a/lucene/sandbox/src/test/org/apache/lucene/geo/TestRectangle2D.java b/lucene/sandbox/src/test/org/apache/lucene/geo/TestRectangle2D.java index ef90c338420c..787a2a5bc0b9 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/geo/TestRectangle2D.java +++ b/lucene/sandbox/src/test/org/apache/lucene/geo/TestRectangle2D.java @@ -87,7 +87,13 @@ public void testRandomTriangles() { NumericUtils.intToSortableBytes(tMaxY, triangle, 2 * BYTES); NumericUtils.intToSortableBytes(tMaxX, triangle, 3 * BYTES); - PointValues.Relation r = rectangle2D.relateRangeBBox(BYTES, 0, triangle, 3 * BYTES, 2 * BYTES, triangle); + PointValues.Relation r; + if (random().nextBoolean()) { + r = rectangle2D.relateRangeBBox(BYTES, 0, triangle, 3 * BYTES, 2 * BYTES, triangle); + } else { + r = rectangle2D.intersectRangeBBox(BYTES, 0, triangle, 3 * BYTES, 2 * BYTES, triangle); + } + if (r == PointValues.Relation.CELL_OUTSIDE_QUERY) { assertFalse(rectangle2D.intersectsTriangle(ax, ay, bx, by , cx, cy)); assertFalse(rectangle2D.containsTriangle(ax, ay, bx, by , cx, cy)); @@ -97,4 +103,54 @@ else if (rectangle2D.containsTriangle(ax, ay, bx, by , cx, cy)) { } } } + + public void testIntersectOptimization() { + byte[] minTriangle = box(0, 0, 10, 5); + byte[] maxTriangle 
= box(20, 10, 30, 15); + + Rectangle2D rectangle2D = Rectangle2D.create(new Rectangle(-0.1, 30.1, -0.1, 15.1)); + assertEquals(PointValues.Relation.CELL_INSIDE_QUERY, + rectangle2D.intersectRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle)); + assertEquals(PointValues.Relation.CELL_INSIDE_QUERY, + rectangle2D.relateRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle)); + + rectangle2D = Rectangle2D.create(new Rectangle(-0.1, 30.1, -0.1, 10.1)); + assertEquals(PointValues.Relation.CELL_INSIDE_QUERY, + rectangle2D.intersectRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle)); + assertEquals(PointValues.Relation.CELL_CROSSES_QUERY, + rectangle2D.relateRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle)); + + rectangle2D = Rectangle2D.create(new Rectangle(-0.1, 30.1, 4.9, 15.1)); + assertEquals(PointValues.Relation.CELL_INSIDE_QUERY, + rectangle2D.intersectRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle)); + assertEquals(PointValues.Relation.CELL_CROSSES_QUERY, + rectangle2D.relateRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle)); + + rectangle2D = Rectangle2D.create(new Rectangle(-0.1, 20.1, -0.1, 15.1)); + assertEquals(PointValues.Relation.CELL_INSIDE_QUERY, + rectangle2D.intersectRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle)); + assertEquals(PointValues.Relation.CELL_CROSSES_QUERY, + rectangle2D.relateRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle)); + + rectangle2D = Rectangle2D.create(new Rectangle(9.9, 30.1, -0.1, 15.1)); + assertEquals(PointValues.Relation.CELL_INSIDE_QUERY, + rectangle2D.intersectRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle)); + assertEquals(PointValues.Relation.CELL_CROSSES_QUERY, + rectangle2D.relateRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle)); + + rectangle2D = Rectangle2D.create(new Rectangle(5, 25, 3, 13)); + 
assertEquals(PointValues.Relation.CELL_CROSSES_QUERY, + rectangle2D.intersectRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle)); + assertEquals(PointValues.Relation.CELL_CROSSES_QUERY, + rectangle2D.relateRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle)); + } + + private byte[] box(int minY, int minX, int maxY, int maxX) { + byte[] bytes = new byte[4 * BYTES]; + NumericUtils.intToSortableBytes(GeoEncodingUtils.encodeLatitude(minY), bytes, 0); // min y + NumericUtils.intToSortableBytes(GeoEncodingUtils.encodeLongitude(minX), bytes, BYTES); // min x + NumericUtils.intToSortableBytes(GeoEncodingUtils.encodeLatitude(maxY), bytes, 2 * BYTES); // max y + NumericUtils.intToSortableBytes(GeoEncodingUtils.encodeLongitude(maxX), bytes, 3 * BYTES); // max x + return bytes; + } } diff --git a/lucene/sandbox/src/test/org/apache/lucene/geo/TestTessellator.java b/lucene/sandbox/src/test/org/apache/lucene/geo/TestTessellator.java index a6dbd5ff2418..485ae9a96248 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/geo/TestTessellator.java +++ b/lucene/sandbox/src/test/org/apache/lucene/geo/TestTessellator.java @@ -554,6 +554,9 @@ private void checkPolygon(String wkt) throws Exception { Polygon polygon = (Polygon) SimpleWKTShapeParser.parse(wkt); List tessellation = Tessellator.tessellate(polygon); assertEquals(area(polygon), area(tessellation), 0.0); + for (Tessellator.Triangle t : tessellation) { + checkTriangleEdgesFromPolygon(polygon, t); + } } private double area(Polygon p) { @@ -578,4 +581,77 @@ private double area(List triangles) { } return area; } + + private void checkTriangleEdgesFromPolygon(Polygon p, Tessellator.Triangle t) { + // first edge + assertEquals(t.isEdgefromPolygon(0), isEdgeFromPolygon(p, t.getX(0), t.getY(0), t.getX(1), t.getY(1))); + // second edge + assertEquals(t.isEdgefromPolygon(1), isEdgeFromPolygon(p, t.getX(1), t.getY(1), t.getX(2), t.getY(2))); + // third edge + assertEquals(t.isEdgefromPolygon(2), 
isEdgeFromPolygon(p, t.getX(2), t.getY(2), t.getX(0), t.getY(0))); + } + + private boolean isEdgeFromPolygon(Polygon p, double aLon, double aLat, double bLon, double bLat) { + for (int i = 0; i < p.getPolyLats().length - 1; i++) { + if (isPointInLine(p.getPolyLon(i), p.getPolyLat(i), p.getPolyLon(i + 1), p.getPolyLat(i + 1), aLon, aLat) && + isPointInLine(p.getPolyLon(i), p.getPolyLat(i), p.getPolyLon(i + 1), p.getPolyLat(i + 1), bLon, bLat)) { + return true; + } + if (p.getPolyLon(i) != p.getPolyLon(i + 1) || p.getPolyLat(i) != p.getPolyLat(i + 1)) { + //Check for co-planar points + final int length = p.getPolyLats().length; + final int offset = i + 2; + int j = 0; + int index = getIndex(length, j + offset); + while (j < length && area(p.getPolyLon(i), p.getPolyLat(i), p.getPolyLon(i + 1), p.getPolyLat(i + 1), p.getPolyLon(index), p.getPolyLat(index)) == 0) { + if (isPointInLine(p.getPolyLon(i), p.getPolyLat(i), p.getPolyLon(index), p.getPolyLat(index), aLon, aLat) && + isPointInLine(p.getPolyLon(i), p.getPolyLat(i), p.getPolyLon(index), p.getPolyLat(index), bLon, bLat)) { + return true; + } + index = getIndex(length, ++j + offset); + } + } + } + if (p.getHoles() != null && p.getHoles().length > 0) { + for (Polygon hole : p.getHoles()) { + if (isEdgeFromPolygon(hole, aLon, aLat, bLon, bLat)) { + return true; + } + } + } + return false; + } + + private int getIndex(int size, int index) { + if (index < size) { + return index; + } + return index - size; + } + + /** Compute signed area of triangle */ + private double area(final double aX, final double aY, final double bX, final double bY, + final double cX, final double cY) { + return (bY - aY) * (cX - bX) - (bX - aX) * (cY - bY); + } + + private boolean isPointInLine(final double aX, final double aY, final double bX, final double bY, double lon, double lat) { + double dxc = lon - aX; + double dyc = lat - aY; + + double dxl = bX - aX; + double dyl = bY - aY; + + if (dxc * dyl - dyc * dxl == 0) { + if (Math.abs(dxl) >= 
Math.abs(dyl)) + return dxl > 0 ? + aX <= lon && lon <= bX : + bX <= lon && lon <= aX; + else + return dyl > 0 ? + aY <= lat && lat <= bY : + bY <= lat && lat <= aY; + } + return false; + } } \ No newline at end of file diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java b/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java index 27d4e6c34f72..c71911f23adc 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java @@ -186,8 +186,10 @@ Map getNodeTermStats(Set terms, int nodeID, long vers } try { for(Term term : terms) { - final TermStates termStates = TermStates.build(s.getIndexReader().getContext(), term, true); - stats.put(term, s.termStatistics(term, termStates)); + final TermStates ts = TermStates.build(s.getIndexReader().getContext(), term, true); + if (ts.docFreq() > 0) { + stats.put(term, s.termStatistics(term, ts.docFreq(), ts.totalTermFreq())); + } } } finally { node.searchers.release(s); @@ -262,36 +264,31 @@ public Query rewrite(Query original) throws IOException { } @Override - public TermStatistics termStatistics(Term term, TermStates context) throws IOException { + public TermStatistics termStatistics(Term term, int docFreq, long totalTermFreq) throws IOException { assert term != null; - long docFreq = 0; - long totalTermFreq = 0; + long distributedDocFreq = 0; + long distributedTotalTermFreq = 0; for(int nodeID=0;nodeID 0; + return new TermStatistics(term.bytes(), distributedDocFreq, distributedTotalTermFreq); } @Override diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java index be372e21ed83..dd8ef65a56e6 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java +++ 
b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java @@ -115,6 +115,7 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; import org.junit.runner.RunWith; +import org.junit.internal.AssumptionViolatedException; import com.carrotsearch.randomizedtesting.JUnit4MethodProvider; import com.carrotsearch.randomizedtesting.LifecycleScope; @@ -2713,17 +2714,16 @@ public static T expectThrows(Class expectedType, Throwi /** Checks a specific exception class is thrown by the given runnable, and returns it. */ public static T expectThrows(Class expectedType, String noExceptionMessage, ThrowingRunnable runnable) { - try { - runnable.run(); - } catch (Throwable e) { - if (expectedType.isInstance(e)) { - return expectedType.cast(e); - } - AssertionFailedError assertion = new AssertionFailedError("Unexpected exception type, expected " + expectedType.getSimpleName() + " but got " + e); - assertion.initCause(e); - throw assertion; + final Throwable thrown = _expectThrows(Collections.singletonList(expectedType), runnable); + if (expectedType.isInstance(thrown)) { + return expectedType.cast(thrown); } - throw new AssertionFailedError(noExceptionMessage); + if (null == thrown) { + throw new AssertionFailedError(noExceptionMessage); + } + AssertionFailedError assertion = new AssertionFailedError("Unexpected exception type, expected " + expectedType.getSimpleName() + " but got " + thrown); + assertion.initCause(thrown); + throw assertion; } /** Checks a specific exception class is thrown by the given runnable, and returns it. 
*/ @@ -2732,16 +2732,13 @@ public static T expectThrowsAnyOf(List> throw new AssertionError("At least one expected exception type is required?"); } - Throwable thrown = null; - try { - runnable.run(); - } catch (Throwable e) { + final Throwable thrown = _expectThrows(expectedTypes, runnable); + if (null != thrown) { for (Class expectedType : expectedTypes) { - if (expectedType.isInstance(e)) { - return expectedType.cast(e); + if (expectedType.isInstance(thrown)) { + return expectedType.cast(thrown); } } - thrown = e; } List exceptionTypes = expectedTypes.stream().map(c -> c.getSimpleName()).collect(Collectors.toList()); @@ -2764,29 +2761,28 @@ public static T expectThrowsAnyOf(List> */ public static TW expectThrows (Class expectedOuterType, Class expectedWrappedType, ThrowingRunnable runnable) { - try { - runnable.run(); - } catch (Throwable e) { - if (expectedOuterType.isInstance(e)) { - Throwable cause = e.getCause(); - if (expectedWrappedType.isInstance(cause)) { - return expectedWrappedType.cast(cause); - } else { - AssertionFailedError assertion = new AssertionFailedError - ("Unexpected wrapped exception type, expected " + expectedWrappedType.getSimpleName() - + " but got: " + cause); - assertion.initCause(e); - throw assertion; - } + final Throwable thrown = _expectThrows(Collections.singletonList(expectedOuterType), runnable); + if (null == thrown) { + throw new AssertionFailedError("Expected outer exception " + expectedOuterType.getSimpleName() + + " but no exception was thrown."); + } + if (expectedOuterType.isInstance(thrown)) { + Throwable cause = thrown.getCause(); + if (expectedWrappedType.isInstance(cause)) { + return expectedWrappedType.cast(cause); + } else { + AssertionFailedError assertion = new AssertionFailedError + ("Unexpected wrapped exception type, expected " + expectedWrappedType.getSimpleName() + + " but got: " + cause); + assertion.initCause(thrown); + throw assertion; } - AssertionFailedError assertion = new AssertionFailedError - 
("Unexpected outer exception type, expected " + expectedOuterType.getSimpleName() - + " but got: " + e); - assertion.initCause(e); - throw assertion; } - throw new AssertionFailedError("Expected outer exception " + expectedOuterType.getSimpleName() - + " but no exception was thrown."); + AssertionFailedError assertion = new AssertionFailedError + ("Unexpected outer exception type, expected " + expectedOuterType.getSimpleName() + + " but got: " + thrown); + assertion.initCause(thrown); + throw assertion; } /** @@ -2798,41 +2794,65 @@ public static T expectThrowsAnyOf(List> */ public static TO expectThrowsAnyOf (LinkedHashMap,List>> expectedOuterToWrappedTypes, ThrowingRunnable runnable) { - try { - runnable.run(); - } catch (Throwable e) { - for (Map.Entry, List>> entry : expectedOuterToWrappedTypes.entrySet()) { - Class expectedOuterType = entry.getKey(); - List> expectedWrappedTypes = entry.getValue(); - Throwable cause = e.getCause(); - if (expectedOuterType.isInstance(e)) { - if (expectedWrappedTypes.isEmpty()) { - return null; // no wrapped exception - } else { - for (Class expectedWrappedType : expectedWrappedTypes) { - if (expectedWrappedType.isInstance(cause)) { - return expectedOuterType.cast(e); - } + final List> outerClasses = expectedOuterToWrappedTypes.keySet().stream().collect(Collectors.toList()); + final Throwable thrown = _expectThrows(outerClasses, runnable); + + if (null == thrown) { + List outerTypes = outerClasses.stream().map(Class::getSimpleName).collect(Collectors.toList()); + throw new AssertionFailedError("Expected any of the following outer exception types: " + outerTypes + + " but no exception was thrown."); + } + for (Map.Entry, List>> entry : expectedOuterToWrappedTypes.entrySet()) { + Class expectedOuterType = entry.getKey(); + List> expectedWrappedTypes = entry.getValue(); + Throwable cause = thrown.getCause(); + if (expectedOuterType.isInstance(thrown)) { + if (expectedWrappedTypes.isEmpty()) { + return null; // no wrapped exception 
+ } else { + for (Class expectedWrappedType : expectedWrappedTypes) { + if (expectedWrappedType.isInstance(cause)) { + return expectedOuterType.cast(thrown); } - List wrappedTypes = expectedWrappedTypes.stream().map(Class::getSimpleName).collect(Collectors.toList()); - AssertionFailedError assertion = new AssertionFailedError - ("Unexpected wrapped exception type, expected one of " + wrappedTypes + " but got: " + cause); - assertion.initCause(e); - throw assertion; } + List wrappedTypes = expectedWrappedTypes.stream().map(Class::getSimpleName).collect(Collectors.toList()); + AssertionFailedError assertion = new AssertionFailedError + ("Unexpected wrapped exception type, expected one of " + wrappedTypes + " but got: " + cause); + assertion.initCause(thrown); + throw assertion; } } - List outerTypes = expectedOuterToWrappedTypes.keySet().stream().map(Class::getSimpleName).collect(Collectors.toList()); - AssertionFailedError assertion = new AssertionFailedError - ("Unexpected outer exception type, expected one of " + outerTypes + " but got: " + e); - assertion.initCause(e); - throw assertion; } - List outerTypes = expectedOuterToWrappedTypes.keySet().stream().map(Class::getSimpleName).collect(Collectors.toList()); - throw new AssertionFailedError("Expected any of the following outer exception types: " + outerTypes - + " but no exception was thrown."); + List outerTypes = outerClasses.stream().map(Class::getSimpleName).collect(Collectors.toList()); + AssertionFailedError assertion = new AssertionFailedError + ("Unexpected outer exception type, expected one of " + outerTypes + " but got: " + thrown); + assertion.initCause(thrown); + throw assertion; } + /** + * Helper method for {@link #expectThrows} and {@link #expectThrowsAnyOf} that takes care of propagating + * any {@link AssertionError} or {@link AssumptionViolatedException} instances thrown if and only if they + * are super classes of the expectedTypes. 
Otherwise simply returns any {@link Throwable} + * thrown, regardless of type, or null if the runnable completed w/o error. + */ + private static Throwable _expectThrows(List> expectedTypes, ThrowingRunnable runnable) { + + try { + runnable.run(); + } catch (AssertionError | AssumptionViolatedException ae) { + for (Class expectedType : expectedTypes) { + if (expectedType.isInstance(ae)) { // user is expecting this type explicitly + return ae; + } + } + throw ae; + } catch (Throwable e) { + return e; + } + return null; + } + /** Returns true if the file exists (can be opened), false * if it cannot be opened, and (unlike Java's * File.exists) throws IOException if there's some diff --git a/lucene/test-framework/src/test/org/apache/lucene/util/TestExpectThrows.java b/lucene/test-framework/src/test/org/apache/lucene/util/TestExpectThrows.java new file mode 100644 index 000000000000..cfc70be9f2cf --- /dev/null +++ b/lucene/test-framework/src/test/org/apache/lucene/util/TestExpectThrows.java @@ -0,0 +1,155 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.lucene.util; + +import java.util.concurrent.atomic.AtomicBoolean; +import java.io.IOException; + +import org.junit.internal.AssumptionViolatedException; + +public class TestExpectThrows extends LuceneTestCase { + + private static class HuperDuperException extends IOException { + public HuperDuperException() { + /* No-Op */ + } + } + + /** + * Tests that {@link #expectThrows} behaves correctly when the Runnable throws (an + * instance of a subclass of) the expected Exception type: by returning that Exception. + */ + public void testPass() { + final AtomicBoolean ran = new AtomicBoolean(false); + final IOException returned = expectThrows(IOException.class, () -> { + ran.getAndSet(true); + throw new HuperDuperException(); + }); + assertTrue(ran.get()); + assertNotNull(returned); + assertEquals(HuperDuperException.class, returned.getClass()); + } + + /** + * Tests that {@link #expectThrows} behaves correctly when the Runnable does not throw (an + * instance of a subclass of) the expected Exception type: by throwing an assertion to + * FAIL the test. + */ + public void testFail() { + final AtomicBoolean ran = new AtomicBoolean(false); + AssertionError caught = null; + try { + final IOException returned = expectThrows(IOException.class, () -> { + ran.getAndSet(true); + }); + fail("must not complete"); // NOTE: we don't use expectThrows to test expectThrows + } catch (AssertionError ae) { + caught = ae; + } + assertTrue(ran.get()); + assertNotNull(caught); + assertEquals("Expected exception IOException but no exception was thrown", caught.getMessage()); + + } + + /** + * Tests that {@link #expectThrows} behaves correctly when the Runnable contains an + * assertion that does not pass: by allowing that assertion to propogate and + * FAIL the test. 
+ */ + public void testNestedFail() { + final AtomicBoolean ran = new AtomicBoolean(false); + AssertionError caught = null; + try { + final IOException returned = expectThrows(IOException.class, () -> { + ran.getAndSet(true); + fail("this failure should propogate"); + }); + fail("must not complete"); // NOTE: we don't use expectThrows to test expectThrows + } catch (AssertionError ae) { + caught = ae; + } + assertTrue(ran.get()); + assertNotNull(caught); + assertEquals("this failure should propogate", caught.getMessage()); + } + + /** + * Tests that {@link #expectThrows} behaves correctly when the Runnable contains an + * assumption that does not pass: by allowing that assumption to propogate and cause + * the test to SKIP. + */ + public void testNestedAssume() { + final AtomicBoolean ran = new AtomicBoolean(false); + AssumptionViolatedException caught = null; + try { + final IOException returned = expectThrows(IOException.class, () -> { + ran.getAndSet(true); + assumeTrue("this assumption should propogate", false); + }); + fail("must not complete"); // NOTE: we don't use expectThrows to test expectThrows + } catch (AssumptionViolatedException ave) { + caught = ave; + } + assertTrue(ran.get()); + assertNotNull(caught); + assertEquals("this assumption should propogate", caught.getMessage()); + } + + /** + * Tests that {@link #expectThrows} behaves correctly when the Runnable contains an + * assertion that does not pass but the caller has explicitly said they expect an Exception of that type: + * by returning that assertion failure Exception. 
+ */ + public void testExpectingNestedFail() { + final AtomicBoolean ran = new AtomicBoolean(false); + AssertionError returned = null; + try { + returned = expectThrows(AssertionError.class, () -> { + ran.getAndSet(true); + fail("this failure should be returned, not propogated"); + }); + } catch (AssertionError caught) { // NOTE: we don't use expectThrows to test expectThrows + assertNull("An exception should not have been thrown", caught); + } + assertTrue(ran.get()); + assertNotNull(returned); + assertEquals("this failure should be returned, not propogated", returned.getMessage()); + } + + /** + * Tests that {@link #expectThrows} behaves correctly when the Runnable contains an + * assumption that does not pass but the caller has explicitly said they expect an Exception of that type: + * by returning that assumption failure Exception. + */ + public void testExpectingNestedAssume() { + final AtomicBoolean ran = new AtomicBoolean(false); + AssumptionViolatedException returned = null; + try { + returned = expectThrows(AssumptionViolatedException.class, () -> { + ran.getAndSet(true); + assumeTrue("this assumption should be returned, not propogated", false); + }); + } catch (AssumptionViolatedException caught) { // NOTE: we don't use expectThrows to test expectThrows + assertNull("An exception should not have been thrown", caught); + } + assertTrue(ran.get()); + assertNotNull(returned); + assertEquals("this assumption should be returned, not propogated", returned.getMessage()); + } + +} diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 14c827617359..72c3873a3ab9 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -38,6 +38,9 @@ Upgrade Notes is now deprecated, and users are encouraged to use SolrTestCaseJ4.initAndGetDataDir() in it's place. See SOLR-13664 for more details. +* For JWTAuthPlugin, the 'jwkUrl' configuration key is deprecated and may be removed later, please use 'jwksUrl' + instead. See SOLR-13734. 
+ New Features ---------------------- @@ -48,10 +51,6 @@ New Features when using compositeIds. Document distribution is calculated using the "id_prefix" field (if it exists) containing just the compositeId prefixes, or directly from the indexed "id" field otherwise. (yonik, Megan Carey) -* SOLR-13565: Node level runtime libs loaded from remote urls (noble) - -* SOLR-13553: Node level custom RequestHandlers (noble) - * SOLR-13622: Add cat() stream source to create tuples from lines in local files (Jason Gerlowski and Joel Bernstein) * SOLR-11866: QueryElevationComponent can have query rules configured with match="subset" wherein the words need only @@ -59,30 +58,37 @@ New Features * SOLR-13682: command line option to export documents to a file (noble) -* SOLR-13650: Solr now can define and add "packages" with plugins. Each plugin can choose to - load from one of those packages & updating packages can reload those plugins independently (noble) - * SOLR-13257: Support deterministic replica routing preferences for better cache usage (Michael Gibney via Christine Poerschke, Tomás Fernández Löbbe) -* SOLR-13707: API to expose the currently used package name, details for each plugin (noble) +* SOLR-13122: Ability to query aliases in Solr Admin UI (janhoy) -* SOLR-13710: Persist package jars locally & expose them over http at /api/node/blob (noble) +* SOLR-13713: JWTAuthPlugin to support multiple JWKS endpoints (janhoy) -* SOLR-13122: Ability to query aliases in Solr Admin UI (janhoy) +* SOLR-13734: JWTAuthPlugin now supports multiple IdP issuers through configuring a new 'issuers' configuration key. + Access tokens issued and signed by any of the configured issuers will be validated (janhoy) + +* SOLR-13272: Add support for arbitrary ranges in JSON facet's Range facets. 
+ (Apoorv Bhawsar, Munendra S N, Mikhail Khludnev, Ishan Chattopadhyaya, Jan Høydahl) + +* SOLR-13632: Support integral plots, cosine distance and string truncation with math expressions (Joel Bernstein) + +* SOLR-13667: Add upper, lower, trim and split Stream Evaluators (Joel Bernstein) + +* SOLR-13625: Add CsvStream, TsvStream Streaming Expressions and supporting Stream Evaluators (Joel bernstein) + +* SOLR-8241: Add CaffeineCache, an efficient implementation of SolrCache.(Ben Manes, Shawn Heisey, David Smiley, Andrzej Bialecki) Improvements ---------------------- * SOLR-12368: Support InPlace DV updates for a field that does not yet exist in any documents -(hossman, Simon Willnauer, Adrien Grand, Munendra S N) + (hossman, Simon Willnauer, Adrien Grand, Munendra S N) * SOLR-13558, SOLR-13693: Allow dynamic resizing of SolrCache-s. (ab) * SOLR-6305: Ability to set the replication factor for index files created by HDFSDirectoryFactory (Boris Pasko via Kevin Risden) -* SOLR-13677: All Metrics Gauges should be unregistered by the objects that registered them (noble) - * SOLR-13702: Some components register twice their metric names (janhoy) * SOLR-11601: Improved error message when geodist(llpsf) is used with arguments referring to a LatLonPointSpatialField. @@ -96,6 +102,31 @@ Improvements * SOLR-13728: If a partial update (aka atomic update) is attempted on a document that has child docs, then ensure the schema supports it (_root_ stored/docValues) by throwing an exception. (David Smiley) +* SOLR-13742: Allow optional redaction of data saved by 'bin/solr autoscaling -save'. (ab) + +* SOLR-13739: Optimized large managed schema modifications; Internal O(n^2) problem. (Thomas Wöckinger via David Smiley) + +* SOLR-9658: Max idle time support for SolrCache implementations. (hoss, ab) + +* SOLR-13763: Improve the tracking of "freedisk" in autoscaling simulations. (ab) + +* SOLR-13773: Add Prometheus Exporter GC and Heap options. 
(Houston Putman via Anshum Gupta, David Smiley) + +* SOLR-13638: Add debug, trace logging to RuleBasedAuthorizationPlugin (Jason Gerlowski) + +* SOLR-13784: EmbeddedSolrServer's defaultCore constructor argument is now optional (David Smiley) + +* LUCENE-8984: MoreLikeThis MLT is biased for uncommon fields (Andy Hind via Anshum Gupta) + +* SOLR-13798: SSL: Adding Enabling/Disabling client's hostname verification config (Cao Manh Dat) + +* SOLR-13771: Add -v and -m to ulimit section of reference guide and bin/solr checks (Erick Erickson) + +* SOLR-13795: Managed schema operations should do a core reload in Solr standalone mode. + (Thomas Wöckinger via David Smiley) + +* SOLR-13719: Introducing SolrClient.ping(collection) in SolrJ (Geza Nagy via Mikhail Khludnev) + Bug Fixes ---------------------- @@ -143,9 +174,72 @@ Bug Fixes * SOLR-13240: Fixed UTILIZENODE action resulting in IllegalArgumentException. (Hendrik Haddorp, Richard Goodman, Tim Owen, shalin, noble, Christine Poerschke) +* SOLR-13238: BlobHandler generates non-padded md5 (Jeff Walraven via janhoy) + +* SOLR-13780: Fix ClassCastException in NestableJsonFacet (Tiago Martinho de Barros, Munendra S N) + +* SOLR-13725: Allow negative values for limit in TermsFacetMap (Richard Walker, Munendra S N) + +* SOLR-13022: Fix NPE when sorting by non-existent aggregate function in JSON Facet (hossman, Munendra S N) + +* SOLR-13727: Fixed V2Requests - HttpSolrClient replaced first instance of "/solr" with "/api" which + caused a change in host names starting with "solr". (Megan Carey via yonik) + +* SOLR-13180: Fix ClassCastException in Json Request API (Johannes Kloos, Jan Høydahl, Munendra S N) + +* SOLR-13417: Handle stats aggregation on date and string fields in SolrJ's JSON facet response processing + (Jason Gerlowski, Munendra S N) + +* SOLR-13712: JMX MBeans are not exposed because of race condition between creating platform mbean server and + registering mbeans. 
(shalin) + +* SOLR-13802: Managed schema manipulations were not persisting the optional luceneMatchVersion that can be set + on an Analyzer. (Thomas Wöckinger) + +* SOLR-13790: LRUStatsCache size explosion and ineffective caching. (ab) + +* SOLR-13539: Fix for class-cast issues during atomic-update 'removeregex' operations. This also incorporated some + tests Tim wrote as a part of SOLR-9505. (Tim Owen via Jason Gerlowski) + +* SOLR-13376: Multi-node race condition to create/remove nodeLost markers. (hoss, ab) + +* SOLR-13293: ConcurrentUpdateHttp2SolrClient always log AsynchronousCloseException exception error on indexing. + (Cao Manh Dat) + +* SOLR-13828: Improve ExecutePlanAction error handling. (ab) + +* SOLR-13760: Fix regression in support for Date math in TRA start date that was introduced by SOLR-13375 + +* SOLR-13829: RecursiveEvaluator casts Continuous numbers to Discrete Numbers, causing mismatch (Trey Grainger, Joel Bernstein) + +* SOLR-13815: Live shard split (where updates actively continue during the split) can lose updates due to cluster + state happening to change between checking if the current shard is active and later checking if there are any + sub-shard leaders to forward the update to. (yonik) + +* SOLR-13665: Added missing netty dependencies to solrJ and upgraded netty to v 4.1.29.Final (Jörn Franke, janhoy) + +* SOLR-13793: HttpSolrCall now maintains internal request count (_forwardedCount) for remote queries and limits them to + the number of replicas. 
This avoids making too many cascading calls to remote servers, which, if not restricted, can + bring down nodes containing the said collection (Kesharee Nandan Vishwakarma, Ishan Chattopadhyaya) + +* SOLR-13834: ZkController#getSolrCloudManager() created a new instance of ZkStateReader, thereby causing mismatch in the + visibility of the cluster state and, as a result, undesired race conditions (Clay Goddard via Ishan Chattopadhyaya) + +* SOLR-13835: HttpSolrCall produces incorrect extra AuditEvent on AuthorizationResponse.PROMPT (janhoy, hossman) + +* SOLR-13843: The MOVEREPLICA API ignores replica type and always adds 'nrt' replicas (Amrit Sarkar via shalin) + +* SOLR-13677: All Metrics Gauges should be unregistered by components that registered them. (noble, ab) + +* SOLR-13855: DistributedZkUpdateProcessor should have been propagating URP.finish() lifecycle like it used to before + 8.1 (a regression). Impacts integrity since Run URP's finish() propagates this to the updateLog to fsync. + (David Smiley) + Other Changes ---------------------- +* SOLR-13779: Use the safe fork of simple-xml for clustering contrib. (Dawid Weiss) + * SOLR-13585: Factor out SearchGroupsResultTransformer.[de]serializeOneSearchGroup methods. (Christine Poerschke, Diego Ceccarelli) * SOLR-12870: Use StandardCharsets instead of String values (Peter Somogyi via Munendra S N) @@ -156,14 +250,28 @@ Other Changes * SOLR-13643: Add Getters/Setters in ResponseBuilder for analytics response handling (Neal Sidhwaney via Munendra S N) -* SOLR-13659: Refactor CacheConfig to lazily load the the implementation class (noble) - * SOLR-13680: Use try-with-resource to close the closeable resource (Furkan KAMACI, Munendra S N) * SOLR-13573: Add SolrRangeQuery getters for upper, lower bound (Brian Rhees via Jason Gerlowski) * SOLR-13658: Precommit fail Java "var" until 9x. 
Fail "var...<>" constructs entirely (Erick Erickson) +* SOLR-13767: Upgrade jackson to 2.9.9 (janhoy) + +* SOLR-11492: Clean up /solr/cloud-dev scripts and provide a single well documented script (Gus Heck, Robert Bunch) + +* SOLR-13747: New TestSSLTestConfig.testFailIfUserRunsTestsWithJVMThatHasKnownSSLBugs() to give people running + tests more visibility if/when they use a known-buggy JVM causing most SSL tests to silently SKIP. (hossman) + +* SOLR-13791: Remove remaining Commons BeanUtils references. (Andras Salamon, Christine Poerschke) + +* SOLR-13812: Add javadocs, uneven rejection and basic test coverage for the SolrTestCaseJ4.params method. + (Diego Ceccarelli, Christine Poerschke, Munendra S N) + +* SOLR-13787: An annotation based system to write v2 APIs (noble) + +* SOLR-12786: Update Ref Guide build tool versions (Cassandra) + ================== 8.2.0 ================== Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release. @@ -224,8 +332,6 @@ New Features * SOLR-13552: Add recNum Stream Evaluator (Joel Bernstein) -* SOLR-13534: Dynamic loading to support loading jars from a URL (noble) - * SOLR-13560: Add isNull and notNull Stream Evaluators (Joel Bernstein) * SOLR-10291: Add matches Stream Evaluator to support regex matching (Joel Bernstein) diff --git a/solr/bin/solr b/solr/bin/solr index ca1948f7d88a..596242fac4b8 100755 --- a/solr/bin/solr +++ b/solr/bin/solr @@ -209,6 +209,11 @@ if [ "$SOLR_SSL_ENABLED" == "true" ]; then if [ -n "$SOLR_SSL_NEED_CLIENT_AUTH" ]; then SOLR_SSL_OPTS+=" -Dsolr.jetty.ssl.needClientAuth=$SOLR_SSL_NEED_CLIENT_AUTH" fi + + if [ -z "$SOLR_SSL_CLIENT_HOSTNAME_VERIFICATION" ] ; then + SOLR_SSL_OPTS+=" -Dsolr.jetty.ssl.verifyClientHostName=HTTPS" + fi + if [ -n "$SOLR_SSL_WANT_CLIENT_AUTH" ]; then SOLR_SSL_OPTS+=" -Dsolr.jetty.ssl.wantClientAuth=$SOLR_SSL_WANT_CLIENT_AUTH" fi @@ -1516,6 +1521,8 @@ if [ -z "$SOLR_ULIMIT_CHECKS" ] || [ "$SOLR_ULIMIT_CHECKS" != "false" ]; then if hash ulimit 
2>/dev/null; then openFiles=$(ulimit -n) maxProcs=$(ulimit -u) + virtualMemory=$(ulimit -v) + maxMemory=$(ulimit -m) if [ $openFiles != "unlimited" ] && [ $openFiles -lt "$SOLR_RECOMMENDED_OPEN_FILES" ]; then echo "*** [WARN] *** Your open file limit is currently $openFiles. " echo " It should be set to $SOLR_RECOMMENDED_OPEN_FILES to avoid operational disruption. " @@ -1527,10 +1534,23 @@ if [ -z "$SOLR_ULIMIT_CHECKS" ] || [ "$SOLR_ULIMIT_CHECKS" != "false" ]; then echo " It should be set to $SOLR_RECOMMENDED_MAX_PROCESSES to avoid operational disruption. " echo " If you no longer wish to see this warning, set SOLR_ULIMIT_CHECKS to false in your profile or solr.in.sh" fi + if [ $virtualMemory != "unlimited" ]; then + echo "*** [WARN] *** Your Virtual Memory limit is $virtualMemory. " + echo " It should be set to 'unlimited' to avoid operational disruption. " + echo " If you no longer wish to see this warning, set SOLR_ULIMIT_CHECKS to false in your profile or solr.in.sh" + fi + if [ $maxMemory != "unlimited" ]; then + echo "*** [WARN] *** Your Max Memory Size limit is $maxMemory. " + echo " It should be set to 'unlimited' to avoid operational disruption. 
" + echo " If you no longer wish to see this warning, set SOLR_ULIMIT_CHECKS to false in your profile or solr.in.sh" + fi + else echo "Could not check ulimits for processes and open files, recommended values are" - echo " max processes: $SOLR_RECOMMENDED_MAX_PROCESSES " - echo " open files: $SOLR_RECOMMENDED_OPEN_FILES" + echo " max processes: $SOLR_RECOMMENDED_MAX_PROCESSES " + echo " open files: $SOLR_RECOMMENDED_OPEN_FILES" + echo " virtual memory: unlimited" + echo " max memory size: unlimited" fi fi fi diff --git a/solr/bin/solr.in.cmd b/solr/bin/solr.in.cmd index a831c55d3a7a..e46233672d33 100755 --- a/solr/bin/solr.in.cmd +++ b/solr/bin/solr.in.cmd @@ -122,6 +122,8 @@ REM Require clients to authenticate REM set SOLR_SSL_NEED_CLIENT_AUTH=false REM Enable clients to authenticate (but not require) REM set SOLR_SSL_WANT_CLIENT_AUTH=false +REM Verify client hostname during SSL handshake +REM set SOLR_SSL_CLIENT_HOSTNAME_VERIFICATION=false REM SSL Certificates contain host/ip "peer name" information that is validated by default. Setting REM this to false can be useful to disable these checks when re-using a certificate on many hosts REM set SOLR_SSL_CHECK_PEER_NAME=true diff --git a/solr/bin/solr.in.sh b/solr/bin/solr.in.sh index 9d1be37d2e37..d4e6b7bb6682 100644 --- a/solr/bin/solr.in.sh +++ b/solr/bin/solr.in.sh @@ -139,6 +139,8 @@ #SOLR_SSL_NEED_CLIENT_AUTH=false # Enable clients to authenticate (but not require) #SOLR_SSL_WANT_CLIENT_AUTH=false +# Verify client's hostname during SSL handshake +#SOLR_SSL_CLIENT_HOSTNAME_VERIFICATION=false # SSL Certificates contain host/ip "peer name" information that is validated by default. 
Setting # this to false can be useful to disable these checks when re-using a certificate on many hosts #SOLR_SSL_CHECK_PEER_NAME=true diff --git a/solr/cloud-dev/clean.sh b/solr/cloud-dev/clean.sh deleted file mode 100755 index 2f42d45549a7..000000000000 --- a/solr/cloud-dev/clean.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash - -numServers=$1 - -die () { - echo >&2 "$@" - exit 1 -} - -[ "$#" -eq 1 ] || die "1 argument required, $# provided, usage: clean.sh {numServers}" - -cd .. - -for (( i=1; i <= $numServers; i++ )) -do - rm -r -f server$i -done - -rm -r -f serverzk -rm -r -f server-lastlogs \ No newline at end of file diff --git a/solr/cloud-dev/cli-test-solrcloud-start.sh b/solr/cloud-dev/cli-test-solrcloud-start.sh deleted file mode 100755 index 1634ab721ba1..000000000000 --- a/solr/cloud-dev/cli-test-solrcloud-start.sh +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env bash - -# TODO: !OUT OF DATE! - -cd .. - -rm -r -f server2 -rm -r -f server3 -rm -r -f server4 -rm -r -f server5 -rm -r -f server6 - -rm -r -f dist -rm -r -f build -rm -r -f server/solr/zoo_data -rm -r -f server/solr/data -rm -f server/server.log - -ant server dist - -cp -r -f server server2 -cp -r -f server server3 -cp -r -f server server4 -cp -r -f server server5 -cp -r -f server server6 - -# first try uploading a conf dir -java -classpath lib/*:dist/*:build/lucene-libs/* org.apache.solr.cloud.ZkCLI -cmd upconfig -zkhost 127.0.0.1:9983 -confdir server/solr/collection1/conf -confname conf1 -solrhome server/solr -runzk 8983 - -# upload a second conf set so we avoid single conf auto linking -java -classpath lib/*:dist/*:build/lucene-libs/* org.apache.solr.cloud.ZkCLI -cmd upconfig -zkhost 127.0.0.1:9983 -confdir server/solr/collection1/conf -confname conf2 -solrhome server/solr -runzk 8983 - -# now try linking a collection to a conf set -java -classpath lib/*:dist/*:build/lucene-libs/* org.apache.solr.cloud.ZkCLI -cmd linkconfig -zkhost 127.0.0.1:9983 -collection collection1 -confname conf1 
-solrhome server/solr -runzk 8983 - - -cd server -java -DzkRun -DnumShards=2 -DSTOP.PORT=7983 -DSTOP.KEY=key -jar start.jar 1>server.log 2>&1 & - -cd ../server2 -java -Djetty.port=7574 -DzkHost=localhost:9983 -DnumShards=2 -DSTOP.PORT=6574 -DSTOP.KEY=key -jar start.jar 1>server2.log 2>&1 & - -cd ../server3 -java -Djetty.port=7575 -DzkHost=localhost:9983 -DnumShards=2 -DSTOP.PORT=6575 -DSTOP.KEY=key -jar start.jar 1>server3.log 2>&1 & - -cd ../server4 -java -Djetty.port=7576 -DzkHost=localhost:9983 -DnumShards=2 -DSTOP.PORT=6576 -DSTOP.KEY=key -jar start.jar 1>server4.log 2>&1 & - -cd ../server5 -java -Djetty.port=7577 -DzkHost=localhost:9983 -DnumShards=2 -DSTOP.PORT=6577 -DSTOP.KEY=key -jar start.jar 1>server5.log 2>&1 & - -cd ../server6 -java -Djetty.port=7578 -DzkHost=localhost:9983 -DnumShards=2 -DSTOP.PORT=6578 -DSTOP.KEY=key -jar start.jar 1>server6.log 2>&1 & diff --git a/solr/cloud-dev/cloud.sh b/solr/cloud-dev/cloud.sh new file mode 100644 index 000000000000..3b6d710da53a --- /dev/null +++ b/solr/cloud-dev/cloud.sh @@ -0,0 +1,383 @@ +#!/bin/bash + +################################################################################## +# +# The goal of this script is to allow quick setup of a blank local multi node +# cluster for development testing without needing to erase or interfere with +# previous testing. It also enables redeployment of the code for such testing +# clusters without erasing the data previously indexed. +# +# It is for dev testing only NOT for production use. +# +# This is also NOT meant to be run from this directory within a lucene-solr +# working copy. Typical usage is to copy it out to a separate workspace +# such as (/../testing) and edit then either use the -w option +# or edit the definition of DEFAULT_VCS_WORKSPACE variable below. 
+# +# Usage: +# ./cloud.sh [options] [name] +# +# Options: +# -c clean the data & zk collections erasing all indexed data +# -r recompile server with 'ant clean server create-package' +# -m memory per node +# -a additional JVM options +# -n number of nodes to create/start if this doesn't match error +# -w path to the vcs checkout +# -z port to look for zookeeper on (2181 default) +# +# Commands: +# new Create a new cluster named by the current date or [name] +# start Start an existing cluster specified by [name] +# stop stop the cluster specified by [name] +# restart stop and then start +# +# In all cases if [name] is unspecified ls -t will be used to determine the +# most recent cluster working directory, and that will be used. If it is +# specified it will be resolved as a path from the directory where cloud.sh +# has been run. +# +# By default the script sets up a local Solr cloud with 4 nodes, in a local +# directory with ISO date as the name. A local zookeeper at 2181 or the +# specified port is presumed to be available, a new zk chroot is used for each +# cluster based on the file system path to the cluster directory. the default +# solr.xml is added to this solr root dir in zookeeper. +# +# Debugging ports are automatically opened for each node starting with port 5001 +# +# Specifying an explicit destination path will cause the script to +# use that path and a zk chroot that matches, so more than one install +# can be created in a day, or issue numbers etc can be used. Normally the +# directories containing clusters created by this tool are in the same +# directory as this script. Distant paths with slashes or funny characters +# *might* work, but are not well tested, YMMV. +# +# PEREQ: 1. 
Zookeeper on localhost:2181 (or as specified by -z option) where +# it is ok to create a lot of top level directories named for +# the absolute path of the [name] directory (for example: +# /solr_home_myuser_projects_solr_testing_2019-01-01) Note +# that not using the embedded zookeeper is key to being able +# switch between testing setups and to test vs alternate versions +# of zookeeper if desired. +# +# SETUP: 1. Place this script in a directory intended to hold all your +# testing installations of solr. +# 2. Edit DEFAULT_VCS_WORKSPACE if the present value does not suit +# your purposes. +# 3. chmod +x cloud.sh +# +# EXAMPLES: +# +# Create a brand new 4 node cluster deployed in a directory named for today +# +# ./cloud.sh new +# +# Create a brand new 4 node cluster deployed in a directory named SOLR-1234567 +# +# ./cloud.sh new SOLR-1234567 +# +# Stop the cluster +# +# ./cloud.sh stop +# +# Compile and push new code to a running cluster (incl bounce the cluster) +# +# ./cloud.sh restart -r +# +# Dump your hoplessly fubar'd test collections and start fresh with current tarball +# +# ./cloud.sh restart -c +# +################################################################################## + +DEFAULT_VCS_WORKSPACE='../code/lucene-solr' + +############## Normally no need to edit below this line ############## + +############## +# Parse Args # +############## + +COMMAND=$1 +shift + +CLEAN=false # default +MEMORY=1g # default +JVM_ARGS='' # default +RECOMPILE=false # default +NUM_NODES=0 # need to detect if not specified +VCS_WORK=${DEFAULT_VCS_WORKSPACE} +ZK_PORT=2181 + +while getopts ":crm:a:n:w:z:" opt; do + case ${opt} in + c) + CLEAN=true + ;; + r) + RECOMPILE=true + ;; + m) + MEMORY=$OPTARG + ;; + a) + JVM_ARGS=$OPTARG + ;; + n) + NUM_NODES=$OPTARG + ;; + w) + VCS_WORK=$OPTARG + ;; + z) + ZK_PORT=$OPTARG + ;; + \?) 
+ echo "Invalid option: -$OPTARG" >&2 + exit 1 + esac +done +shift $((OPTIND -1)) + +CLUSTER_WD=$1 + +################# +# Validate Args # +################# +case ${COMMAND} in + new);; + stop);; + start);; + restart);; + *) echo "Invalid command $COMMAND"; exit 2; +esac + +case ${NUM_NODES} in + ''|*[!0-9]*) echo "$NUM_NODES (-n) is not a positive integer"; exit 3 ;; + *) ;; +esac + +case ${ZK_PORT} in + ''|*[!0-9]*) echo "$NUM_NODES (-z) is not a positive integer"; exit 3 ;; + *) ;; +esac + +if [[ "$COMMAND" = "new" ]]; then + if [[ "$CLEAN" = true ]]; then + echo "Command new and option -c (clean) do not make sense together since a newly created cluster has no data to clean."; exit 1; + fi +fi + +if [[ ! -d "$VCS_WORK" ]]; then + echo "$VCS_WORK (vcs working directory) does not exist"; exit 4; +fi + +if [[ ! "$COMMAND" = "new" ]]; then + if [[ -z "$CLUSTER_WD" ]]; then + # find the most recently touched directory in the local directory + CLUSTER_WD=$(find . -maxdepth 1 -mindepth 1 -type d -print0 | xargs -0 ls -1 -td | sed -E 's/\.\/(.*)/\1/' | head -n1) + fi +fi + +if [[ ! -z "$CLUSTER_WD" ]]; then + if [[ ! -d "$CLUSTER_WD" && ! "$COMMAND" = "new" ]]; then + echo "$CLUSTER_WD (cluster working directory) does not exist or is not a directory"; exit 5; + fi +fi + +############################ +# Print our initialization # +############################ +echo "COMMAND : $COMMAND" +echo "VCS WD : $VCS_WORK" +echo "CLUSTER WD : $CLUSTER_WD" +echo "NUM NODES : $NUM_NODES" +echo "ZK PORT : $ZK_PORT" +echo "CLEAN : $CLEAN" +echo "RECOMPILE : $RECOMPILE" + +########################################################### +# Create new cluster working dir if new command specified # +########################################################### +mkdirIfReq() { + if [[ "$COMMAND" = "new" ]]; then + if [[ -z "$CLUSTER_WD" ]]; then + DATE=$(date "+%Y-%m-%d") + CLUSTER_WD="${DATE}" + fi + mkdir "$CLUSTER_WD" + if [[ "$?" 
-ne 0 ]]; then + echo "Unable to create $CLUSTER_WD"; exit 6; + fi + fi +} + +################# +# Find Solr etc # +################# + +findSolr() { + pushd ${CLUSTER_WD} + CLUSTER_WD_FULL=$(pwd -P) + SOLR=${CLUSTER_WD}/$(find . -maxdepth 1 -name 'solr*' -type d -print0 | xargs -0 ls -1 -td | sed -E 's/\.\/(solr.*)/\1/' | head -n1) + popd + + #echo "Found solr at $SOLR" + SAFE_DEST="${CLUSTER_WD_FULL//\//_}"; +} + +############################################### +# Clean node dir (and thus data) if requested # +############################################### +cleanIfReq() { + if [[ "$CLEAN" = true ]]; then + if [[ -d "$CLUSTER_WD" ]]; then + echo "Cleaning out $CLUSTER_WD" + pushd ${CLUSTER_WD} + rm -rf n* # remove node dirs which are are n1, n2, n3 etc + popd + fi + findSolr + echo COLLECTIONS FOUND IN ZK | egrep --color=always '.*' + COLLECTIONS_TO_CLEAN=`${SOLR}/bin/solr zk ls /solr_${SAFE_DEST}/collections -z localhost:${ZK_PORT}`; echo $COLLECTIONS_TO_CLEAN | egrep --color=always '.*' + for collection in ${COLLECTIONS_TO_CLEAN}; do + echo nuke $collection + ${SOLR}/bin/solr zk rm -r /solr_${SAFE_DEST}/collections/${collection} -z localhost:${ZK_PORT} + echo $? + done + fi +} + +################################# +# Recompile server if requested # +################################# +recompileIfReq() { + if [[ "$RECOMPILE" = true ]]; then + pushd "$VCS_WORK"/solr + ant clean server create-package + if [[ "$?" -ne 0 ]]; then + echo "BUILD FAIL - cloud.sh stopping, see above output for details"; popd; exit 7; + fi + popd + copyTarball + fi +} + +################ +# Copy tarball # +################ +copyTarball() { + echo "foo" + pushd ${CLUSTER_WD} + echo "bar" + rm -rf solr-* # remove tarball and dir to which it extracts + echo "baz" + pushd # back to original dir to properly resolve vcs working dir + echo "foobar:"$(pwd) + if [[ ! 
-f $(ls "$VCS_WORK"/solr/package/solr-*.tgz) ]]; then + echo "No solr tarball found try again with -r"; popd; exit 10; + fi + cp "$VCS_WORK"/solr/package/solr-*.tgz ${CLUSTER_WD} + pushd # back into cluster wd to unpack + tar xzvf solr-*.tgz + popd +} + +############################################# +# Test to see if port for zookeeper is open # +# Assume that zookeeper holds it if it is # +############################################# +testZookeeper() { + PORT_FOUND=$( netstat -an | grep '\b'${ZK_PORT}'\s' | grep LISTEN | awk '{print $4}' | sed -E 's/.*\b('${ZK_PORT}')\s*/\1/'); + if [[ -z "$PORT_FOUND" ]]; then + echo "No process listening on port ${ZK_PORT}. Please start zookeeper and try again"; exit 8; + fi +} + +########################## +# Start server instances # +########################## +start(){ + testZookeeper + echo "Starting servers" + findSolr + + echo "SOLR=$SOLR" + SOLR_ROOT=$("${SOLR}/server/scripts/cloud-scripts/zkcli.sh" -zkhost localhost:${ZK_PORT} -cmd getfile "/solr_${SAFE_DEST}" /dev/stdout); + if [[ -z ${SOLR_ROOT} ]]; then + # Need a fresh root in zookeeper... + "${SOLR}/server/scripts/cloud-scripts/zkcli.sh" -zkhost localhost:${ZK_PORT} -cmd makepath "/solr_${SAFE_DEST}"; + "${SOLR}/server/scripts/cloud-scripts/zkcli.sh" -zkhost localhost:${ZK_PORT} -cmd put "/solr_${SAFE_DEST}" "created by cloud.sh"; # so we can test for existence next time + "${SOLR}/server/scripts/cloud-scripts/zkcli.sh" -zkhost localhost:${ZK_PORT} -cmd putfile "/solr_${SAFE_DEST}/solr.xml" "${SOLR}/server/solr/solr.xml"; + fi + + ACTUAL_NUM_NODES=$(ls -1 -d ${CLUSTER_WD}/n* | wc -l ) + if [[ "$NUM_NODES" -eq 0 ]]; then + NUM_NODES=${ACTUAL_NUM_NODES} + else + if [[ "$NUM_NODES" -ne "$ACTUAL_NUM_NODES" ]]; then + #check that this isn't first time startup.. + if [[ "$ACTUAL_NUM_NODES" -ne 0 ]]; then + echo "Requested $NUM_NODES for a cluster that already has $ACTUAL_NUM_NODES. 
Refusing to start!"; exit 9; + fi + fi + fi + + if [[ "$NUM_NODES" -eq 0 ]]; then + NUM_NODES=4 # nothing pre-existing found, default to 4 + fi + echo "Final NUM_NODES is $NUM_NODES" + for i in `seq 1 $NUM_NODES`; do + mkdir -p "${CLUSTER_WD}/n${i}" + argsArray=(-c -s $CLUSTER_WD_FULL/n${i} -z localhost:${ZK_PORT}/solr_${SAFE_DEST} -p 898${i} -m $MEMORY \ + -a "-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=500${i} \ + -Dsolr.solrxml.location=zookeeper -Dsolr.log.dir=$CLUSTER_WD_FULL/n${i} $JVM_ARGS") + FINAL_COMMAND="${SOLR}/bin/solr ${argsArray[@]}" + echo ${FINAL_COMMAND} + ${SOLR}/bin/solr "${argsArray[@]}" + done + + touch ${CLUSTER_WD} # make this the most recently updated dir for ls -t + +} + +stop() { + echo "Stopping servers" + pushd ${CLUSTER_WD} + SOLR=${CLUSTER_WD}/$(find . -maxdepth 1 -name 'solr*' -type d -print0 | xargs -0 ls -1 -td | sed -E 's/\.\/(solr.*)/\1/' | head -n1) + popd + + "${SOLR}/bin/solr" stop -all +} + +######################## +# process the commands # +######################## +case ${COMMAND} in + new) + testZookeeper + mkdirIfReq + recompileIfReq + if [[ "$RECOMPILE" = false ]]; then + copyTarball + fi + start + ;; + stop) + stop + ;; + start) + testZookeeper + cleanIfReq + recompileIfReq + start + ;; + restart) + testZookeeper + stop + cleanIfReq + recompileIfReq + start + ;; + *) echo "Invalid command $COMMAND"; exit 2; +esac \ No newline at end of file diff --git a/solr/cloud-dev/control.sh b/solr/cloud-dev/control.sh deleted file mode 100755 index 575e40cced88..000000000000 --- a/solr/cloud-dev/control.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/bin/bash - -source ./functions.sh - -case "$1" in - start) - start $2 $3 "$4" - ;; - stop) - stop $2 - ;; - kill) - do_kill $2 - ;; - reinstall) - reinstall $2 - ;; - rebuild) - rebuild $2 - ;; - status) - status $2 - ;; - cleanlogs) - cleanlogs $2 - ;; - taillogs) - taillogs $2 - ;; - createshard) - createshard $2 $3 $4 $5 - ;; - *) - echo $"Usage: $0 { rebuild| reinstall | 
start [numshards]| stop |kill | status| cleanlogs| createshard [shardId]}" - exit 1 -esac -exit 0 \ No newline at end of file diff --git a/solr/cloud-dev/example1.sh b/solr/cloud-dev/example1.sh deleted file mode 100755 index 418642d17640..000000000000 --- a/solr/cloud-dev/example1.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash - -cd .. - -rm -r -f example2 - -rm -r -f dist -rm -r -f build -rm -r -f example/solr/zoo_data -rm -r -f example/solr/collection1/data -rm -f example/example.log - -ant server dist - -cp -r -f example example2 - - -cd example -java -DzkRun -DnumShards=2 -DSTOP.PORT=7983 -DSTOP.KEY=key -Dbootstrap_conf=true -jar start.jar 1>example.log 2>&1 & - -sleep 10 - -cd ../example2 -java -Djetty.port=9574 -DzkRun -DzkHost=localhost:9983 -DSTOP.PORT=6574 -DSTOP.KEY=key -jar start.jar 1>example2.log 2>&1 & - - diff --git a/solr/cloud-dev/example2.sh b/solr/cloud-dev/example2.sh deleted file mode 100755 index 3c9f23268fc2..000000000000 --- a/solr/cloud-dev/example2.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/bash - -cd .. 
- -rm -r -f example2 -rm -r -f example3 -rm -r -f example4 - -rm -r -f dist -rm -r -f build -rm -r -f example/solr/zoo_data -rm -r -f example/solr/collection1/data -rm -f example/example.log - -ant server dist - -cp -r -f example example2 -cp -r -f example example3 -cp -r -f example example4 - - -cd example -java -DzkRun -DnumShards=2 -DSTOP.PORT=7983 -DSTOP.KEY=key -Dbootstrap_conf=true -jar start.jar 1>example.log 2>&1 & - -# wait for config to go up -sleep 10 - -cd ../example2 -java -Djetty.port=9574 -DzkRun -DzkHost=localhost:9983 -DSTOP.PORT=6574 -DSTOP.KEY=key -jar start.jar 1>example2.log 2>&1 & - -cd ../example3 -java -Djetty.port=9575 -DzkRun -DzkHost=localhost:9983 -DSTOP.PORT=6575 -DSTOP.KEY=key -jar start.jar 1>example3.log 2>&1 & - -cd ../example4 -java -Djetty.port=9576 -DzkHost=localhost:9983 -DnumShards=2 -DSTOP.PORT=6576 -DSTOP.KEY=key -jar start.jar 1>example4.log 2>&1 & - diff --git a/solr/cloud-dev/example3.sh b/solr/cloud-dev/example3.sh deleted file mode 100755 index 404db0184895..000000000000 --- a/solr/cloud-dev/example3.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash - -cd .. 
- -rm -r -f example2 -rm -r -f example3 -rm -r -f example4 - -rm -r -f dist -rm -r -f build -rm -r -f example/solr/zoo_data -rm -r -f example/solr/collection1/data -rm -f example/example.log - -ant server dist - -cp -r -f example example2 -cp -r -f example example3 -cp -r -f example example4 - - -cd example -java -DzkRun -DnumShards=2 -DSTOP.PORT=7983 -DSTOP.KEY=key -Dbootstrap_conf=true -DzkHost=localhost:9983,localhost:14574,localhost:14585 -jar start.jar 1>example.log 2>&1 & - -cd ../example2 -java -Djetty.port=13574 -DzkRun -DzkHost=localhost:9983,localhost:14574,localhost:14585 -DSTOP.PORT=6574 -DSTOP.KEY=key -jar start.jar 1>example2.log 2>&1 & - -cd ../example3 -java -Djetty.port=13585 -DzkRun -DzkHost=localhost:9983,localhost:14574,localhost:14585 -DSTOP.PORT=6575 -DSTOP.KEY=key -jar start.jar 1>example3.log 2>&1 & - -# wait for config to go up -sleep 10 - -cd ../example4 -java -Djetty.port=13596 -DzkHost=localhost:9983,localhost:14574,localhost:14585 -DnumShards=2 -DSTOP.PORT=6576 -DSTOP.KEY=key -jar start.jar 1>example4.log 2>&1 & diff --git a/solr/cloud-dev/functions.sh b/solr/cloud-dev/functions.sh deleted file mode 100755 index 148ec6922d3e..000000000000 --- a/solr/cloud-dev/functions.sh +++ /dev/null @@ -1,77 +0,0 @@ -INT_JAVA_OPTS="-server -Xms256M -Xmx256M" -BASE_PORT=8900 -BASE_STOP_PORT=9900 -ZK_PORT="2414" -ZK_CHROOT="solr" - -rebuild() { - echo "Rebuilding" - cd .. - rm -r -f dist - rm -r -f build - rm -r -f server/solr/zoo_data - rm -f server/server.log - ant server dist -} - -setports() { - PORT="$(( $BASE_PORT + $1 ))" - STOP_PORT="$(( $BASE_STOP_PORT + $1 ))" -} - -reinstall() { - echo "Reinstalling instance $1" - cd .. - rm -rf server$1 - cp -r -f server server$1 -} - -start() { - OPT="-DzkHost=localhost:$ZK_PORT/$ZK_CHROOT" - NUMSHARDS=$2 - - echo "Starting instance $1" - - setports $1 - cd ../server$1 - java $JAVA_OPTS -Djetty.port=$PORT $OPT -jar start.jar --module=http STOP.PORT=$STOP_PORT STOP.KEY=key jetty.base=. 
1>server$1.log 2>&1 & -} - -stop() { - echo "Stopping instance $1" - setports $1 - cd ../server$1 - java -jar start.jar --module=http STOP.PORT=$STOP_PORT STOP.KEY=key --stop -} - -do_kill() { - echo "Killing instance $1" - setports $1 - PID=`ps aux|grep "STOP.PORT=$STOP_PORT"|grep -v grep|cut -b 8-15` - if [ "" = "$PID" ]; then - echo "not running?" - else - kill -9 $PID - fi -} - -status() { - echo "Status:" - ps aux|grep "STOP.PORT"|grep -v grep -} - -cleanlogs() { - cd ../server$1 - mv server$1.log server$1.oldlog -} - -taillogs() { - cd ../server$1 - tail -f server$1.log -} - -createshard() { - setports $1 - echo "Creating new shard @instance $1, collection=$2, shard=$3, name=$4" - curl "http://127.0.0.1:$PORT/solr/admin/cores?action=CREATE&collection=$2&name=$3&shard=$4" -} diff --git a/solr/cloud-dev/solrcloud-start-existing.sh b/solr/cloud-dev/solrcloud-start-existing.sh deleted file mode 100755 index 9c5ec29fee4b..000000000000 --- a/solr/cloud-dev/solrcloud-start-existing.sh +++ /dev/null @@ -1,39 +0,0 @@ -#!/bin/bash - -numServers=$1 - -baseJettyPort=8900 -baseStopPort=9900 - -ZK_CHROOT="solr" - -die () { - echo >&2 "$@" - exit 1 -} - -[ "$#" -eq 1 ] || die "1 argument required, $# provided, usage: solrcloud-start-exisiting.sh [numServers]" - - -cd .. - -# Useful if you want to startup on an existing setup with new code mods -# ant server dist - -cd serverzk -stopPort=1313 -jettyPort=8900 -exec -a jettyzk java -Xmx512m $JAVA_OPTS -Djetty.port=$jettyPort -DhostPort=$jettyPort -DzkRun -DzkHost=localhost:9900/$ZK_CHROOT -DzkRunOnly=true -jar start.jar --module=http STOP.PORT=$stopPort STOP.KEY=key jetty.base=. 1>serverzk.log 2>&1 & - -cd .. 
- -cd server - -for (( i=1; i <= $numServers; i++ )) -do - echo "starting server$i" - cd ../server$i - stopPort=`expr $baseStopPort + $i` - jettyPort=`expr $baseJettyPort + $i` - exec -a jetty java -Xmx1g $JAVA_OPTS -Djetty.port=$jettyPort -DzkHost=localhost:9900/$ZK_CHROOT -jar start.jar --module=http STOP.PORT=$stopPort STOP.KEY=key jetty.base=. 1>server$i.log 2>&1 & -done diff --git a/solr/cloud-dev/solrcloud-start.sh b/solr/cloud-dev/solrcloud-start.sh deleted file mode 100755 index bf256184f11e..000000000000 --- a/solr/cloud-dev/solrcloud-start.sh +++ /dev/null @@ -1,74 +0,0 @@ -#!/bin/bash - -# These scripts are best effort developer scripts. No promises. - -# To run on hdfs, try something along the lines of: -# export JAVA_OPTS="-Dsolr.directoryFactory=solr.HdfsDirectoryFactory -Dsolr.lock.type=hdfs -Dsolr.hdfs.home=hdfs://localhost:8020/solr -Dsolr.hdfs.confdir=/etc/hadoop_conf/conf" - -# To use ZooKeeper security, try: -# export JAVA_OPTS="-DzkACLProvider=org.apache.solr.common.cloud.VMParamsAllAndReadonlyDigestZkACLProvider -DzkCredentialsProvider=org.apache.solr.common.cloud.VMParamsSingleSetCredentialsDigestZkCredentialsProvider -DzkDigestUsername=admin-user -DzkDigestPassword=admin-password -DzkDigestReadonlyUsername=readonly-user -DzkDigestReadonlyPassword=readonly-password" -# -# To create a collection, curl "localhost:8901/solr/admin/collections?action=CREATE&name=collection1&numShards=2&replicationFactor=1&maxShardsPerNode=10" -# To add a document, curl http://localhost:8901/solr/collection1/update -H 'Content-type:application/json' -d '[{"id" : "book1"}]' - -numServers=$1 -numShards=$2 - -baseJettyPort=8900 -baseStopPort=9900 - -zkAddress=localhost:9900/solr - -die () { - echo >&2 "$@" - exit 1 -} - -[ "$#" -eq 1 ] || die "1 argument required, $# provided, usage: solrcloud-start.sh [numServers]" - -cd .. 
- -for (( i=1; i <= $numServers; i++ )) -do - echo "try to remove existing directory: server$i" - rm -r -f server$i -done - - -rm -r -f dist -rm -r -f build -rm -r -f server/solr/zoo_data -rm -f server/server.log - -ant -f ../build.xml clean -ant server dist - -for (( i=1; i <= $numServers; i++ )) -do - echo "create server$i" - cp -r -f server server$i -done - -rm -r -f serverzk -cp -r -f server serverzk -cp core/src/test-files/solr/solr-no-core.xml serverzk/solr/solr.xml -rm -r -f serverzk/solr/collection1/core.properties -cd serverzk -stopPort=1313 -jettyPort=8900 -exec -a jettyzk java -Xmx512m $JAVA_OPTS -Djetty.port=$jettyPort -DhostPort=$jettyPort -DzkRun=localhost:9900/solr -DzkHost=$zkAddress -DzkRunOnly=true -jar start.jar --module=http STOP.PORT=$stopPort STOP.KEY=key jetty.base=. 1>serverzk.log 2>&1 & -cd .. - -# upload config files -java -classpath "server/solr-webapp/webapp/WEB-INF/lib/*:server/lib/ext/*" $JAVA_OPTS org.apache.solr.cloud.ZkCLI -zkhost $zkAddress -cmd upconfig --confdir server/solr/configsets/basic_configs/conf --confname basic_configs - -cd server - -for (( i=1; i <= $numServers; i++ )) -do - echo "starting server$i" - cd ../server$i - stopPort=`expr $baseStopPort + $i` - jettyPort=`expr $baseJettyPort + $i` - exec -a jetty java -Xmx1g $JAVA_OPTS -Djetty.port=$jettyPort -DzkHost=$zkAddress -jar start.jar --module=http STOP.PORT=$stopPort STOP.KEY=key jetty.base=. 
1>server$i.log 2>&1 & -done diff --git a/solr/cloud-dev/stop.sh b/solr/cloud-dev/stop.sh deleted file mode 100755 index 650219943d1e..000000000000 --- a/solr/cloud-dev/stop.sh +++ /dev/null @@ -1,64 +0,0 @@ -#!/bin/bash - -numServers=$1 -baseJettyPort=8900 -baseStopPort=9900 - -die () { - echo >&2 "$@" - exit 1 -} - -[ "$#" -eq 1 ] || die "1 argument required, $# provided, usage: stop.sh {numServers}" - -cd ../server - -for (( i=1; i <= $numServers; i++ )) -do - stopPort=`expr $baseStopPort + $i` - echo "stopping server$i, stop port is $stopPort" - cd ../server$i - java -jar start.jar --module=http STOP.PORT=$stopPort STOP.KEY=key --stop -done - - -mkdir ../server-lastlogs - -for (( i=1; i <= $numServers; i++ )) -do - cd ../server$i - - jettyPort=`expr $baseJettyPort + $i` - echo "Make sure jetty stops and wait for it: $jettyPort" - - pid=`lsof -i:$jettyPort -sTCP:LISTEN -t` - echo "pid:$pid" - #kill $pid - #wait $pid - if [ ! -z "$pid" ] - then - while [ -e /proc/$pid ]; do sleep 1; done - fi - - # save the last shutdown logs - echo "copy server$i.log to lastlogs" - cp -r -f server$i.log ../server-lastlogs/server-last$i.log -done - -# stop zk runner -java -jar start.jar --module=http STOP.PORT=1313 STOP.KEY=key --stop - -echo "wait for port to be available: $baseJettyPort" - -pid=`lsof -i:$baseJettyPort -sTCP:LISTEN -t` -echo "pid:$pid" -#kill $pid -#wait $pid -if [ ! 
-z "$pid" ] -then - while [ -e /proc/$pid ]; do sleep 0.1; done -fi -nc -w 30 127.0.0.1 $baseJettyPort - -sleep 5 - \ No newline at end of file diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/facet/PivotNode.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/facet/PivotNode.java index ac3a4ba1abd8..bec388bf2cc9 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/facet/PivotNode.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/facet/PivotNode.java @@ -90,9 +90,9 @@ public void importPivot(DataInput input, Map pivot) throws IOException */ public void exportPivot(DataOutput output, Map pivot) throws IOException { output.writeInt(pivot.size()); - for (String pivotValue : pivot.keySet()) { - output.writeUTF(pivotValue); - exportPivotValue(output, pivot.get(pivotValue)); + for (Map.Entry entry : pivot.entrySet()) { + output.writeUTF(entry.getKey()); + exportPivotValue(output, entry.getValue()); } } /** diff --git a/solr/contrib/clustering/ivy.xml b/solr/contrib/clustering/ivy.xml index a799c79fbf42..1de378ceb890 100644 --- a/solr/contrib/clustering/ivy.xml +++ b/solr/contrib/clustering/ivy.xml @@ -27,7 +27,7 @@ - + diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java index 80d374eb5055..8b64b6ffb300 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java @@ -36,7 +36,7 @@ import org.apache.solr.core.SolrResourceLoader; import org.apache.solr.handler.RequestHandlerBase; import org.apache.solr.metrics.MetricsMap; -import org.apache.solr.metrics.SolrMetrics; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.request.SolrQueryRequest; import 
org.apache.solr.response.RawResponseWriter; import org.apache.solr.response.SolrQueryResponse; @@ -275,8 +275,8 @@ public boolean upload(SolrInputDocument document) { } @Override - public void initializeMetrics(SolrMetrics m) { - super.initializeMetrics(m); + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + super.initializeMetrics(parentContext, scope); metrics = new MetricsMap((detailed, map) -> { if (importer != null) { DocBuilder.Statistics cumulative = importer.cumulativeStatistics; @@ -299,7 +299,7 @@ public void initializeMetrics(SolrMetrics m) { map.put(DataImporter.MSG.TOTAL_DOCS_SKIPPED, cumulative.skipDocCount); } }); - solrMetrics.gauge(this, metrics, true, "importer", getCategory().toString()); + solrMetricsContext.gauge(this, metrics, true, "importer", getCategory().toString(), scope); } // //////////////////////SolrInfoMBeans methods ////////////////////// diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRQParserExplain.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRQParserExplain.java index b4a821391f80..14fb7e1a0f9a 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRQParserExplain.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRQParserExplain.java @@ -18,20 +18,20 @@ import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.ltr.model.LinearModel; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; public class TestLTRQParserExplain extends TestRerankBase { - @BeforeClass - public static void setup() throws Exception { + @Before + public void setup() throws Exception { setuptest(true); loadFeatures("features-store-test-model.json"); } - @AfterClass - public static void after() throws Exception { + @After + public void after() throws Exception { aftertest(); } diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRWithFacet.java 
b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRWithFacet.java index db8fb38b3fc3..31c2b761a59e 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRWithFacet.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRWithFacet.java @@ -20,14 +20,14 @@ import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.ltr.feature.SolrFeature; import org.apache.solr.ltr.model.LinearModel; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; public class TestLTRWithFacet extends TestRerankBase { - @BeforeClass - public static void before() throws Exception { + @Before + public void before() throws Exception { setuptest(false); assertU(adoc("id", "1", "title", "a1", "description", "E", "popularity", @@ -48,6 +48,12 @@ public static void before() throws Exception { "D", "popularity", "8")); assertU(commit()); } + + @After + public void after() throws Exception { + aftertest(); + } + @Test public void testRankingSolrFacet() throws Exception { @@ -91,13 +97,6 @@ public void testRankingSolrFacet() throws Exception { assertJQ("/query" + query.toQueryString(), "" + "/facet_counts/facet_fields/description==" + "['b', 4, 'e', 2, 'c', 1, 'd', 1]"); - // aftertest(); - - } - - @AfterClass - public static void after() throws Exception { - aftertest(); } } diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRWithSort.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRWithSort.java index c3c9857b19ca..708fdc8105b6 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRWithSort.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRWithSort.java @@ -20,14 +20,14 @@ import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.ltr.feature.SolrFeature; import org.apache.solr.ltr.model.LinearModel; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import 
org.junit.Test; public class TestLTRWithSort extends TestRerankBase { - @BeforeClass - public static void before() throws Exception { + @Before + public void before() throws Exception { setuptest(false); assertU(adoc("id", "1", "title", "a1", "description", "E", "popularity", "1")); @@ -47,6 +47,11 @@ public static void before() throws Exception { "D", "popularity", "8")); assertU(commit()); } + + @After + public void after() throws Exception { + aftertest(); + } @Test public void testRankingSolrSort() throws Exception { @@ -90,13 +95,6 @@ public void testRankingSolrSort() throws Exception { assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='1'"); assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/score==1.0"); - // aftertest(); - - } - - @AfterClass - public static void after() throws Exception { - aftertest(); } } diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java index 1cffeccf3d89..9d22cf4e9f89 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java @@ -36,6 +36,7 @@ import org.apache.solr.common.util.ContentStream; import org.apache.solr.common.util.ContentStreamBase; import org.apache.solr.common.util.Utils; +import org.apache.solr.core.SolrCore; import org.apache.solr.core.SolrResourceLoader; import org.apache.solr.ltr.feature.Feature; import org.apache.solr.ltr.feature.FeatureException; @@ -129,11 +130,15 @@ protected static void setupPersistenttest(boolean bulkIndex) throws Exception { } public static ManagedFeatureStore getManagedFeatureStore() { - return ManagedFeatureStore.getManagedFeatureStore(h.getCore()); + try (SolrCore core = jetty.getCoreContainer().getCore(DEFAULT_TEST_CORENAME)) { + return ManagedFeatureStore.getManagedFeatureStore(core); + } } public static ManagedModelStore getManagedModelStore() { - return 
ManagedModelStore.getManagedModelStore(h.getCore()); + try (SolrCore core = jetty.getCoreContainer().getCore(DEFAULT_TEST_CORENAME)) { + return ManagedModelStore.getManagedModelStore(core); + } } protected static SortedMap setupTestInit( @@ -192,7 +197,6 @@ protected static SortedMap setupTestInit( public static void setuptest(String solrconfig, String schema) throws Exception { - initCore(solrconfig, schema); SortedMap extraServlets = setupTestInit(solrconfig,schema,false); @@ -204,7 +208,6 @@ public static void setuptest(String solrconfig, String schema) public static void setupPersistentTest(String solrconfig, String schema) throws Exception { - initCore(solrconfig, schema); SortedMap extraServlets = setupTestInit(solrconfig,schema,true); diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestEdisMaxSolrFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestEdisMaxSolrFeature.java index 4fd77e317cda..e162c8c467f9 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestEdisMaxSolrFeature.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestEdisMaxSolrFeature.java @@ -19,14 +19,14 @@ import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.ltr.TestRerankBase; import org.apache.solr.ltr.model.LinearModel; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; public class TestEdisMaxSolrFeature extends TestRerankBase { - @BeforeClass - public static void before() throws Exception { + @Before + public void before() throws Exception { setuptest(false); assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity", @@ -48,8 +48,8 @@ public static void before() throws Exception { assertU(commit()); } - @AfterClass - public static void after() throws Exception { + @After + public void after() throws Exception { aftertest(); } diff --git 
a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureLogging.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureLogging.java index 52fe70ef088b..e7af2506b5f7 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureLogging.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureLogging.java @@ -21,19 +21,19 @@ import org.apache.solr.ltr.TestRerankBase; import org.apache.solr.ltr.model.LinearModel; import org.apache.solr.ltr.store.FeatureStore; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; public class TestFeatureLogging extends TestRerankBase { - @BeforeClass - public static void setup() throws Exception { + @Before + public void setup() throws Exception { setuptest(true); } - @AfterClass - public static void after() throws Exception { + @After + public void after() throws Exception { aftertest(); } diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFieldLengthFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFieldLengthFeature.java index 64f9778ca61d..b913d1bd72b7 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFieldLengthFeature.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFieldLengthFeature.java @@ -21,14 +21,14 @@ import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.ltr.TestRerankBase; import org.apache.solr.ltr.model.LinearModel; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; public class TestFieldLengthFeature extends TestRerankBase { - @BeforeClass - public static void before() throws Exception { + @Before + public void before() throws Exception { setuptest(false); assertU(adoc("id", "1", "title", "w1", "description", "w1")); @@ -45,8 +45,8 @@ public static void before() throws Exception { 
assertU(commit()); } - @AfterClass - public static void after() throws Exception { + @After + public void after() throws Exception { aftertest(); } diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFieldValueFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFieldValueFeature.java index ceaf6e6c8fda..108044b5cbdc 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFieldValueFeature.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFieldValueFeature.java @@ -22,16 +22,16 @@ import org.apache.solr.ltr.FeatureLoggerTestUtils; import org.apache.solr.ltr.TestRerankBase; import org.apache.solr.ltr.model.LinearModel; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; public class TestFieldValueFeature extends TestRerankBase { private static final float FIELD_VALUE_FEATURE_DEFAULT_VAL = 0.0f; - @BeforeClass - public static void before() throws Exception { + @Before + public void before() throws Exception { setuptest(false); assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity", @@ -63,8 +63,8 @@ public static void before() throws Exception { new String[] {"popularity"}, "{\"weights\":{\"popularity\":1.0}}"); } - @AfterClass - public static void after() throws Exception { + @After + public void after() throws Exception { aftertest(); } diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFilterSolrFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFilterSolrFeature.java index d3c72109fa56..d42176e55e2f 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFilterSolrFeature.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFilterSolrFeature.java @@ -21,13 +21,13 @@ import org.apache.solr.ltr.TestRerankBase; import org.apache.solr.ltr.model.LinearModel; import org.apache.solr.ltr.store.rest.ManagedFeatureStore; -import 
org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; public class TestFilterSolrFeature extends TestRerankBase { - @BeforeClass - public static void before() throws Exception { + @Before + public void before() throws Exception { setuptest(false); assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity", @@ -49,8 +49,8 @@ public static void before() throws Exception { assertU(commit()); } - @AfterClass - public static void after() throws Exception { + @After + public void after() throws Exception { aftertest(); } diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestNoMatchSolrFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestNoMatchSolrFeature.java index a9395bf31a14..48c1262c547d 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestNoMatchSolrFeature.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestNoMatchSolrFeature.java @@ -25,14 +25,14 @@ import org.apache.solr.ltr.TestRerankBase; import org.apache.solr.ltr.model.LinearModel; import org.apache.solr.ltr.model.MultipleAdditiveTreesModel; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; public class TestNoMatchSolrFeature extends TestRerankBase { - @BeforeClass - public static void before() throws Exception { + @Before + public void before() throws Exception { setuptest(false); assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity", @@ -78,8 +78,8 @@ public static void before() throws Exception { "{\"weights\":{\"nomatchfeature4\":1.0}}"); } - @AfterClass - public static void after() throws Exception { + @After + public void after() throws Exception { aftertest(); } diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java 
index b0af388ddcab..8ff568426058 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java @@ -26,14 +26,14 @@ import org.apache.solr.ltr.FeatureLoggerTestUtils; import org.apache.solr.ltr.TestRerankBase; import org.apache.solr.ltr.model.LinearModel; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; public class TestOriginalScoreFeature extends TestRerankBase { - @BeforeClass - public static void before() throws Exception { + @Before + public void before() throws Exception { setuptest(false); assertU(adoc("id", "1", "title", "w1")); @@ -47,8 +47,8 @@ public static void before() throws Exception { assertU(commit()); } - @AfterClass - public static void after() throws Exception { + @After + public void after() throws Exception { aftertest(); } diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestRankingFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestRankingFeature.java index 8db1a4bceb44..eab96105684e 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestRankingFeature.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestRankingFeature.java @@ -21,15 +21,15 @@ import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.ltr.TestRerankBase; import org.apache.solr.ltr.model.LinearModel; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; public class TestRankingFeature extends TestRerankBase { - @BeforeClass - public static void before() throws Exception { + @Before + public void before() throws Exception { setuptest(false); assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity", @@ -51,8 +51,8 @@ public static void before() throws Exception { assertU(commit()); } - 
@AfterClass - public static void after() throws Exception { + @After + public void after() throws Exception { aftertest(); } @@ -118,7 +118,6 @@ public void testRankingSolrFeature() throws Exception { "/error/msg/=='"+FeatureException.class.getName()+": " + "java.lang.UnsupportedOperationException: " + "Unable to extract feature for powdesS'"); - // aftertest(); } diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScoreWithQ.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScoreWithQ.java index 8ca63124d72a..9b7e7683d162 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScoreWithQ.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScoreWithQ.java @@ -19,14 +19,14 @@ import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.ltr.TestRerankBase; import org.apache.solr.ltr.model.LinearModel; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; public class TestUserTermScoreWithQ extends TestRerankBase { - @BeforeClass - public static void before() throws Exception { + @Before + public void before() throws Exception { setuptest(false); assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity", @@ -48,8 +48,8 @@ public static void before() throws Exception { assertU(commit()); } - @AfterClass - public static void after() throws Exception { + @After + public void after() throws Exception { aftertest(); } diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScorerQuery.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScorerQuery.java index 29a537223ae5..3b6b93d63924 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScorerQuery.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScorerQuery.java @@ -19,14 +19,14 @@ import 
org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.ltr.TestRerankBase; import org.apache.solr.ltr.model.LinearModel; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; public class TestUserTermScorerQuery extends TestRerankBase { - @BeforeClass - public static void before() throws Exception { + @Before + public void before() throws Exception { setuptest(false); assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity", @@ -48,8 +48,8 @@ public static void before() throws Exception { assertU(commit()); } - @AfterClass - public static void after() throws Exception { + @After + public void after() throws Exception { aftertest(); } diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScorereQDF.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScorereQDF.java index 62061ca36946..b5882d5e8e2a 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScorereQDF.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScorereQDF.java @@ -19,14 +19,14 @@ import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.ltr.TestRerankBase; import org.apache.solr.ltr.model.LinearModel; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; public class TestUserTermScorereQDF extends TestRerankBase { - @BeforeClass - public static void before() throws Exception { + @Before + public void before() throws Exception { setuptest(false); assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity", @@ -48,8 +48,8 @@ public static void before() throws Exception { assertU(commit()); } - @AfterClass - public static void after() throws Exception { + @After + public void after() throws Exception { aftertest(); } diff --git 
a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestValueFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestValueFeature.java index 56f9efbcc117..8a3b014f05c8 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestValueFeature.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestValueFeature.java @@ -21,14 +21,14 @@ import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.ltr.TestRerankBase; import org.apache.solr.ltr.model.LinearModel; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; public class TestValueFeature extends TestRerankBase { - @BeforeClass - public static void before() throws Exception { + @Before + public void before() throws Exception { setuptest(false); assertU(adoc("id", "1", "title", "w1")); @@ -42,8 +42,8 @@ public static void before() throws Exception { assertU(commit()); } - @AfterClass - public static void after() throws Exception { + @After + public void after() throws Exception { aftertest(); } diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestAdapterModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestAdapterModel.java index 09814011104c..b0f8b1f28fda 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestAdapterModel.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestAdapterModel.java @@ -38,7 +38,8 @@ import org.apache.solr.ltr.feature.FieldValueFeature; import org.apache.solr.ltr.norm.Normalizer; import org.apache.solr.ltr.store.rest.ManagedModelStore; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; public class TestAdapterModel extends TestRerankBase { @@ -46,15 +47,15 @@ public class TestAdapterModel extends TestRerankBase { private static int numDocs = 0; private static float scoreValue; - @BeforeClass - public static void setupBeforeClass() throws 
Exception { + @Before + public void setup() throws Exception { setuptest(false); - for (int ii=1; ii<=random().nextInt(10); ++ii) { + numDocs = random().nextInt(10); + for (int ii=1; ii <= numDocs; ++ii) { String id = Integer.toString(ii); assertU(adoc("id", id, "popularity", ii+"00")); - ++numDocs; } assertU(commit()); @@ -76,6 +77,10 @@ public static void setupBeforeClass() throws Exception { "{\"answerFileName\":\"" + scoreValueFile.getName() + "\"}"); assertJPut(ManagedModelStore.REST_END_POINT, modelJson, "/responseHeader/status==0"); } + @After + public void cleanup() throws Exception { + aftertest(); + } @Test public void test() throws Exception { diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestDefaultWrapperModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestDefaultWrapperModel.java index 25e716a8251e..00ed14d8ad55 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestDefaultWrapperModel.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestDefaultWrapperModel.java @@ -31,19 +31,19 @@ import org.apache.solr.ltr.feature.FieldValueFeature; import org.apache.solr.ltr.feature.ValueFeature; import org.apache.solr.ltr.store.rest.ManagedModelStore; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; public class TestDefaultWrapperModel extends TestRerankBase { final private static String featureStoreName = "test"; - private static String baseModelJson = null; private static File baseModelFile = null; static List features = null; - @BeforeClass - public static void setupBeforeClass() throws Exception { + @Before + public void setupBeforeClass() throws Exception { setuptest(false); assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity", "1")); assertU(adoc("id", "2", "title", "w2", "description", "w2", "popularity", "2")); @@ -58,7 +58,7 @@ public static void setupBeforeClass() throws Exception { 
features.add(getManagedFeatureStore().getFeatureStore("test").get("popularity")); features.add(getManagedFeatureStore().getFeatureStore("test").get("const")); - baseModelJson = getModelInJson("linear", LinearModel.class.getName(), + final String baseModelJson = getModelInJson("linear", LinearModel.class.getName(), new String[] {"popularity", "const"}, featureStoreName, "{\"weights\":{\"popularity\":-1.0, \"const\":1.0}}"); @@ -70,6 +70,13 @@ public static void setupBeforeClass() throws Exception { } baseModelFile.deleteOnExit(); } + + @After + public void cleanup() throws Exception { + features = null; + baseModelFile = null; + aftertest(); + } private static String getDefaultWrapperModelInJson(String wrapperModelName, String[] features, String params) { return getModelInJson(wrapperModelName, DefaultWrapperModel.class.getName(), diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestLinearModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestLinearModel.java index 0abba9105bfa..d5950e640f01 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestLinearModel.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestLinearModel.java @@ -29,7 +29,8 @@ import org.apache.solr.ltr.norm.Normalizer; import org.apache.solr.ltr.store.FeatureStore; import org.apache.solr.ltr.store.rest.ManagedModelStore; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; public class TestLinearModel extends TestRerankBase { @@ -58,15 +59,21 @@ public static Map makeFeatureWeights(List features) { static ManagedModelStore store = null; static FeatureStore fstore = null; - @BeforeClass - public static void setup() throws Exception { + @Before + public void setup() throws Exception { setuptest(true); // loadFeatures("features-store-test-model.json"); store = getManagedModelStore(); fstore = getManagedFeatureStore().getFeatureStore("test"); } - + @After + public void cleanup() throws 
Exception { + store = null; + fstore = null; + aftertest(); + } + @Test public void getInstanceTest() { final Map weights = new HashMap<>(); diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestMultipleAdditiveTreesModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestMultipleAdditiveTreesModel.java index d57d2f39cb32..8cb59f2dd82f 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestMultipleAdditiveTreesModel.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestMultipleAdditiveTreesModel.java @@ -18,17 +18,16 @@ import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.ltr.TestRerankBase; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; import static org.hamcrest.core.StringContains.containsString; public class TestMultipleAdditiveTreesModel extends TestRerankBase { - - @BeforeClass - public static void before() throws Exception { + @Before + public void before() throws Exception { setuptest(false); assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity","1")); @@ -39,8 +38,8 @@ public static void before() throws Exception { assertU(commit()); } - @AfterClass - public static void after() throws Exception { + @After + public void after() throws Exception { aftertest(); } diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java index 9614733565fe..045c625a218d 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java @@ -28,8 +28,8 @@ import org.apache.solr.ltr.norm.IdentityNormalizer; import org.apache.solr.ltr.norm.Normalizer; import org.apache.solr.util.SolrPluginUtils; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; 
+import org.junit.Before; import org.junit.Test; public class TestNeuralNetworkModel extends TestRerankBase { @@ -44,13 +44,13 @@ public static LTRScoringModel createNeuralNetworkModel(String name, List createMap(String name, String className, Map params) { final Map map = new HashMap(); diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManagerPersistence.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManagerPersistence.java index bd93027cbb45..0e829e68d95f 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManagerPersistence.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManagerPersistence.java @@ -35,15 +35,20 @@ import org.apache.solr.ltr.model.LinearModel; import org.apache.solr.ltr.norm.Normalizer; import org.apache.solr.ltr.store.FeatureStore; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import org.junit.Test; public class TestModelManagerPersistence extends TestRerankBase { - @BeforeClass - public static void init() throws Exception { + @Before + public void init() throws Exception { setupPersistenttest(true); } + @After + public void cleanup() throws Exception { + aftertest(); + } // executed first @Test diff --git a/solr/contrib/prometheus-exporter/bin/solr-exporter b/solr/contrib/prometheus-exporter/bin/solr-exporter index ea349609ed44..9dc717ea7c8e 100755 --- a/solr/contrib/prometheus-exporter/bin/solr-exporter +++ b/solr/contrib/prometheus-exporter/bin/solr-exporter @@ -104,7 +104,23 @@ do CLASSPATH="$CLASSPATH":"$JAR" done -EXTRA_JVM_ARGUMENTS="-Xmx512m -Dlog4j.configurationFile=file:"$BASEDIR"/../../server/resources/log4j2-console.xml" +# Memory settings +JAVA_MEM_OPTS= +if [ -z "$JAVA_HEAP" ] && [ -n "$JAVA_MEM" ]; then + JAVA_MEM_OPTS="$JAVA_MEM" +else + JAVA_HEAP="${JAVA_HEAP:-512m}" + JAVA_MEM_OPTS="-Xms$JAVA_HEAP -Xmx$JAVA_HEAP" +fi + +# define default GC_TUNE +if [ -z ${GC_TUNE+x} ]; then + 
GC_TUNE='-XX:+UseG1GC' +else + GC_TUNE="$GC_TUNE" +fi + +EXTRA_JVM_ARGUMENTS="-Dlog4j.configurationFile=file:"$BASEDIR"/../../server/resources/log4j2-console.xml" # For Cygwin, switch paths to Windows format before running java if $cygwin; then @@ -115,7 +131,10 @@ if $cygwin; then [ -n "$REPO" ] && REPO=`cygpath --path --windows "$REPO"` fi -exec "$JAVACMD" $JAVA_OPTS \ +exec "$JAVACMD" \ + $JAVA_MEM_OPTS \ + $GC_TUNE \ + $JAVA_OPTS \ $EXTRA_JVM_ARGUMENTS \ -classpath "$CLASSPATH" \ -Dapp.name="solr-exporter" \ diff --git a/solr/contrib/prometheus-exporter/bin/solr-exporter.cmd b/solr/contrib/prometheus-exporter/bin/solr-exporter.cmd index 4ff47cf6d1f9..e5bd65ef8a4d 100644 --- a/solr/contrib/prometheus-exporter/bin/solr-exporter.cmd +++ b/solr/contrib/prometheus-exporter/bin/solr-exporter.cmd @@ -63,19 +63,22 @@ set BASEDIR=%~dp0.. :repoSetup +IF NOT "%JAVA_HEAP%"=="" set JAVA_MEM=-Xms%JAVA_HEAP% -Xmx%JAVA_HEAP% +IF "%JAVA_MEM%"=="" set JAVA_MEM=-Xms512m -Xmx512m +IF "%GC_TUNE%"=="" set GC_TUNE=-XX:+UseG1GC if "%JAVACMD%"=="" set JAVACMD=java if "%REPO%"=="" set REPO=%BASEDIR%\lib set CLASSPATH=%REPO%\*;%BASEDIR%\..\..\dist\solrj-lib\*;%BASEDIR%\..\..\dist\*;%BASEDIR%\lucene-libs\*;%BASEDIR%\..\..\server\solr-webapp\webapp\WEB-INF\lib\* -set EXTRA_JVM_ARGUMENTS=-Xmx512m -Dlog4j.configurationFile=file:///%BASEDIR%\..\..\server\resources\log4j2-console.xml +set EXTRA_JVM_ARGUMENTS=-Dlog4j.configurationFile=file:///%BASEDIR%\..\..\server\resources\log4j2-console.xml goto endInit @REM Reaching here means variables are defined and arguments have been captured :endInit -%JAVACMD% %JAVA_OPTS% %EXTRA_JVM_ARGUMENTS% -classpath "%CLASSPATH_PREFIX%;%CLASSPATH%" -Dapp.name="solr-exporter" -Dapp.repo="%REPO%" -Dbasedir="%BASEDIR%" org.apache.solr.prometheus.exporter.SolrExporter %CMD_LINE_ARGS% +%JAVACMD% %JAVA_MEM% %GC_TUNE% %JAVA_OPTS% %EXTRA_JVM_ARGUMENTS% -classpath "%CLASSPATH_PREFIX%;%CLASSPATH%" -Dapp.name="solr-exporter" -Dapp.repo="%REPO%" -Dbasedir="%BASEDIR%" 
org.apache.solr.prometheus.exporter.SolrExporter %CMD_LINE_ARGS% if ERRORLEVEL 1 goto error goto end diff --git a/solr/contrib/prometheus-exporter/src/test/org/apache/solr/prometheus/exporter/SolrExporterIntegrationTest.java b/solr/contrib/prometheus-exporter/src/test/org/apache/solr/prometheus/exporter/SolrExporterIntegrationTest.java index 402ade6eed40..ceb4a4e0819d 100644 --- a/solr/contrib/prometheus-exporter/src/test/org/apache/solr/prometheus/exporter/SolrExporterIntegrationTest.java +++ b/solr/contrib/prometheus-exporter/src/test/org/apache/solr/prometheus/exporter/SolrExporterIntegrationTest.java @@ -23,6 +23,7 @@ import org.junit.Before; import org.junit.Test; +@org.apache.lucene.util.LuceneTestCase.AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-13786") @Slow public class SolrExporterIntegrationTest extends SolrExporterTestBase { diff --git a/solr/contrib/prometheus-exporter/src/test/org/apache/solr/prometheus/exporter/SolrExporterTestBase.java b/solr/contrib/prometheus-exporter/src/test/org/apache/solr/prometheus/exporter/SolrExporterTestBase.java index 3f43843bca61..a390be1f62f3 100644 --- a/solr/contrib/prometheus-exporter/src/test/org/apache/solr/prometheus/exporter/SolrExporterTestBase.java +++ b/solr/contrib/prometheus-exporter/src/test/org/apache/solr/prometheus/exporter/SolrExporterTestBase.java @@ -107,7 +107,7 @@ protected Map getAllMetrics() throws URISyntaxException, IOExcep String[] parts = currentLine.split(" "); - assertEquals("Metric must have name and value", 2, parts.length); + assertEquals("Metric must have name and value: " + currentLine, 2, parts.length); metrics.put(parts[0], Double.valueOf(parts[1])); } diff --git a/solr/core/ivy.xml b/solr/core/ivy.xml index 37b7d2cdaaac..cdf4dccd30b7 100644 --- a/solr/core/ivy.xml +++ b/solr/core/ivy.xml @@ -80,7 +80,6 @@ - diff --git a/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java b/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java new file mode 100644 index 
000000000000..e9073ae7bb15 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java @@ -0,0 +1,309 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.api; + + +import java.lang.invoke.MethodHandles; +import java.lang.reflect.Field; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.solr.client.solrj.SolrRequest; +import org.apache.solr.common.SolrException; +import org.apache.solr.common.SpecProvider; +import org.apache.solr.common.util.CommandOperation; +import org.apache.solr.common.util.Utils; +import org.apache.solr.common.util.ValidatingJsonMap; +import org.apache.solr.request.SolrQueryRequest; +import org.apache.solr.response.SolrQueryResponse; +import org.apache.solr.security.AuthorizationContext; 
+import org.apache.solr.security.PermissionNameProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This class implements an Api just from an annotated java class + * The class must have an annotation {@link EndPoint} + * Each method must have an annotation {@link Command} + * The methods that implement a command should have the first 2 parameters + * {@link SolrQueryRequest} and {@link SolrQueryResponse} or it may optionally + * have a third parameter which could be a java class annotated with jackson annotations. + * The third parameter is only valid if it is using a json command payload + */ + +public class AnnotatedApi extends Api implements PermissionNameProvider { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + public static final String ERR ="Error executing commands :"; + private EndPoint endPoint; + private Map commands = new HashMap<>(); + private final Api fallback; + + public AnnotatedApi(Object obj) { + this(obj, null); + } + + public AnnotatedApi(Object obj, Api fallback) { + super(readSpec(obj.getClass())); + this.fallback = fallback; + Class klas = obj.getClass(); + if (!Modifier.isPublic(klas.getModifiers())) { + throw new RuntimeException(obj.getClass().getName() + " is not public"); + } + + endPoint = klas.getAnnotation(EndPoint.class); + + for (Method m : klas.getDeclaredMethods()) { + Command command = m.getAnnotation(Command.class); + if (command == null) continue; + + if (commands.containsKey(command.name())) { + throw new RuntimeException("Duplicate commands " + command.name()); + } + commands.put(command.name(), new Cmd(command, obj, m)); + } + + } + + @Override + public Name getPermissionName(AuthorizationContext request) { + return endPoint.permission(); + } + + private static SpecProvider readSpec(Class klas) { + EndPoint endPoint = (EndPoint) klas.getAnnotation(EndPoint.class); + if (endPoint == null) + throw new 
SolrException(SolrException.ErrorCode.SERVER_ERROR, "Invalid class : " + klas.getName()); + return () -> { + Map map = new LinkedHashMap(); + List methods = new ArrayList<>(); + for (SolrRequest.METHOD method : endPoint.method()) { + methods.add(method.name()); + } + map.put("methods", methods); + map.put("url", new ValidatingJsonMap(Collections.singletonMap("paths", Arrays.asList(endPoint.path())))); + Map cmds = new HashMap<>(); + + for (Method method : klas.getMethods()) { + Command command = method.getAnnotation(Command.class); + if (command != null && !command.name().isEmpty()) { + cmds.put(command.name(), AnnotatedApi.createSchema(method)); + } + } + if (!cmds.isEmpty()) { + map.put("commands", cmds); + } + return new ValidatingJsonMap(map); + }; + + + } + + + @Override + public void call(SolrQueryRequest req, SolrQueryResponse rsp) { + if (commands.size() == 1) { + Cmd cmd = commands.get(""); + if (cmd != null) { + cmd.invoke(req, rsp, null); + return; + } + } + + List cmds = req.getCommands(false); + boolean allExists = true; + for (CommandOperation cmd : cmds) { + if (!commands.containsKey(cmd.name)) { + cmd.addError("No such command supported: " + cmd.name); + allExists = false; + } + } + if (!allExists) { + if (fallback != null) { + fallback.call(req, rsp); + return; + } else { + throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "Error processing commands", + CommandOperation.captureErrors(cmds)); + } + } + + for (CommandOperation cmd : cmds) { + commands.get(cmd.name).invoke(req, rsp, cmd); + } + + List errs = CommandOperation.captureErrors(cmds); + if (!errs.isEmpty()) { + log.error(ERR+ Utils.toJSONString(errs)); + throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, ERR , errs); + } + + } + + class Cmd { + final Command command; + final Method method; + final Object obj; + ObjectMapper mapper = new ObjectMapper(); + int paramsCount; + Class c; + boolean isWrappedInPayloadObj = false; + + + 
Cmd(Command command, Object obj, Method method) { + if (Modifier.isPublic(method.getModifiers())) { + this.command = command; + this.obj = obj; + this.method = method; + Class[] parameterTypes = method.getParameterTypes(); + paramsCount = parameterTypes.length; + if (parameterTypes[0] != SolrQueryRequest.class || parameterTypes[1] != SolrQueryResponse.class) { + throw new RuntimeException("Invalid params for method " + method); + } + if (parameterTypes.length == 3) { + Type t = method.getGenericParameterTypes()[2]; + if (t instanceof ParameterizedType) { + ParameterizedType typ = (ParameterizedType) t; + if (typ.getRawType() == PayloadObj.class) { + isWrappedInPayloadObj = true; + Type t1 = typ.getActualTypeArguments()[0]; + if (t1 instanceof ParameterizedType) { + ParameterizedType parameterizedType = (ParameterizedType) t1; + c = (Class) parameterizedType.getRawType(); + } else { + c = (Class) typ.getActualTypeArguments()[0]; + } + } + } else { + c = (Class) t; + } + + } + if (parameterTypes.length > 3) { + throw new RuntimeException("Invalid params count for method " + method); + + } + } else { + throw new RuntimeException(method.toString() + " is not a public static method"); + } + + } + + void invoke(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation cmd) { + try { + if (paramsCount == 2) { + method.invoke(obj, req, rsp); + } else { + Object o = cmd.getCommandData(); + if (o instanceof Map && c != null) { + o = mapper.readValue(Utils.toJSONString(o), c); + } + if (isWrappedInPayloadObj) { + PayloadObj payloadObj = new PayloadObj<>(cmd.name, cmd.getCommandData(), o); + method.invoke(obj, req, rsp, payloadObj); + } else { + method.invoke(obj, req, rsp, o); + } + if (cmd.hasError()) { + throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "Error executing command", + CommandOperation.captureErrors(Collections.singletonList(cmd))); + } + } + + + } catch (SolrException se) { + throw se; + } catch (InvocationTargetException ite) { 
+ throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, ite.getCause()); + } catch (Exception e) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); + } + + } + } + + private static final Map primitives = new HashMap<>(); + + static { + primitives.put(String.class, "string"); + primitives.put(Integer.class, "integer"); + primitives.put(int.class, "integer"); + primitives.put(Float.class, "number"); + primitives.put(float.class, "number"); + primitives.put(Double.class, "number"); + primitives.put(double.class, "number"); + primitives.put(Boolean.class, "boolean"); + primitives.put(List.class, "array"); + } + + + public static Map createSchema(Method m) { + Type[] types = m.getGenericParameterTypes(); + if (types.length == 3) { + return createSchemaFromType(types[2]); + + } + return null; + } + + private static Map createSchemaFromType(Type t) { + Map map = new LinkedHashMap<>(); + if (t instanceof ParameterizedType) { + ParameterizedType typ = (ParameterizedType) t; + if (typ.getRawType() == PayloadObj.class) { + t = typ.getActualTypeArguments()[0]; + } + } + + if (primitives.containsKey(t)) { + map.put("type", primitives.get(t)); + } else if (t instanceof ParameterizedType && ((ParameterizedType) t).getRawType() == List.class) { + Type typ = ((ParameterizedType) t).getActualTypeArguments()[0]; + map.put("type", "array"); + map.put("items", createSchemaFromType(typ)); + } else { + createObjectSchema((Class) t, map); + } + return map; + } + + private static void createObjectSchema(Class klas, Map map) { + map.put("type", "object"); + Map props = new HashMap<>(); + map.put("properties", props); + for (Field fld : klas.getDeclaredFields()) { + JsonProperty p = fld.getAnnotation(JsonProperty.class); + if (p == null) continue; + props.put(p.value(), createSchemaFromType(fld.getGenericType())); + } + } + + +} diff --git a/solr/core/src/java/org/apache/solr/api/ApiBag.java b/solr/core/src/java/org/apache/solr/api/ApiBag.java index 
bfeb0efa88db..8a3f9727d165 100644 --- a/solr/core/src/java/org/apache/solr/api/ApiBag.java +++ b/solr/core/src/java/org/apache/solr/api/ApiBag.java @@ -230,28 +230,22 @@ public Api lookup(String path, String httpMethod, Map parts) { } public static class ReqHandlerToApi extends Api implements PermissionNameProvider { - PluginBag.PluginHolder rh; + SolrRequestHandler rh; public ReqHandlerToApi(SolrRequestHandler rh, SpecProvider spec) { - super(spec); - this.rh = new PluginBag.PluginHolder(new PluginInfo(SolrRequestHandler.TYPE, Collections.emptyMap()),rh ); - } - - public ReqHandlerToApi(PluginBag.PluginHolder rh, SpecProvider spec) { super(spec); this.rh = rh; } @Override public void call(SolrQueryRequest req, SolrQueryResponse rsp) { - rh.get().handleRequest(req, rsp); + rh.handleRequest(req, rsp); } @Override public Name getPermissionName(AuthorizationContext ctx) { - SolrRequestHandler handler = rh.get(); - if (handler instanceof PermissionNameProvider) { - return ((PermissionNameProvider) handler).getPermissionName(ctx); + if (rh instanceof PermissionNameProvider) { + return ((PermissionNameProvider) rh).getPermissionName(ctx); } return null; } @@ -345,22 +339,22 @@ public String toString() { } public static class LazyLoadedApi extends Api { + + private final PluginBag.PluginHolder holder; private Api delegate; protected LazyLoadedApi(SpecProvider specProvider, PluginBag.PluginHolder lazyPluginHolder) { super(specProvider); - delegate = new ReqHandlerToApi(lazyPluginHolder, spec); + this.holder = lazyPluginHolder; } @Override public void call(SolrQueryRequest req, SolrQueryResponse rsp) { + if (!holder.isLoaded()) { + delegate = new ReqHandlerToApi(holder.get(), ApiBag.EMPTY_SPEC); + } delegate.call(req, rsp); } - - @Override - public ValidatingJsonMap getSpec() { - return super.getSpec(); - } } } diff --git a/solr/core/src/java/org/apache/solr/api/Command.java b/solr/core/src/java/org/apache/solr/api/Command.java new file mode 100644 index 
000000000000..25de0773fccd --- /dev/null +++ b/solr/core/src/java/org/apache/solr/api/Command.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.api; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.METHOD) +public @interface Command { + /**if this is not a json command , leave it empty. + * Keep in mind that you cannot have duplicates. + * Only one method per name + * + */ + String name() default ""; + +} diff --git a/solr/core/src/java/org/apache/solr/api/EndPoint.java b/solr/core/src/java/org/apache/solr/api/EndPoint.java new file mode 100644 index 000000000000..6cbe5002a2e3 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/api/EndPoint.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.api; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.apache.solr.client.solrj.SolrRequest; +import org.apache.solr.security.PermissionNameProvider; + +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.TYPE}) +public @interface EndPoint { + SolrRequest.METHOD[] method(); + + String[] path(); + + PermissionNameProvider.Name permission(); +} diff --git a/solr/core/src/java/org/apache/solr/api/PayloadObj.java b/solr/core/src/java/org/apache/solr/api/PayloadObj.java new file mode 100644 index 000000000000..c09c4422a92e --- /dev/null +++ b/solr/core/src/java/org/apache/solr/api/PayloadObj.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.api; + +import org.apache.solr.common.util.CommandOperation; + +public class PayloadObj extends CommandOperation { + + private T obj; + + + public PayloadObj(String operationName, Object metaData, T obj) { + super(operationName, metaData); + this.obj = obj; + } + + public T get(){ + return obj; + } +} diff --git a/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java b/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java index 81cf374156c1..4a5b45d2cb58 100644 --- a/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java +++ b/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java @@ -16,6 +16,8 @@ */ package org.apache.solr.client.solrj.embedded; +import static org.apache.solr.common.params.CommonParams.PATH; + import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; @@ -24,8 +26,8 @@ import java.util.Collections; import java.util.HashSet; import java.util.Set; +import java.util.function.Supplier; -import com.google.common.base.Strings; import org.apache.commons.io.output.ByteArrayOutputStream; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrRequest; @@ -57,8 +59,6 @@ import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.servlet.SolrRequestParsers; -import static org.apache.solr.common.params.CommonParams.PATH; - /** * SolrClient that connects directly to a CoreContainer. 
* @@ -69,12 +69,27 @@ public class EmbeddedSolrServer extends SolrClient { protected final CoreContainer coreContainer; protected final String coreName; private final SolrRequestParsers _parser; + private final RequestWriterSupplier supplier; + + public enum RequestWriterSupplier { + JavaBin(() -> new BinaryRequestWriter()), XML(() -> new RequestWriter()); + + private Supplier supplier; + + private RequestWriterSupplier(final Supplier supplier) { + this.supplier = supplier; + } + + public RequestWriter newRequestWriter() { + return supplier.get(); + } + } /** * Create an EmbeddedSolrServer using a given solr home directory * * @param solrHome the solr home directory - * @param defaultCoreName the core to route requests to by default + * @param defaultCoreName the core to route requests to by default (optional) */ public EmbeddedSolrServer(Path solrHome, String defaultCoreName) { this(load(new CoreContainer(SolrXmlConfig.fromSolrHome(solrHome))), defaultCoreName); @@ -84,7 +99,7 @@ public EmbeddedSolrServer(Path solrHome, String defaultCoreName) { * Create an EmbeddedSolrServer using a NodeConfig * * @param nodeConfig the configuration - * @param defaultCoreName the core to route requests to by default + * @param defaultCoreName the core to route requests to by default (optional) */ public EmbeddedSolrServer(NodeConfig nodeConfig, String defaultCoreName) { this(load(new CoreContainer(nodeConfig)), defaultCoreName); @@ -109,17 +124,33 @@ public EmbeddedSolrServer(SolrCore core) { * {@link #close()} is called. * * @param coreContainer the core container - * @param coreName the core to route requests to by default + * @param coreName the core to route requests to by default (optional) */ public EmbeddedSolrServer(CoreContainer coreContainer, String coreName) { + this(coreContainer, coreName, RequestWriterSupplier.JavaBin); + } + + /** + * Create an EmbeddedSolrServer wrapping a CoreContainer. + *

+ * Note that EmbeddedSolrServer will shutdown the wrapped CoreContainer when {@link #close()} is called. + * + * @param coreContainer + * the core container + * @param coreName + * the core to route requests to by default + * @param supplier + * the supplier used to create a {@link RequestWriter} + */ + public EmbeddedSolrServer(CoreContainer coreContainer, String coreName, + RequestWriterSupplier supplier) { if (coreContainer == null) { throw new NullPointerException("CoreContainer instance required"); } - if (Strings.isNullOrEmpty(coreName)) - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Core name cannot be empty"); this.coreContainer = coreContainer; this.coreName = coreName; _parser = new SolrRequestParsers(null); + this.supplier = supplier; } // TODO-- this implementation sends the response to XML and then parses it. @@ -150,8 +181,13 @@ public NamedList request(SolrRequest request, String coreName) throws So } } - if (coreName == null) + if (coreName == null) { coreName = this.coreName; + if (coreName == null) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "No core specified on request and no default core has been set."); + } + } // Check for cores action SolrQueryRequest req = null; @@ -240,32 +276,41 @@ public void writeResults(ResultContext ctx, JavaBinCodec codec) throws IOExcepti private Set getContentStreams(SolrRequest request) throws IOException { if (request.getMethod() == SolrRequest.METHOD.GET) return null; if (request instanceof ContentStreamUpdateRequest) { - ContentStreamUpdateRequest csur = (ContentStreamUpdateRequest) request; - Collection cs = csur.getContentStreams(); + final ContentStreamUpdateRequest csur = (ContentStreamUpdateRequest) request; + final Collection cs = csur.getContentStreams(); if (cs != null) return new HashSet<>(cs); } - RequestWriter.ContentWriter contentWriter = request.getContentWriter(CommonParams.JAVABIN_MIME); - final String cType = contentWriter == null ? 
CommonParams.JAVABIN_MIME : contentWriter.getContentType(); - return Collections.singleton(new ContentStreamBase() { + final RequestWriter.ContentWriter contentWriter = request.getContentWriter(null); + + String cType; + final BAOS baos = new BAOS(); + if (contentWriter != null) { + contentWriter.write(baos); + cType = contentWriter.getContentType(); + } else { + final RequestWriter rw = supplier.newRequestWriter(); + cType = rw.getUpdateContentType(); + rw.write(request, baos); + } + + final byte[] buf = baos.toByteArray(); + if (buf.length > 0) { + return Collections.singleton(new ContentStreamBase() { - @Override - public InputStream getStream() throws IOException { - BAOS baos = new BAOS(); - if (contentWriter != null) { - contentWriter.write(baos); - } else { - new BinaryRequestWriter().write(request, baos); + @Override + public InputStream getStream() throws IOException { + return new ByteArrayInputStream(buf); } - return new ByteArrayInputStream(baos.toByteArray()); - } - @Override - public String getContentType() { - return cType; + @Override + public String getContentType() { + return cType; + } + }); + } - } - }); + return null; } private JavaBinCodec createJavaBinCodec(final StreamingResponseCallback callback, final BinaryResponseWriter.Resolver resolver) { diff --git a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java index d03b2b2f3d2e..dd8de23dd16b 100644 --- a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java +++ b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java @@ -490,10 +490,10 @@ public void start(boolean reusePort) throws Exception { Map prevContext = MDC.getCopyOfContextMap(); MDC.clear(); - log.info("Start Jetty (original configured port={})", this.config.port); - try { int port = reusePort && jettyPort != -1 ? 
jettyPort : this.config.port; + log.info("Start Jetty (configured port={}, binding port={})", this.config.port, port); + // if started before, make a new server if (startedBefore) { diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java index ea6886f2cbff..0bf1ec75cc31 100644 --- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java +++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java @@ -50,7 +50,6 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; -import java.util.Optional; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; @@ -69,6 +68,7 @@ import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.impl.HttpSolrClient.Builder; import org.apache.solr.client.solrj.impl.SolrClientCloudManager; +import org.apache.solr.client.solrj.impl.ZkClientClusterStateProvider; import org.apache.solr.client.solrj.request.CoreAdminRequest.WaitForState; import org.apache.solr.cloud.overseer.OverseerAction; import org.apache.solr.cloud.overseer.SliceMutator; @@ -747,7 +747,7 @@ public SolrCloudManager getSolrCloudManager() { if (cloudManager != null) { return cloudManager; } - cloudSolrClient = new CloudSolrClient.Builder(Collections.singletonList(zkServerAddress), Optional.empty()).withSocketTimeout(30000).withConnectionTimeout(15000) + cloudSolrClient = new CloudSolrClient.Builder(new ZkClientClusterStateProvider(zkStateReader)).withSocketTimeout(30000).withConnectionTimeout(15000) .withHttpClient(cc.getUpdateShardHandler().getDefaultHttpClient()) .withConnectionTimeout(15000).withSocketTimeout(30000).build(); cloudManager = new SolrClientCloudManager(new ZkDistributedQueueFactory(zkClient), cloudSolrClient); diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java 
b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java index 6bb26537b208..872068dbf73a 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java @@ -162,9 +162,10 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList resul Map collectionParams = new HashMap<>(); Map collectionProps = message.getProperties(); - for (String propName : collectionProps.keySet()) { + for (Map.Entry entry : collectionProps.entrySet()) { + String propName = entry.getKey(); if (propName.startsWith(ZkController.COLLECTION_PARAM_PREFIX)) { - collectionParams.put(propName.substring(ZkController.COLLECTION_PARAM_PREFIX.length()), (String) collectionProps.get(propName)); + collectionParams.put(propName.substring(ZkController.COLLECTION_PARAM_PREFIX.length()), (String) entry.getValue()); } } diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateShardCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateShardCmd.java index 83ff70c8ca9e..994a3e7d1411 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateShardCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateShardCmd.java @@ -93,10 +93,13 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList resul message = message.plus(ZkStateReader.SHARED_SHARD_NAME, sharedShardName); } - ZkStateReader zkStateReader = ocmh.zkStateReader; + //ZkStateReader zkStateReader = ocmh.zkStateReader; ocmh.overseer.offerStateUpdate(Utils.toJSON(message)); // wait for a while until we see the shard - ocmh.waitForNewShard(collectionName, sliceName); + //ocmh.waitForNewShard(collectionName, sliceName); + // wait for a while until we see the shard and update the local view of the cluster state + clusterState = ocmh.waitForNewShard(collectionName, sliceName); + String async = 
message.getStr(ASYNC); ZkNodeProps addReplicasProps = new ZkNodeProps( COLLECTION_PROP, collectionName, @@ -114,7 +117,8 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList resul if (async != null) addReplicasProps.getProperties().put(ASYNC, async); final NamedList addResult = new NamedList(); try { - ocmh.addReplica(zkStateReader.getClusterState(), addReplicasProps, addResult, () -> { + //ocmh.addReplica(zkStateReader.getClusterState(), addReplicasProps, addResult, () -> { + ocmh.addReplica(clusterState, addReplicasProps, addResult, () -> { Object addResultFailure = addResult.get("failure"); if (addResultFailure != null) { SimpleOrderedMap failure = (SimpleOrderedMap) results.get("failure"); diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java index 4e462f638291..9d5a049b0609 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java @@ -218,7 +218,9 @@ private void moveHdfsReplica(ClusterState clusterState, NamedList results, Strin WAIT_FOR_FINAL_STATE, String.valueOf(waitForFinalState), SKIP_CREATE_REPLICA_IN_CLUSTER_STATE, skipCreateReplicaInClusterState, CoreAdminParams.ULOG_DIR, ulogDir.substring(0, ulogDir.lastIndexOf(UpdateLog.TLOG_NAME)), - CoreAdminParams.DATA_DIR, dataDir); + CoreAdminParams.DATA_DIR, dataDir, + ZkStateReader.REPLICA_TYPE, replica.getType().name()); + if(async!=null) addReplicasProps.getProperties().put(ASYNC, async); NamedList addResult = new NamedList(); try { @@ -272,7 +274,9 @@ private void moveNormalReplica(ClusterState clusterState, NamedList results, Str COLLECTION_PROP, coll.getName(), SHARD_ID_PROP, slice.getName(), CoreAdminParams.NODE, targetNode, - CoreAdminParams.NAME, newCoreName); + CoreAdminParams.NAME, newCoreName, + ZkStateReader.REPLICA_TYPE, 
replica.getType().name()); + if (async != null) addReplicasProps.getProperties().put(ASYNC, async); NamedList addResult = new NamedList(); SolrCloseableLatch countDownLatch = new SolrCloseableLatch(1, ocmh); diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerCollectionMessageHandler.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerCollectionMessageHandler.java index a7ff948ba08a..ddfe0c075f47 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerCollectionMessageHandler.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerCollectionMessageHandler.java @@ -575,12 +575,14 @@ String waitForCoreNodeName(String collectionName, String msgNodeName, String msg throw new SolrException(ErrorCode.SERVER_ERROR, "Could not find coreNodeName"); } - void waitForNewShard(String collectionName, String sliceName) throws KeeperException, InterruptedException { + ClusterState waitForNewShard(String collectionName, String sliceName) throws KeeperException, InterruptedException { log.debug("Waiting for slice {} of collection {} to be available", sliceName, collectionName); RTimer timer = new RTimer(); int retryCount = 320; while (retryCount-- > 0) { - DocCollection collection = zkStateReader.getClusterState().getCollection(collectionName); + ClusterState clusterState = zkStateReader.getClusterState(); + DocCollection collection = clusterState.getCollection(collectionName); + if (collection == null) { throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to find collection: " + collectionName + " in clusterstate"); @@ -589,7 +591,7 @@ void waitForNewShard(String collectionName, String sliceName) throws KeeperExcep if (slice != null) { log.debug("Waited for {}ms for slice {} of collection {} to be available", timer.getTime(), sliceName, collectionName); - return; + return clusterState; } Thread.sleep(1000); } diff --git 
a/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java index 40dd86447744..2c2f1d0eebc4 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java @@ -324,11 +324,8 @@ public boolean split(ClusterState clusterState, ZkNodeProps message, NamedList props = new HashMap<>(aliases.getCollectionAliasProperties(aliasName)); + start = DateTimeFormatter.ISO_INSTANT.format(startTime); + props.put(ROUTER_START, start); + + // This could race, but it only occurs when the alias is first used and the values produced + // should all be identical and who wins won't matter (baring cases of Date Math involving seconds, + // which is pretty far fetched). Putting this in a separate thread to ensure that any failed + // races don't cause documents to get rejected. + core.runAsync(() -> zkStateReader.aliasesManager.applyModificationAndExportToZk( + (a) -> aliases.cloneWithCollectionAliasProperties(aliasName, props))); + + } if (docTimestamp.isBefore(startTime)) { throw new SolrException(BAD_REQUEST, "The document couldn't be routed because " + docTimestamp + " is before the start time for this alias " +start+")"); diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java index 6179bcc58036..2a7e026e2ad1 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java @@ -22,6 +22,8 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Locale; +import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; @@ -38,6 +40,8 @@ import org.apache.solr.common.cloud.ZkStateReader; 
import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.Utils; +import org.apache.solr.core.SolrResourceLoader; +import org.apache.solr.util.TestInjection; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.slf4j.Logger; @@ -52,6 +56,24 @@ public class ExecutePlanAction extends TriggerActionBase { private static final String PREFIX = "op-"; static final int DEFAULT_TASK_TIMEOUT_SECONDS = 120; + public static final String TASK_TIMEOUT_SECONDS = "taskTimeoutSeconds"; + public static final String TASK_TIMEOUT_FAIL = "taskTimeoutFail"; + + int taskTimeoutSeconds; + boolean taskTimeoutFail; + + public ExecutePlanAction() { + TriggerUtils.validProperties(validProperties, TASK_TIMEOUT_SECONDS, TASK_TIMEOUT_FAIL); + } + + @Override + public void configure(SolrResourceLoader loader, SolrCloudManager cloudManager, Map properties) throws TriggerValidationException { + super.configure(loader, cloudManager, properties); + String str = String.valueOf(properties.getOrDefault(TASK_TIMEOUT_SECONDS, DEFAULT_TASK_TIMEOUT_SECONDS)); + taskTimeoutSeconds = Integer.parseInt(str); + str = String.valueOf(properties.getOrDefault(TASK_TIMEOUT_FAIL, false)); + taskTimeoutFail = Boolean.parseBoolean(str); + } @Override public void process(TriggerEvent event, ActionContext context) throws Exception { @@ -63,11 +85,11 @@ public void process(TriggerEvent event, ActionContext context) throws Exception return; } try { + int counter = 0; for (SolrRequest operation : operations) { log.debug("Executing operation: {}", operation.getParams()); try { SolrResponse response = null; - int counter = 0; if (operation instanceof CollectionAdminRequest.AsyncCollectionAdminRequest) { CollectionAdminRequest.AsyncCollectionAdminRequest req = (CollectionAdminRequest.AsyncCollectionAdminRequest) operation; // waitForFinalState so that the end effects of operations are visible @@ -77,16 +99,34 @@ public void process(TriggerEvent event, 
ActionContext context) throws Exception log.trace("Saved requestId: {} in znode: {}", asyncId, znode); // TODO: find a better way of using async calls using dataProvider API !!! req.setAsyncId(asyncId); - SolrResponse asyncResponse = cloudManager.request(req); - if (asyncResponse.getResponse().get("error") != null) { - throw new IOException("" + asyncResponse.getResponse().get("error")); + if (TestInjection.delayInExecutePlanAction != null) { + cloudManager.getTimeSource().sleep(TestInjection.delayInExecutePlanAction); + } + CollectionAdminRequest.RequestStatusResponse statusResponse = null; + RequestStatusState state = RequestStatusState.FAILED; + if (!TestInjection.failInExecutePlanAction) { + SolrResponse asyncResponse = cloudManager.request(req); + if (asyncResponse.getResponse().get("error") != null) { + throw new IOException("" + asyncResponse.getResponse().get("error")); + } + asyncId = (String)asyncResponse.getResponse().get("requestid"); + statusResponse = waitForTaskToFinish(cloudManager, asyncId, + taskTimeoutSeconds, TimeUnit.SECONDS); } - asyncId = (String)asyncResponse.getResponse().get("requestid"); - CollectionAdminRequest.RequestStatusResponse statusResponse = waitForTaskToFinish(cloudManager, asyncId, - DEFAULT_TASK_TIMEOUT_SECONDS, TimeUnit.SECONDS); if (statusResponse != null) { - RequestStatusState state = statusResponse.getRequestStatus(); + state = statusResponse.getRequestStatus(); + // overwrite to test a long-running task + if (TestInjection.delayInExecutePlanAction != null && + TestInjection.delayInExecutePlanAction > TimeUnit.MILLISECONDS.convert(taskTimeoutSeconds, TimeUnit.SECONDS)) { + state = RequestStatusState.RUNNING; + } + if (TestInjection.failInExecutePlanAction) { + state = RequestStatusState.FAILED; + } + // should we accept partial success here? i.e. 
some operations won't be completed + // successfully but the event processing will still be declared a success if (state == RequestStatusState.COMPLETED || state == RequestStatusState.FAILED || state == RequestStatusState.NOT_FOUND) { + // remove pending task marker for this request try { cloudManager.getDistribStateManager().removeData(znode, -1); } catch (Exception e) { @@ -95,7 +135,26 @@ public void process(TriggerEvent event, ActionContext context) throws Exception } response = statusResponse; } + if (state == RequestStatusState.RUNNING || state == RequestStatusState.SUBMITTED) { + String msg = String.format(Locale.ROOT, "Task %s is still running after " + taskTimeoutSeconds + " seconds. Consider increasing " + + TASK_TIMEOUT_SECONDS + " action property or `waitFor` of the trigger %s. Operation: %s", + asyncId, event.source, req); + if (taskTimeoutFail) { + throw new IOException(msg); + } else { + log.warn(msg); + } + } else if (state == RequestStatusState.FAILED) { + // remove it as a pending task + try { + cloudManager.getDistribStateManager().removeData(znode, -1); + } catch (Exception e) { + log.warn("Unexpected exception while trying to delete znode: " + znode, e); + } + throw new IOException("Task " + asyncId + " failed: " + (statusResponse != null ? statusResponse : " timed out. 
Operation: " + req)); + } } else { + // generic response - can't easily determine success or failure response = cloudManager.request(operation); } NamedList result = response.getResponse(); @@ -105,6 +164,7 @@ public void process(TriggerEvent event, ActionContext context) throws Exception responses.add(result); return responses; }); + counter++; } catch (IOException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unexpected exception executing operation: " + operation.getParams(), e); @@ -160,12 +220,14 @@ static CollectionAdminRequest.RequestStatusResponse waitForTaskToFinish(SolrClou } cloudManager.getTimeSource().sleep(5000); } - log.debug("Task with requestId={} did not complete within 5 minutes. Last state={}", requestId, state); + log.debug("Task with requestId={} did not complete within {} seconds. Last state={}", timeoutSeconds, requestId, state); return statusResponse; } /** - * Saves the given asyncId in ZK as a persistent sequential node. + * Saves the given asyncId in ZK as a persistent sequential node. This allows us to wait for the completion + * of pending tasks from this event in {@link ScheduledTriggers} + * before starting the actions of the next event. 
* * @return the path of the newly created node in ZooKeeper */ diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveMarkersPlanAction.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveMarkersPlanAction.java index b499d8de9707..6d0b8aa53274 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveMarkersPlanAction.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveMarkersPlanAction.java @@ -37,6 +37,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_ACTIVE; +import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_STATE; + /** * This plan simply removes nodeAdded and nodeLost markers from Zookeeper if their TTL has * expired. These markers are used by {@link NodeAddedTrigger} and {@link NodeLostTrigger} to @@ -105,12 +108,14 @@ private void cleanupMarkers(String path, long currentTimeNs, Set cleaned log.trace(" -- ignore {}: either missing or unsupported format", markerPath); return; } + boolean activeMarker = payload.getOrDefault(MARKER_STATE, MARKER_ACTIVE) + .equals(MARKER_ACTIVE); long timestamp = ((Number)payload.get("timestamp")).longValue(); long delta = TimeUnit.NANOSECONDS.toSeconds(currentTimeNs - timestamp); - if (delta > cleanupTTL) { + if (delta > cleanupTTL || !activeMarker) { try { stateManager.removeData(markerPath, -1); - log.trace(" -- remove {}, delta={}, ttl={}", markerPath, delta, cleanupTTL); + log.trace(" -- remove {}, delta={}, ttl={}, active={}", markerPath, delta, cleanupTTL, activeMarker); cleanedUp.add(m); } catch (NoSuchElementException nse) { // someone already removed it - ignore @@ -121,7 +126,7 @@ private void cleanupMarkers(String path, long currentTimeNs, Set cleaned log.error("Marker znode should be empty but it's not! 
Ignoring {} ({})", markerPath, ne.toString()); } } else { - log.trace(" -- keep {}, delta={}, ttl={}", markerPath, delta, cleanupTTL); + log.trace(" -- keep {}, delta={}, ttl={}, active={}", markerPath, delta, cleanupTTL, activeMarker); } } catch (InterruptedException e) { Thread.currentThread().interrupt(); diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java index d2d1ae7bfea6..f32669c7e8db 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java @@ -24,7 +24,6 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.TreeMap; diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java index 6b87fc323827..e150bf981f5b 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java @@ -17,6 +17,7 @@ package org.apache.solr.cloud.autoscaling; +import java.io.IOException; import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Collection; @@ -36,10 +37,15 @@ import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.params.CollectionParams; +import org.apache.solr.common.util.Utils; import org.apache.solr.core.SolrResourceLoader; +import org.apache.zookeeper.KeeperException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_ACTIVE; +import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_INACTIVE; +import static 
org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_STATE; import static org.apache.solr.common.params.AutoScalingParams.PREFERRED_OP; import static org.apache.solr.common.params.AutoScalingParams.REPLICA_TYPE; @@ -71,6 +77,16 @@ public void init() throws Exception { try { List added = stateManager.listData(ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH); added.forEach(n -> { + String markerPath = ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH + "/" + n; + try { + Map markerData = Utils.getJson(stateManager, markerPath); + // skip inactive markers + if (markerData.getOrDefault(MARKER_STATE, MARKER_ACTIVE).equals(MARKER_INACTIVE)) { + return; + } + } catch (InterruptedException | IOException | KeeperException e) { + log.debug("-- ignoring marker " + markerPath + " state due to error", e); + } // don't add nodes that have since gone away if (lastLiveNodes.contains(n) && !nodeNameVsTimeAdded.containsKey(n)) { // since {@code #restoreState(AutoScaling.Trigger)} is called first, the timeAdded for a node may also be restored diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java index 047db9061e8e..a1b9168b66d6 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java @@ -17,6 +17,7 @@ package org.apache.solr.cloud.autoscaling; +import java.io.IOException; import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Collection; @@ -36,10 +37,15 @@ import org.apache.solr.common.SolrException; import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.params.CollectionParams; +import org.apache.solr.common.util.Utils; import org.apache.solr.core.SolrResourceLoader; +import org.apache.zookeeper.KeeperException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static 
org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_ACTIVE; +import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_INACTIVE; +import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_STATE; import static org.apache.solr.common.params.AutoScalingParams.PREFERRED_OP; /** @@ -68,6 +74,16 @@ public void init() throws Exception { try { List lost = stateManager.listData(ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH); lost.forEach(n -> { + String markerPath = ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH + "/" + n; + try { + Map markerData = Utils.getJson(stateManager, markerPath); + // skip inactive markers + if (markerData.getOrDefault(MARKER_STATE, MARKER_ACTIVE).equals(MARKER_INACTIVE)) { + return; + } + } catch (InterruptedException | IOException | KeeperException e) { + log.debug("-- ignoring marker " + markerPath + " state due to error", e); + } // don't add nodes that have since came back if (!lastLiveNodes.contains(n) && !nodeNameVsTimeRemoved.containsKey(n)) { // since {@code #restoreState(AutoScaling.Trigger)} is called first, the timeRemoved for a node may also be restored @@ -149,7 +165,9 @@ public void run() { Set newLiveNodes = new HashSet<>(cloudManager.getClusterStateProvider().getLiveNodes()); log.debug("Running NodeLostTrigger: {} with currently live nodes: {} and last live nodes: {}", name, newLiveNodes.size(), lastLiveNodes.size()); - + log.trace("Current Live Nodes for {}: {}", name, newLiveNodes); + log.trace("Last Live Nodes for {}: {}", name, lastLiveNodes); + // have any nodes that we were tracking been added to the cluster? 
// if so, remove them from the tracking map Set trackingKeySet = nodeNameVsTimeRemoved.keySet(); @@ -191,6 +209,7 @@ public void run() { log.debug("NodeLostTrigger processor for lost nodes: {} is not ready, will try later", nodeNames); } } else { + log.debug("NodeLostTrigger firing, but no processor - so removing lost nodes: {}", nodeNames); nodeNames.forEach(n -> { nodeNameVsTimeRemoved.remove(n); }); diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/OverseerTriggerThread.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/OverseerTriggerThread.java index 575862700a36..a73743c8e872 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/OverseerTriggerThread.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/OverseerTriggerThread.java @@ -22,12 +22,14 @@ import java.net.ConnectException; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.Set; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.ReentrantLock; +import org.apache.solr.client.solrj.cloud.DistribStateManager; import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig; import org.apache.solr.client.solrj.cloud.autoscaling.BadVersionException; import org.apache.solr.client.solrj.cloud.SolrCloudManager; @@ -55,6 +57,11 @@ public class OverseerTriggerThread implements Runnable, SolrCloseable { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + public static final String MARKER_STATE = "state"; + public static final String MARKER_ACTIVE = "active"; + public static final String MARKER_INACTIVE = "inactive"; + + private final SolrCloudManager cloudManager; private final CloudConfig cloudConfig; @@ -252,20 +259,31 @@ public void run() { throw new IllegalStateException("Caught AlreadyClosedException from ScheduledTriggers, but we're not closed yet!", e); } } - 
log.debug("-- cleaning old nodeLost / nodeAdded markers"); - removeMarkers(ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH); - removeMarkers(ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH); + log.debug("-- deactivating old nodeLost / nodeAdded markers"); + deactivateMarkers(ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH); + deactivateMarkers(ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH); processedZnodeVersion = znodeVersion; } } - private void removeMarkers(String path) { + private void deactivateMarkers(String path) { + DistribStateManager stateManager = cloudManager.getDistribStateManager(); try { - cloudManager.getDistribStateManager().removeRecursively(path, true, false); + List markers = stateManager.listData(path); + for (String marker : markers) { + String markerPath = path + "/" + marker; + try { + Map markerMap = new HashMap<>(Utils.getJson(stateManager, markerPath)); + markerMap.put(MARKER_STATE, MARKER_INACTIVE); + stateManager.setData(markerPath, Utils.toJSON(markerMap), -1); + } catch (NoSuchElementException e) { + // ignore - already deleted + } + } } catch (NoSuchElementException e) { // ignore } catch (Exception e) { - log.warn("Error removing old markers", e); + log.warn("Error deactivating old markers", e); } } diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java index 6e27599b4d6e..a029ac30f6dc 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java @@ -155,7 +155,7 @@ public class SimCloudManager implements SolrCloudManager { private boolean useSystemCollection = true; private static int nodeIdPort = 10000; - public static int DEFAULT_FREE_DISK = 1024; // 1000 GiB + public static int DEFAULT_FREE_DISK = 10240; // 10 TiB public static int DEFAULT_TOTAL_DISK = 10240; // 10 TiB public static long 
DEFAULT_IDX_SIZE_BYTES = 10240; // 10 kiB @@ -382,6 +382,10 @@ public static Map createNodeValues(String nodeName) { return values; } + public void disableMetricsHistory() { + metricsHistoryHandler.close(); + } + public String dumpClusterState(boolean withCollections) throws Exception { StringBuilder sb = new StringBuilder(); sb.append("#######################################\n"); diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java index d12f3b7757b4..f1dda228ae86 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java @@ -130,7 +130,7 @@ public class SimClusterStateProvider implements ClusterStateProvider { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - public static final long DEFAULT_DOC_SIZE_BYTES = 500; + public static final long DEFAULT_DOC_SIZE_BYTES = 2048; private static final String BUFFERED_UPDATES = "__buffered_updates__"; @@ -1541,17 +1541,18 @@ public UpdateResponse simUpdate(UpdateRequest req) throws SolrException, Interru throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Collection not set"); } ensureSystemCollection(collection); - DocCollection coll = getClusterState().getCollection(collection); DocRouter router = coll.getRouter(); List deletes = req.getDeleteById(); + Map freediskDeltaPerNode = new HashMap<>(); if (deletes != null && !deletes.isEmpty()) { + Map deletesPerShard = new HashMap<>(); + Map indexSizePerShard = new HashMap<>(); for (String id : deletes) { Slice s = router.getTargetSlice(id, null, null, req.getParams(), coll); Replica leader = s.getLeader(); if (leader == null) { - log.debug("-- no leader in " + s); - continue; + throw new IOException("-- no leader in " + s); } 
cloudManager.getMetricManager().registry(createRegistryName(collection, s.getName(), leader)).counter("UPDATE./update.requests").inc(); ReplicaInfo ri = getReplicaInfo(leader); @@ -1560,6 +1561,13 @@ public UpdateResponse simUpdate(UpdateRequest req) throws SolrException, Interru log.debug("-- attempting to delete nonexistent doc " + id + " from " + s.getLeader()); continue; } + + // this is somewhat wrong - we should wait until buffered updates are applied + // but this way the freedisk changes are much easier to track + s.getReplicas().forEach(r -> + freediskDeltaPerNode.computeIfAbsent(r.getNodeName(), node -> new AtomicLong(0)) + .addAndGet(DEFAULT_DOC_SIZE_BYTES)); + AtomicLong bufferedUpdates = (AtomicLong)sliceProperties.get(collection).get(s.getName()).get(BUFFERED_UPDATES); if (bufferedUpdates != null) { if (bufferedUpdates.get() > 0) { @@ -1569,19 +1577,33 @@ public UpdateResponse simUpdate(UpdateRequest req) throws SolrException, Interru } continue; } + deletesPerShard.computeIfAbsent(s.getName(), slice -> new AtomicLong(0)).incrementAndGet(); + Number indexSize = (Number)ri.getVariable(Type.CORE_IDX.metricsAttribute); + if (indexSize != null) { + indexSizePerShard.put(s.getName(), indexSize); + } + } + if (!deletesPerShard.isEmpty()) { lock.lockInterruptibly(); try { - simSetShardValue(collection, s.getName(), "SEARCHER.searcher.deletedDocs", 1, true, false); - simSetShardValue(collection, s.getName(), "SEARCHER.searcher.numDocs", -1, true, false); - Number indexSize = (Number)ri.getVariable(Type.CORE_IDX.metricsAttribute); - if (indexSize != null && indexSize.longValue() > SimCloudManager.DEFAULT_IDX_SIZE_BYTES) { - indexSize = indexSize.longValue() - DEFAULT_DOC_SIZE_BYTES; - simSetShardValue(collection, s.getName(), Type.CORE_IDX.metricsAttribute, - new AtomicLong(indexSize.longValue()), false, false); - simSetShardValue(collection, s.getName(), Variable.coreidxsize, - new AtomicDouble((Double)Type.CORE_IDX.convertVal(indexSize)), false, false); - } 
else { - throw new Exception("unexpected indexSize ri=" + ri); + for (Map.Entry entry : deletesPerShard.entrySet()) { + String shard = entry.getKey(); + simSetShardValue(collection, shard, "SEARCHER.searcher.deletedDocs", entry.getValue().get(), true, false); + simSetShardValue(collection, shard, "SEARCHER.searcher.numDocs", -entry.getValue().get(), true, false); + Number indexSize = indexSizePerShard.get(shard); + long delSize = DEFAULT_DOC_SIZE_BYTES * entry.getValue().get(); + if (indexSize != null) { + indexSize = indexSize.longValue() - delSize; + if (indexSize.longValue() < SimCloudManager.DEFAULT_IDX_SIZE_BYTES) { + indexSize = SimCloudManager.DEFAULT_IDX_SIZE_BYTES; + } + simSetShardValue(collection, shard, Type.CORE_IDX.metricsAttribute, + new AtomicLong(indexSize.longValue()), false, false); + simSetShardValue(collection, shard, Variable.coreidxsize, + new AtomicDouble((Double)Type.CORE_IDX.convertVal(indexSize)), false, false); + } else { + throw new Exception("unexpected indexSize for collection=" + collection + ", shard=" + shard + ": " + indexSize); + } } } catch (Exception e) { throw new IOException(e); @@ -1596,11 +1618,11 @@ public UpdateResponse simUpdate(UpdateRequest req) throws SolrException, Interru if (!"*:*".equals(q)) { throw new UnsupportedOperationException("Only '*:*' query is supported in deleteByQuery"); } + //log.debug("-- req delByQ " + collection); for (Slice s : coll.getSlices()) { Replica leader = s.getLeader(); if (leader == null) { - log.debug("-- no leader in " + s); - continue; + throw new IOException("-- no leader in " + s); } cloudManager.getMetricManager().registry(createRegistryName(collection, s.getName(), leader)).counter("UPDATE./update.requests").inc(); @@ -1611,6 +1633,16 @@ public UpdateResponse simUpdate(UpdateRequest req) throws SolrException, Interru } lock.lockInterruptibly(); try { + Number indexSize = (Number)ri.getVariable(Type.CORE_IDX.metricsAttribute); + if (indexSize != null) { + long delta = 
indexSize.longValue() < SimCloudManager.DEFAULT_IDX_SIZE_BYTES ? 0 : + indexSize.longValue() - SimCloudManager.DEFAULT_IDX_SIZE_BYTES; + s.getReplicas().forEach(r -> + freediskDeltaPerNode.computeIfAbsent(r.getNodeName(), node -> new AtomicLong(0)) + .addAndGet(delta)); + } else { + throw new RuntimeException("Missing index size in " + ri); + } simSetShardValue(collection, s.getName(), "SEARCHER.searcher.deletedDocs", new AtomicLong(numDocs.longValue()), false, false); simSetShardValue(collection, s.getName(), "SEARCHER.searcher.numDocs", new AtomicLong(0), false, false); simSetShardValue(collection, s.getName(), Type.CORE_IDX.metricsAttribute, @@ -1640,6 +1672,7 @@ public UpdateResponse simUpdate(UpdateRequest req) throws SolrException, Interru } } if (docCount > 0) { + //log.debug("-- req update " + collection + " / " + docCount); // this approach to updating counters and metrics drastically increases performance // of bulk updates, because simSetShardValue is relatively costly @@ -1686,13 +1719,16 @@ public UpdateResponse simUpdate(UpdateRequest req) throws SolrException, Interru Slice s = slices[i]; Replica leader = s.getLeader(); if (leader == null) { - log.debug("-- no leader in " + s); - continue; + throw new IOException("-- no leader in " + s); } metricUpdates.computeIfAbsent(s.getName(), sh -> new HashMap<>()) .computeIfAbsent(leader.getCoreName(), cn -> new AtomicLong()) .addAndGet(perSlice[i]); modified = true; + long perSliceCount = perSlice[i]; + s.getReplicas().forEach(r -> + freediskDeltaPerNode.computeIfAbsent(r.getNodeName(), node -> new AtomicLong(0)) + .addAndGet(-perSliceCount * DEFAULT_DOC_SIZE_BYTES)); AtomicLong bufferedUpdates = (AtomicLong)sliceProperties.get(collection).get(s.getName()).get(BUFFERED_UPDATES); if (bufferedUpdates != null) { bufferedUpdates.addAndGet(perSlice[i]); @@ -1711,13 +1747,15 @@ public UpdateResponse simUpdate(UpdateRequest req) throws SolrException, Interru Slice s = coll.getRouter().getTargetSlice(id, doc, null, 
null, coll); Replica leader = s.getLeader(); if (leader == null) { - log.debug("-- no leader in " + s); - continue; + throw new IOException("-- no leader in " + s); } metricUpdates.computeIfAbsent(s.getName(), sh -> new HashMap<>()) .computeIfAbsent(leader.getCoreName(), cn -> new AtomicLong()) .incrementAndGet(); modified = true; + s.getReplicas().forEach(r -> + freediskDeltaPerNode.computeIfAbsent(r.getNodeName(), node -> new AtomicLong()) + .addAndGet(-DEFAULT_DOC_SIZE_BYTES)); AtomicLong bufferedUpdates = (AtomicLong)sliceProperties.get(collection).get(s.getName()).get(BUFFERED_UPDATES); if (bufferedUpdates != null) { bufferedUpdates.incrementAndGet(); @@ -1755,6 +1793,32 @@ public UpdateResponse simUpdate(UpdateRequest req) throws SolrException, Interru lock.unlock(); } } + if (!freediskDeltaPerNode.isEmpty()) { + SimNodeStateProvider nodeStateProvider = cloudManager.getSimNodeStateProvider(); + freediskDeltaPerNode.forEach((node, delta) -> { + if (delta.get() == 0) { + return; + } + try { + // this method does its own locking to prevent races + nodeStateProvider.simUpdateNodeValue(node, Type.FREEDISK.tagName, val -> { + if (val == null) { + throw new RuntimeException("no freedisk for node " + node); + } + double freedisk = ((Number) val).doubleValue(); + double deltaGB = (Double) Type.FREEDISK.convertVal(delta.get()); + freedisk += deltaGB; + if (freedisk < 0) { + log.warn("-- freedisk=" + freedisk + " - ran out of disk space on node " + node); + freedisk = 0; + } + return freedisk; + }); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } SolrParams params = req.getParams(); if (params != null && (params.getBool(UpdateParams.OPTIMIZE, false) || params.getBool(UpdateParams.EXPUNGE_DELETES, false))) { lock.lockInterruptibly(); diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java index 9a5656e35419..e1df6fd21586 
100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java @@ -28,6 +28,7 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReentrantLock; +import java.util.function.Function; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -80,6 +81,23 @@ public Object simGetNodeValue(String node, String key) { return values.get(key); } + /** + * Atomically update a node value. + * @param node node id + * @param key property name + * @param updater updater function + * @return previous property value or null if property or node didn't exist. + */ + public Object simUpdateNodeValue(String node, String key, Function updater) throws InterruptedException { + lock.lockInterruptibly(); + try { + Map values = nodeValues.computeIfAbsent(node, n -> new ConcurrentHashMap<>()); + return values.put(key, updater.apply(values.get(key))); + } finally { + lock.unlock(); + } + } + /** * Set node values. * NOTE: if values contain 'nodeRole' key then /roles.json is updated. 
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimUtils.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimUtils.java index acfaa0f17bb8..1c5d606e9b98 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimUtils.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimUtils.java @@ -16,6 +16,9 @@ */ package org.apache.solr.cloud.autoscaling.sim; +import java.lang.invoke.MethodHandles; +import java.net.MalformedURLException; +import java.net.URL; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -25,11 +28,13 @@ import java.util.Map; import java.util.Set; import java.util.TreeMap; +import java.util.TreeSet; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import org.apache.solr.client.solrj.cloud.SolrCloudManager; import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig; +import org.apache.solr.client.solrj.cloud.autoscaling.Cell; import org.apache.solr.client.solrj.cloud.autoscaling.Policy; import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo; import org.apache.solr.client.solrj.cloud.autoscaling.Row; @@ -41,11 +46,17 @@ import org.apache.solr.common.params.CollectionAdminParams; import org.apache.solr.common.params.CoreAdminParams; import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.util.Utils; +import org.apache.solr.util.RedactionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Various utility methods useful for autoscaling simulations and snapshots. 
*/ public class SimUtils { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + public static final Set COMMON_REPLICA_TAGS = new HashSet<>(Arrays.asList( Variable.Type.CORE_IDX.metricsAttribute, @@ -231,10 +242,13 @@ public static Map calculateStats(SolrCloudManager cloudManager, for (Row row : rows) { Map nodeStat = nodeStats.computeIfAbsent(row.node, n -> new LinkedHashMap<>()); nodeStat.put("isLive", row.isLive()); - nodeStat.put("freedisk", row.getVal("freedisk", 0)); - nodeStat.put("totaldisk", row.getVal("totaldisk", 0)); + for (Cell cell : row.getCells()) { + nodeStat.put(cell.getName(), cell.getValue()); + } +// nodeStat.put("freedisk", row.getVal("freedisk", 0)); +// nodeStat.put("totaldisk", row.getVal("totaldisk", 0)); int cores = ((Number)row.getVal("cores", 0)).intValue(); - nodeStat.put("cores", cores); +// nodeStat.put("cores", cores); coreStats.computeIfAbsent(cores, num -> new AtomicInteger()).incrementAndGet(); Map>> collReplicas = new TreeMap<>(); // check consistency @@ -351,4 +365,32 @@ public static ModifiableSolrParams v2AdminRequestToV1Params(V2Request req) { params.add(CoreAdminParams.ACTION, a); return params; } + + /** + * Prepare collection and node / host names for redaction. 
+ * @param clusterState cluster state + */ + public static RedactionUtils.RedactionContext getRedactionContext(ClusterState clusterState) { + RedactionUtils.RedactionContext ctx = new RedactionUtils.RedactionContext(); + TreeSet names = new TreeSet<>(clusterState.getLiveNodes()); + for (String nodeName : names) { + String urlString = Utils.getBaseUrlForNodeName(nodeName, "http"); + try { + URL u = new URL(urlString); + // protocol format + String hostPort = u.getHost() + ":" + u.getPort(); + ctx.addName(u.getHost() + ":" + u.getPort(), RedactionUtils.NODE_REDACTION_PREFIX); + // node name format + ctx.addEquivalentName(hostPort, u.getHost() + "_" + u.getPort() + "_", RedactionUtils.NODE_REDACTION_PREFIX); + } catch (MalformedURLException e) { + log.warn("Invalid URL for node name " + nodeName + ", replacing including protocol and path", e); + ctx.addName(urlString, RedactionUtils.NODE_REDACTION_PREFIX); + ctx.addEquivalentName(urlString, Utils.getBaseUrlForNodeName(nodeName, "https"), RedactionUtils.NODE_REDACTION_PREFIX); + } + } + names.clear(); + names.addAll(clusterState.getCollectionStates().keySet()); + names.forEach(n -> ctx.addName(n, RedactionUtils.COLL_REDACTION_PREFIX)); + return ctx; + } } diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotCloudManager.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotCloudManager.java index a0a20fda65b1..c821b57c744d 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotCloudManager.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotCloudManager.java @@ -46,11 +46,13 @@ import org.apache.solr.client.solrj.cloud.autoscaling.Suggester; import org.apache.solr.client.solrj.impl.ClusterStateProvider; import org.apache.solr.client.solrj.request.V2Request; +import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.params.CollectionAdminParams; import org.apache.solr.common.params.SolrParams; import 
org.apache.solr.common.util.ObjectCache; import org.apache.solr.common.util.TimeSource; import org.apache.solr.common.util.Utils; +import org.apache.solr.util.RedactionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -71,8 +73,9 @@ public class SnapshotCloudManager implements SolrCloudManager { public static final String DISTRIB_STATE_KEY = "distribState"; public static final String AUTOSCALING_STATE_KEY = "autoscalingState"; public static final String STATISTICS_STATE_KEY = "statistics"; + public static final String AUTOSCALING_JSON_KEY = "autoscaling"; - private static final List REQUIRED_KEYS = Arrays.asList( + public static final List REQUIRED_KEYS = Arrays.asList( MANAGER_STATE_KEY, CLUSTER_STATE_KEY, NODE_STATE_KEY, @@ -93,16 +96,25 @@ public SnapshotCloudManager(Map snapshot) throws Exception { (Map)snapshot.getOrDefault(MANAGER_STATE_KEY, Collections.emptyMap()), (Map)snapshot.getOrDefault(CLUSTER_STATE_KEY, Collections.emptyMap()), (Map)snapshot.getOrDefault(NODE_STATE_KEY, Collections.emptyMap()), - (Map)snapshot.getOrDefault(DISTRIB_STATE_KEY, Collections.emptyMap()) + (Map)snapshot.getOrDefault(DISTRIB_STATE_KEY, Collections.emptyMap()), + (Map)snapshot.getOrDefault(AUTOSCALING_JSON_KEY, Collections.emptyMap()) ); } - public void saveSnapshot(File targetDir, boolean withAutoscaling) throws Exception { - Map snapshot = getSnapshot(withAutoscaling); + public void saveSnapshot(File targetDir, boolean withAutoscaling, boolean redact) throws Exception { + Map snapshot = getSnapshot(withAutoscaling, redact); + ClusterState clusterState = getClusterStateProvider().getClusterState(); + RedactionUtils.RedactionContext ctx = SimUtils.getRedactionContext(clusterState); targetDir.mkdirs(); for (Map.Entry e : snapshot.entrySet()) { FileOutputStream out = new FileOutputStream(new File(targetDir, e.getKey() + ".json")); - IOUtils.write(Utils.toJSON(e.getValue()), out); + if (redact) { + String data = Utils.toJSONString(e.getValue()); + data = 
RedactionUtils.redactNames(ctx.getRedactions(), data); + IOUtils.write(data.getBytes("UTF-8"), out); + } else { + IOUtils.write(Utils.toJSON(e.getValue()), out); + } out.flush(); out.close(); } @@ -116,15 +128,19 @@ public static SnapshotCloudManager readSnapshot(File sourceDir) throws Exception throw new Exception("Source path is not a directory: " + sourceDir); } Map snapshot = new HashMap<>(); + List allKeys = new ArrayList<>(REQUIRED_KEYS); + allKeys.add(AUTOSCALING_JSON_KEY); int validData = 0; - for (String key : REQUIRED_KEYS) { + for (String key : allKeys) { File src = new File(sourceDir, key + ".json"); if (src.exists()) { InputStream is = new FileInputStream(src); Map data = (Map)Utils.fromJSON(is); is.close(); snapshot.put(key, data); - validData++; + if (REQUIRED_KEYS.contains(key)) { + validData++; + } } } if (validData < REQUIRED_KEYS.size()) { @@ -134,7 +150,7 @@ public static SnapshotCloudManager readSnapshot(File sourceDir) throws Exception } private void init(Map managerState, Map clusterState, Map nodeState, - Map distribState) throws Exception { + Map distribState, Map autoscalingJson) throws Exception { Objects.requireNonNull(managerState); Objects.requireNonNull(clusterState); Objects.requireNonNull(nodeState); @@ -142,20 +158,24 @@ private void init(Map managerState, Map clusterS this.timeSource = TimeSource.get((String)managerState.getOrDefault("timeSource", "simTime:50")); this.clusterStateProvider = new SnapshotClusterStateProvider(clusterState); this.nodeStateProvider = new SnapshotNodeStateProvider(nodeState); - this.distribStateManager = new SnapshotDistribStateManager(distribState); + if (autoscalingJson == null || autoscalingJson.isEmpty()) { + this.distribStateManager = new SnapshotDistribStateManager(distribState); + } else { + this.distribStateManager = new SnapshotDistribStateManager(distribState, new AutoScalingConfig(autoscalingJson)); + } SimUtils.checkConsistency(this, null); } - public Map getSnapshot(boolean withAutoscaling) 
throws Exception { + public Map getSnapshot(boolean withAutoscaling, boolean redact) throws Exception { Map snapshot = new LinkedHashMap<>(4); Map managerState = new HashMap<>(); managerState.put("timeSource", timeSource.toString()); snapshot.put(MANAGER_STATE_KEY, managerState); - + RedactionUtils.RedactionContext ctx = redact ? SimUtils.getRedactionContext(clusterStateProvider.getClusterState()) : null; snapshot.put(CLUSTER_STATE_KEY, clusterStateProvider.getSnapshot()); snapshot.put(NODE_STATE_KEY, nodeStateProvider.getSnapshot()); - snapshot.put(DISTRIB_STATE_KEY, distribStateManager.getSnapshot()); + snapshot.put(DISTRIB_STATE_KEY, distribStateManager.getSnapshot(ctx)); if (withAutoscaling) { AutoScalingConfig config = distribStateManager.getAutoScalingConfig(); Policy.Session session = config.getPolicy().createSession(this); diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotDistribStateManager.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotDistribStateManager.java index 93a413dd4d2b..bb15abf4c1e9 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotDistribStateManager.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotDistribStateManager.java @@ -18,11 +18,15 @@ import java.io.IOException; import java.lang.invoke.MethodHandles; +import java.nio.charset.Charset; import java.util.HashMap; +import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; +import java.util.Set; +import java.util.regex.Pattern; import java.util.stream.Collectors; import org.apache.solr.client.solrj.cloud.DistribStateManager; @@ -35,6 +39,7 @@ import org.apache.solr.common.params.AutoScalingParams; import org.apache.solr.common.util.Base64; import org.apache.solr.common.util.Utils; +import org.apache.solr.util.RedactionUtils; import org.apache.zookeeper.CreateMode; import 
org.apache.zookeeper.KeeperException; import org.apache.zookeeper.Op; @@ -73,6 +78,14 @@ public SnapshotDistribStateManager(DistribStateManager other, AutoScalingConfig * @param snapshot previous snapshot created using this class. */ public SnapshotDistribStateManager(Map snapshot) { + this(snapshot, null); + } + /** + * Populate this instance from a previously generated snapshot. + * @param snapshot previous snapshot created using this class. + * @param config optional config to override the one from snapshot, may be null + */ + public SnapshotDistribStateManager(Map snapshot, AutoScalingConfig config) { snapshot.forEach((path, value) -> { Map map = (Map)value; Number version = (Number)map.getOrDefault("version", 0); @@ -85,16 +98,35 @@ public SnapshotDistribStateManager(Map snapshot) { } dataMap.put(path, new VersionedData(version.intValue(), bytes, mode, owner)); }); + if (config != null) { // overwrite existing + VersionedData vd = new VersionedData(config.getZkVersion(), Utils.toJSON(config), CreateMode.PERSISTENT, "0"); + dataMap.put(ZkStateReader.SOLR_AUTOSCALING_CONF_PATH, vd); + } log.debug("- loaded snapshot of {} resources", dataMap.size()); } + // content of these nodes is a UTF-8 String and it needs to be redacted + private static final Set REDACTED = new HashSet<>(); + static { + REDACTED.add(Pattern.compile("/aliases\\.json")); + REDACTED.add(Pattern.compile("/autoscaling\\.json")); + REDACTED.add(Pattern.compile("/clusterstate\\.json")); + REDACTED.add(Pattern.compile("/collections/.*?/state\\.json")); + REDACTED.add(Pattern.compile("/collections/.*?/leaders/shard.*?/leader")); + REDACTED.add(Pattern.compile("/overseer_elect/leader")); + } /** * Create a snapshot of all content in this instance. 
*/ - public Map getSnapshot() { + public Map getSnapshot(RedactionUtils.RedactionContext ctx) { Map snapshot = new LinkedHashMap<>(); dataMap.forEach((path, vd) -> { Map data = new HashMap<>(); + if (vd.getData() != null && ctx != null && REDACTED.stream().anyMatch(p -> p.matcher(path).matches())) { + String str = new String(vd.getData(), Charset.forName("UTF-8")); + str = RedactionUtils.redactNames(ctx.getRedactions(), str); + vd = new VersionedData(vd.getVersion(), str.getBytes(Charset.forName("UTF-8")), vd.getMode(), vd.getOwner()); + } vd.toMap(data); snapshot.put(path, data); }); diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotNodeStateProvider.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotNodeStateProvider.java index 8d22dbb4aabb..5a49635ab658 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotNodeStateProvider.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotNodeStateProvider.java @@ -158,12 +158,26 @@ public Map getSnapshot() { @Override public Map getNodeValues(String node, Collection tags) { - return nodeValues.getOrDefault(node, Collections.emptyMap()); + return new LinkedHashMap<>(nodeValues.getOrDefault(node, Collections.emptyMap())); } @Override public Map>> getReplicaInfo(String node, Collection keys) { - return replicaInfos.getOrDefault(node, Collections.emptyMap()); + Map>> result = new LinkedHashMap<>(); + Map>> infos = replicaInfos.getOrDefault(node, Collections.emptyMap()); + // deep copy + infos.forEach((coll, shards) -> { + shards.forEach((shard, replicas) -> { + replicas.forEach(ri -> { + List myReplicas = result + .computeIfAbsent(coll, c -> new LinkedHashMap<>()) + .computeIfAbsent(shard, s -> new ArrayList<>()); + ReplicaInfo myReplica = (ReplicaInfo)ri.clone(); + myReplicas.add(myReplica); + }); + }); + }); + return result; } public ReplicaInfo getReplicaInfo(String collection, String coreNode) { @@ -171,7 +185,7 @@ public 
ReplicaInfo getReplicaInfo(String collection, String coreNode) { for (List perShard : perNode.getOrDefault(collection, Collections.emptyMap()).values()) { for (ReplicaInfo ri : perShard) { if (ri.getName().equals(coreNode)) { - return ri; + return (ReplicaInfo)ri.clone(); } } } diff --git a/solr/core/src/java/org/apache/solr/core/BlobRepository.java b/solr/core/src/java/org/apache/solr/core/BlobRepository.java index ea2f6d791266..24bb88e08070 100644 --- a/solr/core/src/java/org/apache/solr/core/BlobRepository.java +++ b/solr/core/src/java/org/apache/solr/core/BlobRepository.java @@ -16,20 +16,17 @@ */ package org.apache.solr.core; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; import java.io.InputStream; import java.lang.invoke.MethodHandles; +import java.math.BigInteger; import java.nio.ByteBuffer; -import java.nio.file.Path; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Random; import java.util.Set; @@ -37,14 +34,10 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.regex.Pattern; -import org.apache.commons.codec.digest.DigestUtils; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpGet; -import org.apache.lucene.util.IOUtils; -import org.apache.solr.api.Api; -import org.apache.solr.api.V2HttpCall; import org.apache.solr.common.SolrException; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; @@ -52,32 +45,22 @@ import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.params.CollectionAdminParams; 
-import org.apache.solr.common.params.CommonParams; -import org.apache.solr.common.params.ModifiableSolrParams; -import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.StrUtils; import org.apache.solr.common.util.Utils; -import org.apache.solr.handler.RequestHandlerBase; -import org.apache.solr.request.SolrQueryRequest; -import org.apache.solr.response.SolrQueryResponse; -import org.apache.solr.security.AuthorizationContext; -import org.apache.solr.security.PermissionNameProvider; import org.apache.solr.util.SimplePostTool; import org.apache.zookeeper.server.ByteBufferInputStream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.common.SolrException.ErrorCode.BAD_REQUEST; import static org.apache.solr.common.SolrException.ErrorCode.SERVER_ERROR; import static org.apache.solr.common.SolrException.ErrorCode.SERVICE_UNAVAILABLE; import static org.apache.solr.common.cloud.ZkStateReader.BASE_URL_PROP; -import static org.apache.solr.handler.ReplicationHandler.FILE_STREAM; /** * The purpose of this class is to store the Jars loaded in memory and to keep only one copy of the Jar in a single node. 
*/ public class BlobRepository { - private static final long MAX_JAR_SIZE = Long.parseLong(System.getProperty("runtime.lib.size", String.valueOf(5 * 1024 * 1024))); + private static final long MAX_JAR_SIZE = Long.parseLong(System.getProperty("runtime.lib.size", String.valueOf(5 * 1024 * 1024))); private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); static final Random RANDOM; static final Pattern BLOB_KEY_PATTERN_CHECKER = Pattern.compile(".*/\\d+"); @@ -105,14 +88,6 @@ public BlobRepository(CoreContainer coreContainer) { this.coreContainer = coreContainer; } - public Collection getFiles() { - return Arrays.asList(getBlobsPath().toFile().list()); - } - - public Path getBlobsPath() { - return SolrResourceLoader.getBlobsDirPath(this.coreContainer.getResourceLoader().getInstancePath()); - } - // I wanted to {@link SolrCore#loadDecodeAndCacheBlob(String, Decoder)} below but precommit complains /** @@ -141,12 +116,12 @@ BlobContentRef getBlobIncRef(String key, Decoder decoder) { return getBlobIncRef(key.concat(decoder.getName()), () -> addBlob(key, decoder)); } - BlobContentRef getBlobIncRef(String key, Decoder decoder, String url, String sha256) { + BlobContentRef getBlobIncRef(String key, Decoder decoder, String url, String sha512) { StringBuffer keyBuilder = new StringBuffer(key); if (decoder != null) keyBuilder.append(decoder.getName()); - keyBuilder.append("/").append(sha256); + keyBuilder.append("/").append(sha512); - return getBlobIncRef(keyBuilder.toString(), () -> new BlobContent<>(key, fetchBlobAndVerify(key, url, sha256), decoder)); + return getBlobIncRef(keyBuilder.toString(), () -> new BlobContent<>(key, fetchBlobAndVerify(key, url, sha512), decoder)); } // do the actual work returning the appropriate type...
@@ -191,80 +166,34 @@ private BlobContent addBlob(String key, Decoder decoder) { return aBlob; } - static String INVALID_JAR_MSG = "Invalid jar from {0} , expected sha256 hash : {1} , actual : {2}"; - - private ByteBuffer fetchBlobAndVerify(String key, String url, String sha256) throws IOException { - ByteBuffer byteBuffer = null; - if (sha256 != null) { - byteBuffer = getFromLocalFs(sha256); - } - if (byteBuffer == null) byteBuffer = getAndValidate(key, url, sha256); - return byteBuffer; - } + static String INVALID_JAR_MSG = "Invalid jar from {0} , expected sha512 hash : {1} , actual : {2}"; - private ByteBuffer getAndValidate(String key, String url, String sha256) throws IOException { + private ByteBuffer fetchBlobAndVerify(String key, String url, String sha512) { ByteBuffer byteBuffer = fetchFromUrl(key, url); - String computedDigest = sha256Digest(byteBuffer); - if (!computedDigest.equals(sha256)) { - throw new SolrException(SERVER_ERROR, StrUtils.formatString(INVALID_JAR_MSG, url, sha256, computedDigest)); - } - File file = new File(getBlobsPath().toFile(), sha256); - try (FileOutputStream fos = new FileOutputStream(file)) { - fos.write(byteBuffer.array(), byteBuffer.arrayOffset(), byteBuffer.limit()); - IOUtils.fsync(file.toPath(), false); - } - return byteBuffer; - } + String computedDigest = sha512Digest(byteBuffer); + if (!computedDigest.equals(sha512)) { + throw new SolrException(SERVER_ERROR, StrUtils.formatString(INVALID_JAR_MSG, url, sha512, computedDigest)); - public String putBlob(InputStream is) throws SolrException { - byte[] b = new byte[(int) MAX_JAR_SIZE + 1]; - String sha256 = null; - try { - int sz = is.read(b); - - if (sz > MAX_JAR_SIZE) - throw new SolrException(BAD_REQUEST, "size is more than permitted , use system property runtime.lib.size to change it"); - sha256 = sha256Digest(ByteBuffer.wrap(b, 0, sz)); - File file = new File(getBlobsPath().toFile(), sha256); - try (FileOutputStream fos = new FileOutputStream(file)) { - fos.write(b, 0, 
sz); - } - IOUtils.fsync(file.toPath(), false); - } catch (IOException e) { - throw new SolrException(BAD_REQUEST, e); - } - return sha256; - - } - - private ByteBuffer getFromLocalFs(String sha256) throws IOException { - Path p = getBlobsPath(); - File f = new File(p.toFile(), sha256); - if (!f.exists()) return null; - byte[] b = new byte[(int) f.length()]; - try (FileInputStream fis = new FileInputStream(f)) { - fis.read(b); - ByteBuffer byteBuffer = ByteBuffer.wrap(b); - if (sha256.equals(sha256Digest(byteBuffer))) { - return byteBuffer; - } else { - return null; - - } } + return byteBuffer; } - public static String sha256Digest(ByteBuffer buf) { + public static String sha512Digest(ByteBuffer byteBuffer) { + MessageDigest digest = null; try { - return DigestUtils.sha256Hex(new ByteBufferInputStream(ByteBuffer.wrap( buf.array(), buf.arrayOffset(), buf.limit()))); - } catch (IOException e) { - throw new RuntimeException("Unable to compute sha256", e); + digest = MessageDigest.getInstance("SHA-512"); + } catch (NoSuchAlgorithmException e) { + //unlikely + throw new SolrException(SERVER_ERROR, e); } + digest.update(byteBuffer); + return String.format( + Locale.ROOT, + "%0128x", + new BigInteger(1, digest.digest())); } - - /** * Package local for unit tests only please do not use elsewhere */ @@ -285,14 +214,13 @@ ByteBuffer fetchFromUrl(String key, String url) { entity = response.getEntity(); int statusCode = response.getStatusLine().getStatusCode(); if (statusCode != 200) { - throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "no such resource available: " + key + ", url : " + url); + throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "no such blob or version available: " + key); } try (InputStream is = entity.getContent()) { b = SimplePostTool.inputStreamToByteArray(is, MAX_JAR_SIZE); } } catch (Exception e) { - log.error("Error loading resource " + url, e); if (e instanceof SolrException) { throw (SolrException) e; } else { @@ -353,68 +281,6 @@ 
public void decrementBlobRefCount(BlobContentRef ref) { } } - BlobRead blobRead = new BlobRead(); - - - class BlobRead extends RequestHandlerBase implements PermissionNameProvider { - - - @Override - public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) { - - } - - @Override - public String getDescription() { - return "List fetch blobs"; - } - - @Override - public Name getPermissionName(AuthorizationContext request) { - return null; - } - - @Override - public Collection getApis() { - return Collections.singleton(new Api(Utils.getSpec("node.blob.GET")) { - @Override - public void call(SolrQueryRequest req, SolrQueryResponse rsp) { - String sha256 = ((V2HttpCall) req.getHttpSolrCall()).getUrlParts().get("sha256"); - if (sha256 == null) { - rsp.add("blob", getFiles()); - } else { - try { - ByteBuffer buf = getFromLocalFs(sha256); - if(buf == null){ - throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "No such blob"); - } else { - ModifiableSolrParams solrParams = new ModifiableSolrParams(); - solrParams.add(CommonParams.WT, FILE_STREAM); - req.setParams( SolrParams.wrapDefaults(solrParams, req.getParams())); - rsp.add(FILE_STREAM, (SolrCore.RawWriter) os -> os.write(buf.array(), buf.arrayOffset(), buf.limit())); - } - - } catch (IOException e) { - throw new SolrException(SERVER_ERROR,e); - } - } - - } - }); - } - - @Override - public Boolean registerV1() { - return Boolean.FALSE; - } - - @Override - public Boolean registerV2() { - return Boolean.TRUE; - } - } - - public static class BlobContent { public final String key; private final T content; // holds byte buffer or cached object, holding both is a waste of memory @@ -468,7 +334,7 @@ default String getName() { public static class BlobContentRef { public final BlobContent blob; - public BlobContentRef(BlobContent blob) { + private BlobContentRef(BlobContent blob) { this.blob = blob; } } diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java 
b/solr/core/src/java/org/apache/solr/core/CoreContainer.java index 58b0a7d29fff..06f1fc8ef07b 100644 --- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java +++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java @@ -82,7 +82,6 @@ import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Replica.State; import org.apache.solr.common.cloud.ZkStateReader; -import org.apache.solr.common.params.CollectionAdminParams; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.IOUtils; import org.apache.solr.common.util.SolrjNamedThreadFactory; @@ -112,6 +111,7 @@ import org.apache.solr.metrics.SolrCoreMetricManager; import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.metrics.SolrMetricProducer; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.request.SolrRequestHandler; import org.apache.solr.request.SolrRequestInfo; import org.apache.solr.search.SolrFieldCacheBean; @@ -211,7 +211,9 @@ public CoreLoadFailure(CoreDescriptor cd, Exception loadFailure) { protected volatile SolrMetricManager metricManager; - protected volatile String metricTag = Integer.toHexString(hashCode()); + protected volatile String metricTag = SolrMetricProducer.getUniqueMetricTag(this, null); + + protected volatile SolrMetricsContext solrMetricsContext; protected MetricsHandler metricsHandler; @@ -223,8 +225,6 @@ public CoreLoadFailure(CoreDescriptor cd, Exception loadFailure) { protected SharedStoreManager sharedStoreManager; - private final PackageManager clusterPropertiesListener = new PackageManager(this); - // Bits for the state variable. 
public final static long LOAD_COMPLETE = 0x1L; @@ -608,6 +608,8 @@ public void load() { } metricManager = new SolrMetricManager(loader, cfg.getMetricsConfig()); + String registryName = SolrMetricManager.getRegistryName(SolrInfoBean.Group.node); + solrMetricsContext = new SolrMetricsContext(metricManager, registryName, metricTag); coreContainerWorkExecutor = MetricUtils.instrumentedExecutorService( coreContainerWorkExecutor, null, @@ -621,7 +623,7 @@ public void load() { } updateShardHandler = new UpdateShardHandler(cfg.getUpdateShardHandlerConfig()); - updateShardHandler.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), metricTag, "updateShardHandler"); + updateShardHandler.initializeMetrics(solrMetricsContext, "updateShardHandler"); solrCores.load(loader); @@ -632,10 +634,11 @@ public void load() { zkSys.initZooKeeper(this, solrHome, cfg.getCloudConfig()); if (isZooKeeperAware()) { - getZkController().getZkStateReader().registerClusterPropertiesListener(clusterPropertiesListener); pkiAuthenticationPlugin = new PKIAuthenticationPlugin(this, zkSys.getZkController().getNodeName(), (PublicKeyHandler) containerHandlers.get(PublicKeyHandler.PATH)); - pkiAuthenticationPlugin.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), metricTag, "/authentication/pki"); + // use deprecated API for back-compat, remove in 9.0 + pkiAuthenticationPlugin.initializeMetrics( + solrMetricsContext.metricManager, solrMetricsContext.registry, solrMetricsContext.tag, "/authentication/pki"); TracerConfigurator.loadTracer(loader, cfg.getTracerConfiguratorPluginInfo(), getZkController().getZkStateReader()); } @@ -645,8 +648,6 @@ public void load() { reloadSecurityProperties(); this.backupRepoFactory = new BackupRepositoryFactory(cfg.getBackupRepositoryPlugins()); - containerHandlers.put("/ext", clusterPropertiesListener.extHandler); - containerHandlers.put("/blob-get", blobRepository.blobRead); createHandler(ZK_PATH, ZookeeperInfoHandler.class.getName(), 
ZookeeperInfoHandler.class); createHandler(ZK_STATUS_PATH, ZookeeperStatusHandler.class.getName(), ZookeeperStatusHandler.class); collectionsHandler = createHandler(COLLECTIONS_HANDLER_PATH, cfg.getCollectionsHandlerClass(), CollectionsHandler.class); @@ -667,7 +668,7 @@ public void load() { metricsCollectorHandler.init(null); containerHandlers.put(AUTHZ_PATH, securityConfHandler); - securityConfHandler.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), metricTag, AUTHZ_PATH); + securityConfHandler.initializeMetrics(solrMetricsContext, AUTHZ_PATH); containerHandlers.put(AUTHC_PATH, securityConfHandler); @@ -682,22 +683,20 @@ public void load() { // initialize gauges for reporting the number of cores and disk total/free - String registryName = SolrMetricManager.getRegistryName(SolrInfoBean.Group.node); - String metricTag = Integer.toHexString(hashCode()); - metricManager.registerGauge(null, registryName, () -> solrCores.getCores().size(), - metricTag, true, "loaded", SolrInfoBean.Category.CONTAINER.toString(), "cores"); - metricManager.registerGauge(null, registryName, () -> solrCores.getLoadedCoreNames().size() - solrCores.getCores().size(), - metricTag, true, "lazy", SolrInfoBean.Category.CONTAINER.toString(), "cores"); - metricManager.registerGauge(null, registryName, () -> solrCores.getAllCoreNames().size() - solrCores.getLoadedCoreNames().size(), - metricTag, true, "unloaded", SolrInfoBean.Category.CONTAINER.toString(), "cores"); + solrMetricsContext.gauge(null, () -> solrCores.getCores().size(), + true, "loaded", SolrInfoBean.Category.CONTAINER.toString(), "cores"); + solrMetricsContext.gauge(null, () -> solrCores.getLoadedCoreNames().size() - solrCores.getCores().size(), + true, "lazy", SolrInfoBean.Category.CONTAINER.toString(), "cores"); + solrMetricsContext.gauge(null, () -> solrCores.getAllCoreNames().size() - solrCores.getLoadedCoreNames().size(), + true, "unloaded", SolrInfoBean.Category.CONTAINER.toString(), "cores"); Path dataHome = 
cfg.getSolrDataHome() != null ? cfg.getSolrDataHome() : cfg.getCoreRootDirectory(); - metricManager.registerGauge(null, registryName, () -> dataHome.toFile().getTotalSpace(), - metricTag, true, "totalSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs"); - metricManager.registerGauge(null, registryName, () -> dataHome.toFile().getUsableSpace(), - metricTag, true, "usableSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs"); - metricManager.registerGauge(null, registryName, () -> dataHome.toAbsolutePath().toString(), - metricTag, true, "path", SolrInfoBean.Category.CONTAINER.toString(), "fs"); - metricManager.registerGauge(null, registryName, () -> { + solrMetricsContext.gauge(null, () -> dataHome.toFile().getTotalSpace(), + true, "totalSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs"); + solrMetricsContext.gauge(null, () -> dataHome.toFile().getUsableSpace(), + true, "usableSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs"); + solrMetricsContext.gauge(null, () -> dataHome.toAbsolutePath().toString(), + true, "path", SolrInfoBean.Category.CONTAINER.toString(), "fs"); + solrMetricsContext.gauge(null, () -> { try { return org.apache.lucene.util.IOUtils.spins(dataHome.toAbsolutePath()); } catch (IOException e) { @@ -705,14 +704,14 @@ public void load() { return true; } }, - metricTag, true, "spins", SolrInfoBean.Category.CONTAINER.toString(), "fs"); - metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toFile().getTotalSpace(), - metricTag, true, "totalSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot"); - metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toFile().getUsableSpace(), - metricTag, true, "usableSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot"); - metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toAbsolutePath().toString(), - metricTag, true, "path", SolrInfoBean.Category.CONTAINER.toString(), "fs", 
"coreRoot"); - metricManager.registerGauge(null, registryName, () -> { + true, "spins", SolrInfoBean.Category.CONTAINER.toString(), "fs"); + solrMetricsContext.gauge(null, () -> cfg.getCoreRootDirectory().toFile().getTotalSpace(), + true, "totalSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot"); + solrMetricsContext.gauge(null, () -> cfg.getCoreRootDirectory().toFile().getUsableSpace(), + true, "usableSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot"); + solrMetricsContext.gauge(null, () -> cfg.getCoreRootDirectory().toAbsolutePath().toString(), + true, "path", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot"); + solrMetricsContext.gauge(null, () -> { try { return org.apache.lucene.util.IOUtils.spins(cfg.getCoreRootDirectory().toAbsolutePath()); } catch (IOException e) { @@ -720,15 +719,15 @@ public void load() { return true; } }, - metricTag, true, "spins", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot"); + true, "spins", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot"); // add version information - metricManager.registerGauge(null, registryName, () -> this.getClass().getPackage().getSpecificationVersion(), - metricTag, true, "specification", SolrInfoBean.Category.CONTAINER.toString(), "version"); - metricManager.registerGauge(null, registryName, () -> this.getClass().getPackage().getImplementationVersion(), - metricTag, true, "implementation", SolrInfoBean.Category.CONTAINER.toString(), "version"); + solrMetricsContext.gauge(null, () -> this.getClass().getPackage().getSpecificationVersion(), + true, "specification", SolrInfoBean.Category.CONTAINER.toString(), "version"); + solrMetricsContext.gauge(null, () -> this.getClass().getPackage().getImplementationVersion(), + true, "implementation", SolrInfoBean.Category.CONTAINER.toString(), "version"); SolrFieldCacheBean fieldCacheBean = new SolrFieldCacheBean(); - fieldCacheBean.initializeMetrics(metricManager, registryName, metricTag, null); 
+ fieldCacheBean.initializeMetrics(solrMetricsContext, null); if (isZooKeeperAware()) { metricManager.loadClusterReporters(metricReporters, this); @@ -818,7 +817,7 @@ public void load() { // initialize this handler here when SolrCloudManager is ready autoScalingHandler = new AutoScalingHandler(getZkController().getSolrCloudManager(), loader); containerHandlers.put(AutoScalingHandler.HANDLER_PATH, autoScalingHandler); - autoScalingHandler.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), metricTag, AutoScalingHandler.HANDLER_PATH); + autoScalingHandler.initializeMetrics(solrMetricsContext, AutoScalingHandler.HANDLER_PATH); } // This is a bit redundant but these are two distinct concepts for all they're accomplished at the same time. status |= LOAD_COMPLETE | INITIAL_CORE_LOAD_COMPLETE; @@ -849,7 +848,7 @@ private void createMetricsHistoryHandler() { name = "localhost"; } cloudManager = null; - client = new EmbeddedSolrServer(this, CollectionAdminParams.SYSTEM_COLL) { + client = new EmbeddedSolrServer(this, null) { @Override public void close() throws IOException { // do nothing - we close the container ourselves @@ -866,7 +865,7 @@ public void close() throws IOException { metricsHistoryHandler = new MetricsHistoryHandler(name, metricsHandler, client, cloudManager, initArgs); containerHandlers.put(METRICS_HISTORY_PATH, metricsHistoryHandler); - metricsHistoryHandler.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), metricTag, METRICS_HISTORY_PATH); + metricsHistoryHandler.initializeMetrics(solrMetricsContext, METRICS_HISTORY_PATH); } public void securityNodeChanged() { @@ -1563,7 +1562,7 @@ public void reload(String name) { } catch (SolrCoreState.CoreIsClosedException e) { throw e; } catch (Exception e) { - coreInitFailures.put(cd.getName(), new CoreLoadFailure(cd, e)); + coreInitFailures.put(cd.getName(), new CoreLoadFailure(cd, e)); throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to reload core [" + 
cd.getName() + "]", e); } finally { if (!success && newCore != null && newCore.getOpenCount() > 0) { @@ -1803,19 +1802,13 @@ protected T createHandler(String path, String handlerClass, Class clazz) containerHandlers.put(path, (SolrRequestHandler) handler); } if (handler instanceof SolrMetricProducer) { - ((SolrMetricProducer) handler).initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), metricTag, path); + // use deprecated method for back-compat, remove in 9.0 + ((SolrMetricProducer) handler).initializeMetrics(solrMetricsContext.metricManager, + solrMetricsContext.registry, solrMetricsContext.tag, path); } return handler; } - public PluginBag getContainerHandlers() { - return containerHandlers; - } - - public PackageManager getPackageManager(){ - return clusterPropertiesListener; - } - public CoreAdminHandler getMultiCoreHandler() { return coreAdminHandler; } diff --git a/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java b/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java index 8b2ba5d0189e..ac5923602b1d 100644 --- a/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java +++ b/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java @@ -203,9 +203,9 @@ public CoreDescriptor(String name, Path instanceDir, Map corePro coreProperties.putAll(defaultProperties); coreProperties.put(CORE_NAME, name); - for (String propname : coreProps.keySet()) { - - String propvalue = coreProps.get(propname); + for (Map.Entry entry : coreProps.entrySet()) { + String propname = entry.getKey(); + String propvalue = entry.getValue(); if (isUserDefinedProperty(propname)) originalExtraProperties.put(propname, propvalue); diff --git a/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java index 464b03012253..942f429eafff 100644 --- a/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java +++ b/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java @@ 
-53,8 +53,8 @@ import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.IOUtils; import org.apache.solr.common.util.NamedList; -import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.metrics.SolrMetricProducer; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.store.blockcache.BlockCache; import org.apache.solr.store.blockcache.BlockDirectory; import org.apache.solr.store.blockcache.BlockDirectoryCache; @@ -141,6 +141,13 @@ public void close() throws IOException { } tmpFsCache.invalidateAll(); tmpFsCache.cleanUp(); + try { + SolrMetricProducer.super.close(); + MetricsHolder.metrics.close(); + LocalityHolder.reporter.close(); + } catch (Exception e) { + throw new IOException(e); + } } private final static class LocalityHolder { @@ -497,9 +504,9 @@ private void initKerberos() { } @Override - public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) { - MetricsHolder.metrics.initializeMetrics(manager, registry, tag, scope); - LocalityHolder.reporter.initializeMetrics(manager, registry, tag, scope); + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + MetricsHolder.metrics.initializeMetrics(parentContext, scope); + LocalityHolder.reporter.initializeMetrics(parentContext, scope); } @Override diff --git a/solr/core/src/java/org/apache/solr/core/MemClassLoader.java b/solr/core/src/java/org/apache/solr/core/MemClassLoader.java index 56ff087a300a..d1a3a7cc4952 100644 --- a/solr/core/src/java/org/apache/solr/core/MemClassLoader.java +++ b/solr/core/src/java/org/apache/solr/core/MemClassLoader.java @@ -26,7 +26,6 @@ import java.security.ProtectionDomain; import java.security.cert.Certificate; import java.util.ArrayList; -import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -44,28 +43,20 @@ public class MemClassLoader extends ClassLoader implements AutoCloseable, Resour private static 
final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private boolean allJarsLoaded = false; private final SolrResourceLoader parentLoader; - private List libs = new ArrayList<>(); + private List libs = new ArrayList<>(); private Map classCache = new HashMap<>(); private List errors = new ArrayList<>(); - public MemClassLoader(List libs, SolrResourceLoader resourceLoader) { + public MemClassLoader(List libs, SolrResourceLoader resourceLoader) { this.parentLoader = resourceLoader; this.libs = libs; } - public int getZnodeVersion(){ - int result = -1; - for (RuntimeLib lib : libs) { - if(lib.znodeVersion > result) result = lib.znodeVersion; - } - return result; - } - synchronized void loadRemoteJars() { if (allJarsLoaded) return; int count = 0; - for (RuntimeLib lib : libs) { + for (PluginBag.RuntimeLib lib : libs) { if (lib.getUrl() != null) { try { lib.loadJar(); @@ -79,13 +70,10 @@ synchronized void loadRemoteJars() { if (count == libs.size()) allJarsLoaded = true; } - public Collection getErrors(){ - return errors; - } public synchronized void loadJars() { if (allJarsLoaded) return; - for (RuntimeLib lib : libs) { + for (PluginBag.RuntimeLib lib : libs) { try { lib.loadJar(); lib.verify(); @@ -145,7 +133,7 @@ private ByteBuffer getByteBuffer(String name, AtomicReference jarName) t String path = name.replace('.', '/').concat(".class"); ByteBuffer buf = null; - for (RuntimeLib lib : libs) { + for (PluginBag.RuntimeLib lib : libs) { try { buf = lib.getFileContent(path); if (buf != null) { @@ -162,7 +150,7 @@ private ByteBuffer getByteBuffer(String name, AtomicReference jarName) t @Override public void close() throws Exception { - for (RuntimeLib lib : libs) { + for (PluginBag.RuntimeLib lib : libs) { try { lib.close(); } catch (Exception e) { @@ -188,7 +176,6 @@ public Class findClass(String cname, Class expectedType) { try { return findClass(cname).asSubclass(expectedType); } catch (Exception e) { - log.error("Error loading class from 
runtime libs ", e); if (e instanceof SolrException) { throw (SolrException) e; } else { diff --git a/solr/core/src/java/org/apache/solr/core/PackageManager.java b/solr/core/src/java/org/apache/solr/core/PackageManager.java deleted file mode 100644 index 7eb00a537352..000000000000 --- a/solr/core/src/java/org/apache/solr/core/PackageManager.java +++ /dev/null @@ -1,370 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.solr.core; - -import java.io.IOException; -import java.lang.invoke.MethodHandles; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Objects; -import java.util.stream.Collectors; - -import org.apache.lucene.analysis.util.ResourceLoader; -import org.apache.solr.api.Api; -import org.apache.solr.api.V2HttpCall; -import org.apache.solr.common.MapWriter; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.cloud.ClusterPropertiesListener; -import org.apache.solr.common.cloud.ZkStateReader; -import org.apache.solr.common.params.CoreAdminParams; -import org.apache.solr.common.util.StrUtils; -import org.apache.solr.common.util.Utils; -import org.apache.solr.handler.RequestHandlerBase; -import org.apache.solr.request.SolrQueryRequest; -import org.apache.solr.request.SolrRequestHandler; -import org.apache.solr.response.SolrQueryResponse; -import org.apache.solr.schema.FieldType; -import org.apache.solr.security.AuthorizationContext; -import org.apache.solr.security.PermissionNameProvider; -import org.apache.solr.util.plugin.PluginInfoInitialized; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.solr.common.params.CommonParams.NAME; -import static org.apache.solr.common.params.CommonParams.PACKAGE; -import static org.apache.solr.common.params.CommonParams.VERSION; -import static org.apache.solr.core.RuntimeLib.SHA256; - -public class PackageManager implements ClusterPropertiesListener { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - private final CoreContainer coreContainer; - - private Map pkgs = new HashMap<>(); - - final ExtHandler extHandler; - private int myversion = -1; - - public int getZNodeVersion(String pkg) { - Package p = pkgs.get(pkg); - return p == null ? 
-1 : p.lib.getZnodeVersion(); - } - public RuntimeLib getLib(String name){ - Package p = pkgs.get(name); - return p == null? null: p.lib; - } - - static class Package implements MapWriter { - final RuntimeLib lib; - final MemClassLoader loader; - final String name; - - @Override - public void writeMap(EntryWriter ew) throws IOException { - lib.writeMap(ew); - } - - Package(RuntimeLib lib, MemClassLoader loader, int zkVersion, String name) { - this.lib = lib; - this.loader = loader; - this.name = name; - } - - public String getName() { - return name; - } - - - public boolean isModified(Map map) { - return (!Objects.equals(lib.getSha256(), (map).get(SHA256)) || - !Objects.equals(lib.getSig(), (map).get(SHA256))); - } - } - - PackageManager(CoreContainer coreContainer) { - this.coreContainer = coreContainer; - extHandler = new ExtHandler(this); - } - - - public T newInstance(String cName, Class expectedType, String pkg) { - try { - return coreContainer.getResourceLoader().newInstance(cName, expectedType, - null, new Class[]{CoreContainer.class}, new Object[]{coreContainer}); - } catch (SolrException e) { - Package p = pkgs.get(pkg); - - if (p != null) { - try { - Class klas = p.loader.findClass(cName, expectedType); - try { - return klas.getConstructor(CoreContainer.class).newInstance(coreContainer); - } catch (NoSuchMethodException ex) { - return klas.getConstructor().newInstance(); - } - } catch (Exception ex) { - if (!p.loader.getErrors().isEmpty()) { - //some libraries were no loaded due to some errors. May the class was there in those libraries - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, - "There were errors loading some libraries: " + StrUtils.join(p.loader.getErrors(), ','), ex); - } - //there were no errors in loading any libraries. 
The class was probably not suppoed to be there in those libraries - // so throw the original exception - throw e; - } - } else { - throw e; - } - } - } - - @Override - public boolean onChange(Map properties) { - log.info("clusterprops.json changed , version {}", coreContainer.getZkController().getZkStateReader().getClusterPropsVersion()); - int v = coreContainer.getZkController().getZkStateReader().getClusterPropsVersion(); - boolean modified = updatePackages(properties, v); - extHandler.updateReqHandlers(properties, modified); - for (SolrCore core : coreContainer.solrCores.getCores()) { - pkgs.forEach((s, pkg) -> core.packageUpdated(pkg.lib)); - } - myversion = v; - return false; - } - - - private boolean updatePackages(Map properties, int ver) { - Map m = (Map) properties.getOrDefault(PACKAGE, Collections.emptyMap()); - if (pkgs.isEmpty() && m.isEmpty()) return false; - boolean[] needsReload = new boolean[1]; - if (m.size() == pkgs.size()) { - m.forEach((k, v) -> { - if (v instanceof Map) { - Package pkg = pkgs.get(k); - if (pkg == null || pkg.isModified((Map) v)) { - needsReload[0] = true; - } - } - }); - } else { - needsReload[0] = true; - } - if (needsReload[0]) { - createNewClassLoaders(m, ver); - } - return needsReload[0]; - } - - public ResourceLoader getResourceLoader(String pkg) { - Package p = pkgs.get(pkg); - return p == null ? 
coreContainer.getResourceLoader() : p.loader; - } - - void createNewClassLoaders(Map m, int ver) { - boolean[] loadedAll = new boolean[1]; - loadedAll[0] = true; - Map newPkgs = new LinkedHashMap<>(); - m.forEach((k, v) -> { - if (v instanceof Map) { - Map map = new HashMap((Map) v); - map.put(CoreAdminParams.NAME, String.valueOf(k)); - String name = (String) k; - Package existing = pkgs.get(name); - if (existing != null && !existing.isModified(map)) { - //this package has not changed - newPkgs.put(name, existing); - } - - RuntimeLib lib = new RuntimeLib(coreContainer); - lib.znodeVersion = ver; - try { - lib.init(new PluginInfo(RuntimeLib.TYPE, map)); - if (lib.getUrl() == null) { - log.error("Unable to initialize runtimeLib : " + Utils.toJSONString(v)); - loadedAll[0] = false; - } - lib.loadJar(); - - newPkgs.put(name, new Package(lib, - new MemClassLoader(Collections.singletonList(lib), coreContainer.getResourceLoader()), - ver, name)); - } catch (Exception e) { - log.error("error loading a runtimeLib " + Utils.toJSONString(v), e); - loadedAll[0] = false; - - } - } - }); - - if (loadedAll[0]) { - log.info("Libraries changed. 
New memclassloader created with jars {}", - newPkgs.values().stream().map(it -> it.lib.getUrl()).collect(Collectors.toList())); - this.pkgs = newPkgs; - - } - } - - static class ExtHandler extends RequestHandlerBase implements PermissionNameProvider { - final PackageManager packageManager; - - private Map customHandlers = new HashMap<>(); - - ExtHandler(PackageManager packageManager) { - this.packageManager = packageManager; - } - - - @Override - public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) { - int v = req.getParams().getInt(ConfigOverlay.ZNODEVER, -1); - if (v >= 0) { - log.debug("expected version : {} , my version {}", v, packageManager.myversion); - ZkStateReader zkStateReader = packageManager.coreContainer.getZkController().getZkStateReader(); - try { - zkStateReader.forceRefreshClusterProps(v); - } catch (SolrException e) { - log.error("Error refreshing state ", e); - throw e; - } - } - rsp.add("metadata", (MapWriter) ew -> ew.putIfNotNull(VERSION, - packageManager.coreContainer.getZkController().zkStateReader.getClusterPropsVersion())); - rsp.add(RuntimeLib.TYPE, packageManager.pkgs.values()); - rsp.add(SolrRequestHandler.TYPE, customHandlers.values()); - - } - - @Override - public Collection getApis() { - return Collections.singleton(new Api(Utils.getSpec("node.ext")) { - @Override - public void call(SolrQueryRequest req, SolrQueryResponse rsp) { - String name = ((V2HttpCall) req.getHttpSolrCall()).getUrlParts().get("handlerName"); - if (name == null) { - handleRequestBody(req, rsp); - return; - } - Handler handler = customHandlers.get(name); - if (handler == null) { - String err = StrUtils.formatString(" No such handler: {0}, available handlers : {1}", name, customHandlers.keySet()); - log.error(err); - throw new SolrException(SolrException.ErrorCode.NOT_FOUND, err); - } - handler.handler.handleRequest(req, rsp); - } - }); - } - - private void updateReqHandlers(Map properties, boolean forceReload) { - Map m = (Map) 
properties.getOrDefault(SolrRequestHandler.TYPE, Collections.emptyMap()); - if (m.isEmpty() && customHandlers.isEmpty()) return; - boolean hasChanged = true; - if (customHandlers.size() == m.size() && customHandlers.keySet().containsAll(m.keySet())) hasChanged = false; - if (forceReload || hasChanged) { - log.debug("RequestHandlers being reloaded : {}", m.keySet()); - Map newCustomHandlers = new HashMap<>(); - m.forEach((k, v) -> { - if (v instanceof Map) { - Map metaData = (Map) v; - Handler existing = customHandlers.get(k); - String name = (String) k; - if (existing == null || existing.shouldReload(metaData, packageManager.pkgs)) { - String klas = (String) metaData.get(FieldType.CLASS_NAME); - if (klas != null) { - String pkg = (String) metaData.get(PACKAGE); - SolrRequestHandler inst = packageManager.newInstance(klas, SolrRequestHandler.class, pkg); - if (inst instanceof PluginInfoInitialized) { - ((PluginInfoInitialized) inst).init(new PluginInfo(SolrRequestHandler.TYPE, metaData)); - } - Package p = packageManager.pkgs.get(pkg); - newCustomHandlers.put(name, new Handler(inst, pkg, p == null ? 
-1 : p.lib.getZnodeVersion(), metaData, name)); - } else { - log.error("Invalid requestHandler {}", Utils.toJSONString(v)); - } - - } else { - newCustomHandlers.put(name, existing); - } - - } else { - log.error("Invalid data for requestHandler : {} , {}", k, v); - } - }); - - log.debug("Registering request handlers {} ", newCustomHandlers.keySet()); - Map old = customHandlers; - customHandlers = newCustomHandlers; - old.forEach((s, h) -> PluginBag.closeQuietly(h)); - } - } - - @Override - public String getDescription() { - return "Custom Handlers"; - } - - - @Override - public Boolean registerV1() { - return Boolean.FALSE; - } - - @Override - public Boolean registerV2() { - return Boolean.TRUE; - } - - @Override - public Name getPermissionName(AuthorizationContext request) { - if (request.getResource().endsWith("/node/ext")) return Name.COLL_READ_PERM; - return Name.CUSTOM_PERM; - } - - static class Handler implements MapWriter { - final SolrRequestHandler handler; - final String pkg; - final int zkversion; - final Map meta; - final String name; - - @Override - public void writeMap(EntryWriter ew) throws IOException { - ew.put(NAME, name); - ew.put(ConfigOverlay.ZNODEVER, zkversion); - meta.forEach(ew.getBiConsumer()); - } - - Handler(SolrRequestHandler handler, String pkg, int version, Map meta, String name) { - this.handler = handler; - this.pkg = pkg; - this.zkversion = version; - this.meta = Utils.getDeepCopy(meta, 3); - this.name = name; - } - - public boolean shouldReload(Map metaData, Map pkgs) { - Package p = pkgs.get(pkg); - //the metadata is same and the package has not changed since we last loaded - return !meta.equals(metaData) || p == null || p.lib.getZnodeVersion() > zkversion; - } - } - } - -} diff --git a/solr/core/src/java/org/apache/solr/core/PluginBag.java b/solr/core/src/java/org/apache/solr/core/PluginBag.java index dd15a1a26c29..fa2c3e30b7ba 100644 --- a/solr/core/src/java/org/apache/solr/core/PluginBag.java +++ 
b/solr/core/src/java/org/apache/solr/core/PluginBag.java @@ -16,8 +16,11 @@ */ package org.apache.solr.core; +import java.io.ByteArrayInputStream; import java.io.IOException; import java.lang.invoke.MethodHandles; +import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -27,21 +30,24 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; +import java.util.zip.ZipEntry; +import java.util.zip.ZipInputStream; import org.apache.lucene.analysis.util.ResourceLoader; import org.apache.lucene.analysis.util.ResourceLoaderAware; import org.apache.solr.api.Api; import org.apache.solr.api.ApiBag; import org.apache.solr.api.ApiSupport; -import org.apache.solr.common.MapWriter; +import org.apache.solr.cloud.CloudUtil; import org.apache.solr.common.SolrException; -import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.util.StrUtils; import org.apache.solr.handler.RequestHandlerBase; import org.apache.solr.handler.component.SearchComponent; import org.apache.solr.request.SolrRequestHandler; import org.apache.solr.update.processor.UpdateRequestProcessorChain; import org.apache.solr.update.processor.UpdateRequestProcessorFactory; +import org.apache.solr.util.CryptoKeys; +import org.apache.solr.util.SimplePostTool; import org.apache.solr.util.plugin.NamedListInitializedPlugin; import org.apache.solr.util.plugin.PluginInfoInitialized; import org.apache.solr.util.plugin.SolrCoreAware; @@ -50,6 +56,7 @@ import static java.util.Collections.singletonMap; import static org.apache.solr.api.ApiBag.HANDLER_NAME; +import static org.apache.solr.common.params.CommonParams.NAME; /** * This manages the lifecycle of a set of plugin of the same type . 
@@ -117,36 +124,24 @@ public Set checkContains(Collection names) { return result; } - private static T createInitInstance(PluginInfo pluginInfo, SolrConfig.SolrPluginInfo pluginMeta, - SolrCore core, ResourceLoader resourceLoader, - boolean isRuntimeLib) { - T localInst = null; - try { - localInst = (T) SolrCore.createInstance(pluginInfo.className, pluginMeta.clazz, pluginMeta.getCleanTag(), core, resourceLoader); - } catch (SolrException e) { - if (isRuntimeLib && !(resourceLoader instanceof MemClassLoader)) { - throw new SolrException(SolrException.ErrorCode.getErrorCode(e.code()), - e.getMessage() + ". runtime library loading is not enabled, start Solr with -Denable.runtime.lib=true", - e.getCause()); - } - throw e; - + public PluginHolder createPlugin(PluginInfo info) { + if ("true".equals(String.valueOf(info.attributes.get("runtimeLib")))) { + log.debug(" {} : '{}' created with runtimeLib=true ", meta.getCleanTag(), info.name); + LazyPluginHolder holder = new LazyPluginHolder<>(meta, info, core, RuntimeLib.isEnabled() ? + core.getMemClassLoader() : + core.getResourceLoader(), true); + return meta.clazz == UpdateRequestProcessorFactory.class ? 
+ (PluginHolder) new UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder(holder) : + holder; + } else if ("lazy".equals(info.attributes.get("startup")) && meta.options.contains(SolrConfig.PluginOpts.LAZY)) { + log.debug("{} : '{}' created with startup=lazy ", meta.getCleanTag(), info.name); + return new LazyPluginHolder(meta, info, core, core.getResourceLoader(), false); + } else { + T inst = core.createInstance(info.className, (Class) meta.clazz, meta.getCleanTag(), null, core.getResourceLoader()); + initInstance(inst, info); + return new PluginHolder<>(info, inst); } - initInstance(localInst, pluginInfo); - if (localInst instanceof SolrCoreAware) { - SolrResourceLoader.assertAwareCompatibility(SolrCoreAware.class, localInst); - ((SolrCoreAware) localInst).inform(core); - } - if (localInst instanceof ResourceLoaderAware) { - SolrResourceLoader.assertAwareCompatibility(ResourceLoaderAware.class, localInst); - try { - ((ResourceLoaderAware) localInst).inform(core.getResourceLoader()); - } catch (IOException e) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "error initializing component", e); - } - } - return localInst; } /** make a plugin available in an alternate name. This is an internal API and not for public use @@ -197,7 +192,6 @@ public T put(String name, T plugin) { PluginHolder pluginHolder = new PluginHolder<>(null, plugin); pluginHolder.registerAPI = false; PluginHolder old = put(name, pluginHolder); - if(old != null) closeQuietly(old); return old == null ? 
null : old.get(); } @@ -237,11 +231,15 @@ public PluginHolder put(String name, PluginHolder plugin) { apiBag.registerLazy((PluginHolder) plugin, plugin.pluginInfo); } } - if(disableHandler == null) disableHandler = Boolean.FALSE; + if (disableHandler == null) disableHandler = Boolean.FALSE; PluginHolder old = null; - if(!disableHandler) old = registry.put(name, plugin); + if (!disableHandler) old = registry.put(name, plugin); if (plugin.pluginInfo != null && plugin.pluginInfo.isDefault()) setDefault(name); if (plugin.isLoaded()) registerMBean(plugin.get(), core, name); + // old instance has been replaced - close it to prevent mem leaks + if (old != null && old != plugin) { + closeQuietly(old); + } return old; } @@ -338,52 +336,13 @@ public static void closeQuietly(Object inst) { } } - public PluginHolder createPlugin(PluginInfo info) { - String pkg = info.attributes.get(CommonParams.PACKAGE); - if (pkg != null) { - log.debug(" {} : '{}' created with package={} ", meta.getCleanTag(), info.name, pkg); - PluginHolder holder = new PackagePluginHolder(info, core, meta); - return meta.clazz == UpdateRequestProcessorFactory.class ? - (PluginHolder) new UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder((PluginHolder) holder) : - holder; - - } else if (info.isRuntimePlugin()) { - log.debug(" {} : '{}' created with runtimeLib=true ", meta.getCleanTag(), info.name); - LazyPluginHolder holder = new LazyPluginHolder<>(meta, info, core, RuntimeLib.isEnabled() ? - core.getMemClassLoader() : - core.getResourceLoader(), true); - - return meta.clazz == UpdateRequestProcessorFactory.class ? 
- (PluginHolder) new UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder((PluginHolder) holder) : - holder; - } else if ("lazy".equals(info.attributes.get("startup")) && meta.options.contains(SolrConfig.PluginOpts.LAZY)) { - log.debug("{} : '{}' created with startup=lazy ", meta.getCleanTag(), info.name); - return new LazyPluginHolder(meta, info, core, core.getResourceLoader(), false); - } else { - T inst = SolrCore.createInstance(info.className, (Class) meta.clazz, meta.getCleanTag(), null, core.getResourceLoader()); - initInstance(inst, info); - return new PluginHolder<>(info, inst); - } - } - - public Api v2lookup(String path, String method, Map parts) { - if (apiBag == null) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "this should not happen, looking up for v2 API at the wrong place"); - } - return apiBag.lookup(path, method, parts); - } - - public ApiBag getApiBag() { - return apiBag; - } - /** * An indirect reference to a plugin. It just wraps a plugin instance. * subclasses may choose to lazily load the plugin */ public static class PluginHolder implements AutoCloseable { + private T inst; protected final PluginInfo pluginInfo; - T inst; boolean registerAPI = false; public PluginHolder(PluginInfo info) { @@ -411,7 +370,7 @@ public void close() throws Exception { // can close() be called concurrently with other methods? 
if (isLoaded()) { T myInst = get(); - closeQuietly(myInst); + if (myInst != null && myInst instanceof AutoCloseable) ((AutoCloseable) myInst).close(); } } @@ -479,62 +438,209 @@ private synchronized boolean createInst() { MemClassLoader loader = (MemClassLoader) resourceLoader; loader.loadJars(); } - lazyInst = createInitInstance(pluginInfo,pluginMeta,core,resourceLoader, isRuntimeLib); + Class clazz = (Class) pluginMeta.clazz; + T localInst = null; + try { + localInst = core.createInstance(pluginInfo.className, clazz, pluginMeta.getCleanTag(), null, resourceLoader); + } catch (SolrException e) { + if (isRuntimeLib && !(resourceLoader instanceof MemClassLoader)) { + throw new SolrException(SolrException.ErrorCode.getErrorCode(e.code()), + e.getMessage() + ". runtime library loading is not enabled, start Solr with -Denable.runtime.lib=true", + e.getCause()); + } + throw e; + + + } + initInstance(localInst, pluginInfo); + if (localInst instanceof SolrCoreAware) { + SolrResourceLoader.assertAwareCompatibility(SolrCoreAware.class, localInst); + ((SolrCoreAware) localInst).inform(core); + } + if (localInst instanceof ResourceLoaderAware) { + SolrResourceLoader.assertAwareCompatibility(ResourceLoaderAware.class, localInst); + try { + ((ResourceLoaderAware) localInst).inform(core.getResourceLoader()); + } catch (IOException e) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "error initializing component", e); + } + } + lazyInst = localInst; // only assign the volatile until after the plugin is completely ready to use return true; } } - public class PackagePluginHolder extends PluginHolder { - private final SolrCore core; - private final SolrConfig.SolrPluginInfo pluginMeta; - private final PackageManager packageManager; - private final String pkg; - private RuntimeLib runtimeLib; + /** + * This represents a Runtime Jar. 
A jar requires two details , name and version + */ + public static class RuntimeLib implements PluginInfoInitialized, AutoCloseable { + private String name, version, sig, sha512, url; + private BlobRepository.BlobContentRef jarContent; + private final CoreContainer coreContainer; + private boolean verified = false; - public PackagePluginHolder(PluginInfo info, SolrCore core, SolrConfig.SolrPluginInfo pluginMeta) { - super(info); - this.core = core; - this.pluginMeta = pluginMeta; - this.pkg = info.attributes.get(CommonParams.PACKAGE); - this.core.addPackageListener(new SolrCore.PkgListener() { - @Override - public String packageName() { - return pkg; + @Override + public void init(PluginInfo info) { + name = info.attributes.get(NAME); + url = info.attributes.get("url"); + sig = info.attributes.get("sig"); + if(url == null) { + Object v = info.attributes.get("version"); + if (name == null || v == null) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "runtimeLib must have name and version"); } + version = String.valueOf(v); + } else { + sha512 = info.attributes.get("sha512"); + if(sha512 == null){ + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "runtimeLib with url must have a 'sha512' attribute"); + } + ByteBuffer buf = null; + buf = coreContainer.getBlobRepository().fetchFromUrl(name, url); - @Override - public PluginInfo pluginInfo() { - return info; + String digest = BlobRepository.sha512Digest(buf); + if(!sha512.equals(digest)) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, StrUtils.formatString(BlobRepository.INVALID_JAR_MSG, url, sha512, digest) ); } + log.info("dynamic library verified {}, sha512: {}", url, sha512); + + } + + } + + public RuntimeLib(SolrCore core) { + coreContainer = core.getCoreContainer(); + } + + public String getUrl(){ + return url; + } + + void loadJar() { + if (jarContent != null) return; + synchronized (this) { + if (jarContent != null) return; + + jarContent = url == null? 
+ coreContainer.getBlobRepository().getBlobIncRef(name + "/" + version): + coreContainer.getBlobRepository().getBlobIncRef(name, null,url,sha512); + + } + } + + public static boolean isEnabled() { + return Boolean.getBoolean("enable.runtime.lib"); + } + + public String getName() { + return name; + } + + public String getVersion() { + return version; + } + + public String getSig() { + return sig; + + } + + public ByteBuffer getFileContent(String entryName) throws IOException { + if (jarContent == null) + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "jar not available: " + name ); + return getFileContent(jarContent.blob, entryName); + + } - @Override - public MapWriter lib() { - return runtimeLib; + public ByteBuffer getFileContent(BlobRepository.BlobContent blobContent, String entryName) throws IOException { + ByteBuffer buff = blobContent.get(); + ByteArrayInputStream zipContents = new ByteArrayInputStream(buff.array(), buff.arrayOffset(), buff.limit()); + ZipInputStream zis = new ZipInputStream(zipContents); + try { + ZipEntry entry; + while ((entry = zis.getNextEntry()) != null) { + if (entryName == null || entryName.equals(entry.getName())) { + SimplePostTool.BAOS out = new SimplePostTool.BAOS(); + byte[] buffer = new byte[2048]; + int size; + while ((size = zis.read(buffer, 0, buffer.length)) != -1) { + out.write(buffer, 0, size); + } + out.close(); + return out.getByteBuffer(); + } } + } finally { + zis.closeEntry(); + } + return null; + } + + + @Override + public void close() throws Exception { + if (jarContent != null) coreContainer.getBlobRepository().decrementBlobRefCount(jarContent); + } - @Override - public void changed(RuntimeLib lib) { - int myVersion = runtimeLib == null? 
-1 : runtimeLib.znodeVersion; - if(lib.getZnodeVersion() > myVersion) reload(); + public static List getLibObjects(SolrCore core, List libs) { + List l = new ArrayList<>(libs.size()); + for (PluginInfo lib : libs) { + RuntimeLib rtl = new RuntimeLib(core); + try { + rtl.init(lib); + } catch (Exception e) { + log.error("error loading runtime library", e); } - }); - this.packageManager = core.getCoreContainer().getPackageManager(); - reload(); + l.add(rtl); + } + return l; } + public void verify() throws Exception { + if (verified) return; + if (jarContent == null) { + log.error("Calling verify before loading the jar"); + return; + } - private void reload() { - if(inst == null) log.info("reloading plugin {} ", pluginInfo.name); - inst = createInitInstance(pluginInfo, pluginMeta, - core, packageManager.getResourceLoader(this.pkg), true); - this.runtimeLib = packageManager.getLib(pkg); + if (!coreContainer.isZooKeeperAware()) + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Signing jar is possible only in cloud"); + Map keys = CloudUtil.getTrustedKeys(coreContainer.getZkController().getZkClient(), "exe"); + if (keys.isEmpty()) { + if (sig == null) { + verified = true; + return; + } else { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No public keys are available in ZK to verify signature for runtime lib " + name); + } + } else if (sig == null) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, StrUtils.formatString("runtimelib {0} should be signed with one of the keys in ZK /keys/exe ", name)); + } + try { + String matchedKey = new CryptoKeys(keys).verify(sig, jarContent.blob.get()); + if (matchedKey == null) + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No key matched signature for jar : " + name + " version: " + version); + log.info("Jar {} signed with {} successfully verified", name, matchedKey); + } catch (Exception e) { + if (e instanceof SolrException) throw e; + throw new 
SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error verifying key ", e); + } } + } + public Api v2lookup(String path, String method, Map parts) { + if (apiBag == null) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "this should not happen, looking up for v2 API at the wrong place"); + } + return apiBag.lookup(path, method, parts); } + public ApiBag getApiBag() { + return apiBag; + } } diff --git a/solr/core/src/java/org/apache/solr/core/PluginInfo.java b/solr/core/src/java/org/apache/solr/core/PluginInfo.java index e25bd92fed2a..1bc85aeb0cd2 100644 --- a/solr/core/src/java/org/apache/solr/core/PluginInfo.java +++ b/solr/core/src/java/org/apache/solr/core/PluginInfo.java @@ -16,23 +16,14 @@ */ package org.apache.solr.core; -import java.lang.invoke.MethodHandles; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - import org.apache.solr.common.MapSerializable; -import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.util.DOMUtil; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.w3c.dom.Node; import org.w3c.dom.NodeList; +import java.util.*; + import static java.util.Arrays.asList; import static java.util.Collections.unmodifiableList; import static java.util.Collections.unmodifiableMap; @@ -40,26 +31,23 @@ import static org.apache.solr.schema.FieldType.CLASS_NAME; /** - * An Object which represents a Plugin of any type + * An Object which represents a Plugin of any type + * */ public class PluginInfo implements MapSerializable { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - public final String name, className, type; public final NamedList initArgs; public final Map attributes; public final List children; private boolean isFromSolrConfig; - public List pathInConfig; - public 
PluginInfo(String type, Map attrs, NamedList initArgs, List children) { this.type = type; this.name = attrs.get(NAME); this.className = attrs.get(CLASS_NAME); this.initArgs = initArgs; attributes = unmodifiableMap(attrs); - this.children = children == null ? Collections.emptyList() : unmodifiableList(children); + this.children = children == null ? Collections.emptyList(): unmodifiableList(children); isFromSolrConfig = false; } @@ -74,7 +62,7 @@ public PluginInfo(Node node, String err, boolean requireName, boolean requireCla isFromSolrConfig = true; } - public PluginInfo(String type, Map map) { + public PluginInfo(String type, Map map) { LinkedHashMap m = new LinkedHashMap<>(map); initArgs = new NamedList(); for (Map.Entry entry : map.entrySet()) { @@ -99,7 +87,7 @@ public PluginInfo(String type, Map map) { this.name = (String) m.get(NAME); this.className = (String) m.get(CLASS_NAME); attributes = unmodifiableMap(m); - this.children = Collections.emptyList(); + this.children = Collections.emptyList(); isFromSolrConfig = true; } @@ -114,7 +102,7 @@ private List loadSubPlugins(Node node) { PluginInfo pluginInfo = new PluginInfo(nd, null, false, false); if (pluginInfo.isEnabled()) children.add(pluginInfo); } - return children.isEmpty() ? Collections.emptyList() : unmodifiableList(children); + return children.isEmpty() ? Collections.emptyList() : unmodifiableList(children); } @Override @@ -129,37 +117,37 @@ public String toString() { return sb.toString(); } - public boolean isEnabled() { + public boolean isEnabled(){ String enable = attributes.get("enable"); - return enable == null || Boolean.parseBoolean(enable); + return enable == null || Boolean.parseBoolean(enable); } public boolean isDefault() { return Boolean.parseBoolean(attributes.get("default")); } - public PluginInfo getChild(String type) { + public PluginInfo getChild(String type){ List l = getChildren(type); - return l.isEmpty() ? null : l.get(0); + return l.isEmpty() ? 
null:l.get(0); } public Map toMap(Map map) { map.putAll(attributes); Map m = map; - if (initArgs != null) m.putAll(initArgs.asMap(3)); - if (children != null) { + if(initArgs!=null ) m.putAll(initArgs.asMap(3)); + if(children != null){ for (PluginInfo child : children) { Object old = m.get(child.name); - if (old == null) { + if(old == null){ m.put(child.name, child.toMap(new LinkedHashMap<>())); } else if (old instanceof List) { List list = (List) old; list.add(child.toMap(new LinkedHashMap<>())); - } else { + } else { ArrayList l = new ArrayList(); l.add(old); l.add(child.toMap(new LinkedHashMap<>())); - m.put(child.name, l); + m.put(child.name,l); } } @@ -167,47 +155,36 @@ public Map toMap(Map map) { return m; } - /** - * Filter children by type - * + /**Filter children by type * @param type The type name. must not be null * @return The mathcing children */ - public List getChildren(String type) { - if (children.isEmpty()) return children; + public List getChildren(String type){ + if(children.isEmpty()) return children; List result = new ArrayList<>(); - for (PluginInfo child : children) if (type.equals(child.type)) result.add(child); + for (PluginInfo child : children) if(type.equals(child.type)) result.add(child); return result; } - - public static final PluginInfo EMPTY_INFO = new PluginInfo("", Collections.emptyMap(), new NamedList(), Collections.emptyList()); + public static final PluginInfo EMPTY_INFO = new PluginInfo("",Collections.emptyMap(), new NamedList(),Collections.emptyList()); private static final HashSet NL_TAGS = new HashSet<> - (asList("lst", "arr", - "bool", - "str", - "int", "long", - "float", "double")); + (asList("lst", "arr", + "bool", + "str", + "int", "long", + "float", "double")); public static final String DEFAULTS = "defaults"; public static final String APPENDS = "appends"; public static final String INVARIANTS = "invariants"; - public boolean isFromSolrConfig() { + public boolean isFromSolrConfig(){ return isFromSolrConfig; } - 
public PluginInfo copy() { PluginInfo result = new PluginInfo(type, attributes, initArgs != null ? initArgs.clone() : null, children); result.isFromSolrConfig = isFromSolrConfig; - result.pathInConfig = pathInConfig; return result; } - - public boolean isRuntimePlugin() { - return "true".equals(String.valueOf(attributes.get(RuntimeLib.TYPE))) - || (attributes.get(CommonParams.PACKAGE) != null); - } - } diff --git a/solr/core/src/java/org/apache/solr/core/RuntimeLib.java b/solr/core/src/java/org/apache/solr/core/RuntimeLib.java deleted file mode 100644 index 1e1f5f708f84..000000000000 --- a/solr/core/src/java/org/apache/solr/core/RuntimeLib.java +++ /dev/null @@ -1,227 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.solr.core; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.lang.invoke.MethodHandles; -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.zip.ZipEntry; -import java.util.zip.ZipInputStream; - -import org.apache.solr.cloud.CloudUtil; -import org.apache.solr.common.MapWriter; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.util.StrUtils; -import org.apache.solr.util.CryptoKeys; -import org.apache.solr.util.SimplePostTool; -import org.apache.solr.util.plugin.PluginInfoInitialized; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.solr.common.params.CommonParams.NAME; - -/** - * This represents a Runtime Jar. A jar requires two details , name and version - */ -public class RuntimeLib implements PluginInfoInitialized, AutoCloseable, MapWriter { - public static final String TYPE = "runtimeLib"; - public static final String SHA256 = "sha256"; - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private final CoreContainer coreContainer; - private String name, version, sig, sha256, url; - private BlobRepository.BlobContentRef jarContent; - private boolean verified = false; - int znodeVersion = -1; - - @Override - public void writeMap(EntryWriter ew) throws IOException { - ew.putIfNotNull(NAME, name); - ew.putIfNotNull("url", url); - ew.putIfNotNull(version, version); - ew.putIfNotNull("sha256", sha256); - ew.putIfNotNull("sig", sig); - if (znodeVersion > -1) { - ew.put(ConfigOverlay.ZNODEVER, znodeVersion); - } - } - public int getZnodeVersion(){ - return znodeVersion; - } - - public RuntimeLib(CoreContainer coreContainer) { - this.coreContainer = coreContainer; - } - - public static boolean isEnabled() { - return "true".equals(System.getProperty("enable.runtime.lib")); - } - - public static List getLibObjects(SolrCore core, List 
libs) { - List l = new ArrayList<>(libs.size()); - for (PluginInfo lib : libs) { - RuntimeLib rtl = new RuntimeLib(core.getCoreContainer()); - try { - rtl.init(lib); - } catch (Exception e) { - log.error("error loading runtime library", e); - } - l.add(rtl); - } - return l; - } - - @Override - public void init(PluginInfo info) { - name = info.attributes.get(NAME); - url = info.attributes.get("url"); - sig = info.attributes.get("sig"); - if (url == null) { - Object v = info.attributes.get("version"); - if (name == null || v == null) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "runtimeLib must have name and version"); - } - version = String.valueOf(v); - } else { - sha256 = info.attributes.get(SHA256); - if (sha256 == null) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "runtimeLib with url must have a 'sha256' attribute"); - } - ByteBuffer buf = coreContainer.getBlobRepository().fetchFromUrl(name, url); - - String digest = BlobRepository.sha256Digest(buf); - if (!sha256.equals(digest)) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, StrUtils.formatString(BlobRepository.INVALID_JAR_MSG, url, sha256, digest)); - } - verifyJarSignature(buf); - - log.debug("dynamic library verified {}, sha256: {}", url, sha256); - - } - - } - - public String getUrl() { - return url; - } - - void loadJar() { - if (jarContent != null) return; - synchronized (this) { - if (jarContent != null) return; - - jarContent = url == null ? 
- coreContainer.getBlobRepository().getBlobIncRef(name + "/" + version) : - coreContainer.getBlobRepository().getBlobIncRef(name, null, url, sha256); - - } - } - - public String getName() { - return name; - } - - public String getVersion() { - return version; - } - - public String getSig() { - return sig; - - } - - public String getSha256() { - return sha256; - } - - public ByteBuffer getFileContent(String entryName) throws IOException { - if (jarContent == null) - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "jar not available: " + name); - return getFileContent(jarContent.blob, entryName); - - } - - public ByteBuffer getFileContent(BlobRepository.BlobContent blobContent, String entryName) throws IOException { - ByteBuffer buff = blobContent.get(); - ByteArrayInputStream zipContents = new ByteArrayInputStream(buff.array(), buff.arrayOffset(), buff.limit()); - ZipInputStream zis = new ZipInputStream(zipContents); - try { - ZipEntry entry; - while ((entry = zis.getNextEntry()) != null) { - if (entryName == null || entryName.equals(entry.getName())) { - SimplePostTool.BAOS out = new SimplePostTool.BAOS(); - byte[] buffer = new byte[2048]; - int size; - while ((size = zis.read(buffer, 0, buffer.length)) != -1) { - out.write(buffer, 0, size); - } - out.close(); - return out.getByteBuffer(); - } - } - } finally { - zis.closeEntry(); - } - return null; - } - - @Override - public void close() throws Exception { - if (jarContent != null) coreContainer.getBlobRepository().decrementBlobRefCount(jarContent); - } - - public void verify() throws Exception { - if (verified) return; - if (jarContent == null) { - log.error("Calling verify before loading the jar"); - return; - } - - if (!coreContainer.isZooKeeperAware()) - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Signing jar is possible only in cloud"); - verifyJarSignature(jarContent.blob.get()); - } - - void verifyJarSignature(ByteBuffer buf) { - Map keys = 
CloudUtil.getTrustedKeys(coreContainer.getZkController().getZkClient(), "exe"); - if (keys.isEmpty()) { - if (sig == null) { - verified = true; - return; - } else { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No public keys are available in ZK to verify signature for runtime lib " + name); - } - } else if (sig == null) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, StrUtils.formatString("runtimelib {0} should be signed with one of the keys in ZK /keys/exe ", name)); - } - - try { - String matchedKey = new CryptoKeys(keys).verify(sig, buf); - if (matchedKey == null) - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No key matched signature for jar : " + name + " version: " + version); - log.info("Jar {} signed with {} successfully verified", name, matchedKey); - } catch (Exception e) { - log.error("Signature verifying error ", e); - if (e instanceof SolrException) throw (SolrException) e; - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error verifying key ", e); - } - } -} diff --git a/solr/core/src/java/org/apache/solr/core/SolrConfig.java b/solr/core/src/java/org/apache/solr/core/SolrConfig.java index d9da8f36b0d5..5a62695c5037 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrConfig.java +++ b/solr/core/src/java/org/apache/solr/core/SolrConfig.java @@ -29,7 +29,6 @@ import java.nio.file.Paths; import java.text.ParseException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; @@ -56,12 +55,12 @@ import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.util.IOUtils; -import org.apache.solr.common.util.StrUtils; import org.apache.solr.handler.component.SearchComponent; import org.apache.solr.request.SolrRequestHandler; import org.apache.solr.response.QueryResponseWriter; import org.apache.solr.response.transform.TransformerFactory; 
import org.apache.solr.rest.RestManager; +import org.apache.solr.schema.IndexSchema; import org.apache.solr.schema.IndexSchemaFactory; import org.apache.solr.search.CacheConfig; import org.apache.solr.search.FastLRUCache; @@ -207,7 +206,7 @@ public SolrConfig(SolrResourceLoader loader, String name, InputSource is) getOverlay();//just in case it is not initialized getRequestParams(); initLibs(); - luceneMatchVersion = SolrConfig.parseLuceneVersionString(getVal("luceneMatchVersion", true)); + luceneMatchVersion = SolrConfig.parseLuceneVersionString(getVal(IndexSchema.LUCENE_MATCH_VERSION_PARAM, true)); log.info("Using Lucene MatchVersion: {}", luceneMatchVersion); String indexConfigPrefix; @@ -272,8 +271,7 @@ public SolrConfig(SolrResourceLoader loader, String name, InputSource is) args.put("size", "10000"); args.put("initialSize", "10"); args.put("showItems", "-1"); - args.put("class", FastLRUCache.class.getName()); - conf = new CacheConfig(args,"query/fieldValueCache"); + conf = new CacheConfig(FastLRUCache.class, args, null); } fieldValueCacheConfig = conf; useColdSearcher = getBool("query/useColdSearcher", false); @@ -296,11 +294,11 @@ public SolrConfig(SolrResourceLoader loader, String name, InputSource is) slowQueryThresholdMillis = getInt("query/slowQueryThresholdMillis", -1); for (SolrPluginInfo plugin : plugins) loadPluginInfo(plugin); - Map userCacheConfigs = CacheConfig.getConfigs(this, "query/cache"); + Map userCacheConfigs = CacheConfig.getMultipleConfigs(this, "query/cache"); List caches = getPluginInfos(SolrCache.class.getName()); if (!caches.isEmpty()) { for (PluginInfo c : caches) { - userCacheConfigs.put(c.name, new CacheConfig(c.attributes, StrUtils.join(c.pathInConfig, '/'))); + userCacheConfigs.put(c.name, CacheConfig.getConfig(this, "cache", c.attributes, null)); } } this.userCacheConfigs = Collections.unmodifiableMap(userCacheConfigs); @@ -374,17 +372,17 @@ public static final Version parseLuceneVersionString(final String matchVersion) .add(new 
SolrPluginInfo(TransformerFactory.class, "transformer", REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK)) .add(new SolrPluginInfo(SearchComponent.class, "searchComponent", REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK)) .add(new SolrPluginInfo(UpdateRequestProcessorFactory.class, "updateProcessor", REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK)) - .add(new SolrPluginInfo(SolrCache.class, SolrCache.TYPE, REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK)) - // TODO: WTF is up with queryConverter??? - // it apparently *only* works as a singleton? - SOLR-4304 - // and even then -- only if there is a single SpellCheckComponent - // because of queryConverter.setIndexAnalyzer + .add(new SolrPluginInfo(SolrCache.class, "cache", REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK)) + // TODO: WTF is up with queryConverter??? + // it apparently *only* works as a singleton? - SOLR-4304 + // and even then -- only if there is a single SpellCheckComponent + // because of queryConverter.setIndexAnalyzer .add(new SolrPluginInfo(QueryConverter.class, "queryConverter", REQUIRE_NAME, REQUIRE_CLASS)) - .add(new SolrPluginInfo(RuntimeLib.class, RuntimeLib.TYPE, REQUIRE_NAME, MULTI_OK)) - // this is hackish, since it picks up all SolrEventListeners, - // regardless of when/how/why they are used (or even if they are - // declared outside of the appropriate context) but there's no nice - // way around that in the PluginInfo framework + .add(new SolrPluginInfo(PluginBag.RuntimeLib.class, "runtimeLib", REQUIRE_NAME, MULTI_OK)) + // this is hackish, since it picks up all SolrEventListeners, + // regardless of when/how/why they are used (or even if they are + // declared outside of the appropriate context) but there's no nice + // way around that in the PluginInfo framework .add(new SolrPluginInfo(InitParams.class, InitParams.TYPE, MULTI_OK, REQUIRE_NAME_IN_OVERLAY)) .add(new SolrPluginInfo(SolrEventListener.class, "//listener", REQUIRE_CLASS, MULTI_OK, REQUIRE_NAME_IN_OVERLAY)) @@ -534,9 +532,6 @@ public List readPluginInfos(String tag, 
boolean requireName, boolean NodeList nodes = (NodeList) evaluate(tag, XPathConstants.NODESET); for (int i = 0; i < nodes.getLength(); i++) { PluginInfo pluginInfo = new PluginInfo(nodes.item(i), "[solrconfig.xml] " + tag, requireName, requireClass); - if (requireName) { - pluginInfo.pathInConfig = Arrays.asList(tag, pluginInfo.name); - } if (pluginInfo.isEnabled()) result.add(pluginInfo); } return result; @@ -610,7 +605,7 @@ public Map toMap(Map map) { "cacheControl", cacheControlHeader); } - public enum LastModFrom { + public static enum LastModFrom { OPENTIME, DIRLASTMOD, BOGUS; /** @@ -762,24 +757,20 @@ public List getPluginInfos(String type) { Map infos = overlay.getNamedPlugins(info.getCleanTag()); if (!infos.isEmpty()) { LinkedHashMap map = new LinkedHashMap<>(); - if (result != null) { - for (PluginInfo pluginInfo : result) { - //just create a UUID for the time being so that map key is not null - String name = pluginInfo.name == null ? - UUID.randomUUID().toString().toLowerCase(Locale.ROOT) : - pluginInfo.name; - map.put(name, pluginInfo); - } + if (result != null) for (PluginInfo pluginInfo : result) { + //just create a UUID for the time being so that map key is not null + String name = pluginInfo.name == null ? + UUID.randomUUID().toString().toLowerCase(Locale.ROOT) : + pluginInfo.name; + map.put(name, pluginInfo); } for (Map.Entry e : infos.entrySet()) { - PluginInfo value = new PluginInfo(info.getCleanTag(), e.getValue()); - value.pathInConfig = Arrays.asList(info.getCleanTag(),e.getKey()); - map.put(e.getKey(), value); + map.put(e.getKey(), new PluginInfo(info.getCleanTag(), e.getValue())); } result = new ArrayList<>(map.values()); } } - return result == null ? Collections.emptyList() : result; + return result == null ? 
Collections.emptyList() : result; } public PluginInfo getPluginInfo(String type) { @@ -896,7 +887,7 @@ public String get(String path, String def) { @Override public Map toMap(Map result) { if (getZnodeVersion() > -1) result.put(ZNODEVER, getZnodeVersion()); - result.put("luceneMatchVersion", luceneMatchVersion); + result.put(IndexSchema.LUCENE_MATCH_VERSION_PARAM, luceneMatchVersion); result.put("updateHandler", getUpdateHandlerInfo()); Map m = new LinkedHashMap(); result.put("query", m); @@ -954,7 +945,7 @@ public Map toMap(Map result) { private void addCacheConfig(Map queryMap, CacheConfig... cache) { if (cache == null) return; - for (CacheConfig config : cache) if (config != null) queryMap.put(config.getName(), config); + for (CacheConfig config : cache) if (config != null) queryMap.put(config.getNodeName(), config); } diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java index 259ff1685ece..aff53a355b94 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrCore.java +++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java @@ -79,7 +79,6 @@ import org.apache.solr.cloud.CloudDescriptor; import org.apache.solr.cloud.RecoveryStrategy; import org.apache.solr.cloud.ZkSolrResourceLoader; -import org.apache.solr.common.MapWriter; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.cloud.ClusterState; @@ -109,8 +108,8 @@ import org.apache.solr.handler.component.SearchComponent; import org.apache.solr.logging.MDCLoggingContext; import org.apache.solr.metrics.SolrCoreMetricManager; -import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.metrics.SolrMetricProducer; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrRequestHandler; import org.apache.solr.response.BinaryResponseWriter; @@ -194,8 +193,6 @@ public final class 
SolrCore implements SolrInfoBean, SolrMetricProducer, Closeab private boolean isReloaded = false; - private StatsCache statsCache; - private final SolrConfig solrConfig; private final SolrResourceLoader resourceLoader; private volatile IndexSchema schema; @@ -234,22 +231,17 @@ public final class SolrCore implements SolrInfoBean, SolrMetricProducer, Closeab private final CoreContainer coreContainer; private Set metricNames = ConcurrentHashMap.newKeySet(); - private final String metricTag = getUniqueMetricTag(null); + private final String metricTag = SolrMetricProducer.getUniqueMetricTag(this, null); + private final SolrMetricsContext solrMetricsContext; public volatile boolean searchEnabled = true; public volatile boolean indexEnabled = true; public volatile boolean readOnly = false; - private List packageListeners = new ArrayList<>(); - public Set getMetricNames() { return metricNames; } - public List getPackageListeners(){ - return Collections.unmodifiableList(packageListeners); - } - public Date getStartTimeStamp() { return startTime; } @@ -360,26 +352,6 @@ public String getIndexDir() { } } - void packageUpdated(RuntimeLib lib) { - for (PkgListener listener : packageListeners) { - if(lib.getName().equals(listener.packageName())) listener.changed(lib); - } - } - public void addPackageListener(PkgListener listener){ - packageListeners.add(listener); - } - - public interface PkgListener { - - String packageName(); - - PluginInfo pluginInfo(); - - void changed(RuntimeLib lib); - - MapWriter lib(); - } - /** * Returns the indexdir as given in index.properties. 
If index.properties exists in dataDir and @@ -865,7 +837,7 @@ private UpdateHandler createReloadedUpdateHandler(String className, String msg, for (Constructor con : cons) { Class[] types = con.getParameterTypes(); if (types.length == 2 && types[0] == SolrCore.class && types[1] == UpdateHandler.class) { - return (UpdateHandler) con.newInstance(this, updateHandler); + return UpdateHandler.class.cast(con.newInstance(this, updateHandler)); } } throw new SolrException(ErrorCode.SERVER_ERROR, "Error Instantiating " + msg + ", " + className + " could not find proper constructor for " + UpdateHandler.class.getName()); @@ -885,12 +857,7 @@ private UpdateHandler createReloadedUpdateHandler(String className, String msg, public T createInitInstance(PluginInfo info, Class cast, String msg, String defClassName) { if (info == null) return null; - String pkg = info.attributes.get(CommonParams.PACKAGE); - ResourceLoader resourceLoader = pkg != null? - coreContainer.getPackageManager().getResourceLoader(pkg): - getResourceLoader(); - - T o = createInstance(info.className == null ? defClassName : info.className, cast, msg, this, resourceLoader); + T o = createInstance(info.className == null ? 
defClassName : info.className, cast, msg, this, getResourceLoader()); if (o instanceof PluginInfoInitialized) { ((PluginInfoInitialized) o).init(info); } else if (o instanceof NamedListInitializedPlugin) { @@ -953,6 +920,7 @@ public SolrCore(CoreContainer coreContainer, String name, String dataDir, SolrCo this.configSetProperties = configSetProperties; // Initialize the metrics manager this.coreMetricManager = initCoreMetricManager(config); + solrMetricsContext = coreMetricManager.getSolrMetricsContext(); this.coreMetricManager.loadReporters(); if (updateHandler == null) { @@ -974,15 +942,13 @@ public SolrCore(CoreContainer coreContainer, String name, String dataDir, SolrCo checkVersionFieldExistsInSchema(schema, coreDescriptor); - SolrMetricManager metricManager = coreContainer.getMetricManager(); - // initialize searcher-related metrics - initializeMetrics(metricManager, coreMetricManager.getRegistryName(), metricTag, null); + initializeMetrics(solrMetricsContext, null); SolrFieldCacheBean solrFieldCacheBean = new SolrFieldCacheBean(); // this is registered at the CONTAINER level because it's not core-specific - for now we // also register it here for back-compat - solrFieldCacheBean.initializeMetrics(metricManager, coreMetricManager.getRegistryName(), metricTag, "core"); + solrFieldCacheBean.initializeMetrics(solrMetricsContext, "core"); infoRegistry.put("fieldCache", solrFieldCacheBean); initSchema(config, schema); @@ -998,7 +964,7 @@ public SolrCore(CoreContainer coreContainer, String name, String dataDir, SolrCo this.codec = initCodec(solrConfig, this.schema); memClassLoader = new MemClassLoader( - RuntimeLib.getLibObjects(this, solrConfig.getPluginInfos(RuntimeLib.class.getName())), + PluginBag.RuntimeLib.getLibObjects(this, solrConfig.getPluginInfos(PluginBag.RuntimeLib.class.getName())), getResourceLoader()); initIndex(prev != null, reload); @@ -1014,8 +980,6 @@ public SolrCore(CoreContainer coreContainer, String name, String dataDir, SolrCo reqHandlers = 
new RequestHandlers(this); reqHandlers.initHandlersFromConfig(solrConfig); - statsCache = initStatsCache(); - // cause the executor to stall so firstSearcher events won't fire // until after inform() has been called for all components. // searchExecutor must be single-threaded for this to work @@ -1051,8 +1015,9 @@ public SolrCore(CoreContainer coreContainer, String name, String dataDir, SolrCo // Allow the directory factory to report metrics if (directoryFactory instanceof SolrMetricProducer) { - ((SolrMetricProducer) directoryFactory).initializeMetrics(metricManager, coreMetricManager.getRegistryName(), - metricTag, "directoryFactory"); + // XXX use deprecated method for back-compat, remove in 9.0 + ((SolrMetricProducer) directoryFactory).initializeMetrics( + solrMetricsContext.metricManager, solrMetricsContext.registry, solrMetricsContext.tag, "directoryFactory"); } // seed version buckets with max from index during core initialization ... requires a searcher! @@ -1199,61 +1164,66 @@ private SolrCoreMetricManager initCoreMetricManager(SolrConfig config) { } @Override - public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) { - newSearcherCounter = manager.counter(this, registry, "new", Category.SEARCHER.toString()); - newSearcherTimer = manager.timer(this, registry, "time", Category.SEARCHER.toString(), "new"); - newSearcherWarmupTimer = manager.timer(this, registry, "warmup", Category.SEARCHER.toString(), "new"); - newSearcherMaxReachedCounter = manager.counter(this, registry, "maxReached", Category.SEARCHER.toString(), "new"); - newSearcherOtherErrorsCounter = manager.counter(this, registry, "errors", Category.SEARCHER.toString(), "new"); - - manager.registerGauge(this, registry, () -> name == null ? 
"(null)" : name, getMetricTag(), true, "coreName", Category.CORE.toString()); - manager.registerGauge(this, registry, () -> startTime, getMetricTag(), true, "startTime", Category.CORE.toString()); - manager.registerGauge(this, registry, () -> getOpenCount(), getMetricTag(), true, "refCount", Category.CORE.toString()); - manager.registerGauge(this, registry, () -> resourceLoader.getInstancePath().toString(), getMetricTag(), true, "instanceDir", Category.CORE.toString()); - manager.registerGauge(this, registry, () -> isClosed() ? "(closed)" : getIndexDir(), getMetricTag(), true, "indexDir", Category.CORE.toString()); - manager.registerGauge(this, registry, () -> isClosed() ? 0 : getIndexSize(), getMetricTag(), true, "sizeInBytes", Category.INDEX.toString()); - manager.registerGauge(this, registry, () -> isClosed() ? "(closed)" : NumberUtils.readableSize(getIndexSize()), getMetricTag(), true, "size", Category.INDEX.toString()); + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + newSearcherCounter = parentContext.counter(this, "new", Category.SEARCHER.toString()); + newSearcherTimer = parentContext.timer(this, "time", Category.SEARCHER.toString(), "new"); + newSearcherWarmupTimer = parentContext.timer(this, "warmup", Category.SEARCHER.toString(), "new"); + newSearcherMaxReachedCounter = parentContext.counter(this, "maxReached", Category.SEARCHER.toString(), "new"); + newSearcherOtherErrorsCounter = parentContext.counter(this, "errors", Category.SEARCHER.toString(), "new"); + + parentContext.gauge(this, () -> name == null ? "(null)" : name, true, "coreName", Category.CORE.toString()); + parentContext.gauge(this, () -> startTime, true, "startTime", Category.CORE.toString()); + parentContext.gauge(this, () -> getOpenCount(), true, "refCount", Category.CORE.toString()); + parentContext.gauge(this, () -> resourceLoader.getInstancePath().toString(), true, "instanceDir", Category.CORE.toString()); + parentContext.gauge(this, () -> isClosed() ? 
"(closed)" : getIndexDir(), true, "indexDir", Category.CORE.toString()); + parentContext.gauge(this, () -> isClosed() ? 0 : getIndexSize(), true, "sizeInBytes", Category.INDEX.toString()); + parentContext.gauge(this, () -> isClosed() ? "(closed)" : NumberUtils.readableSize(getIndexSize()), true, "size", Category.INDEX.toString()); if (coreContainer != null) { - manager.registerGauge(this, registry, () -> coreContainer.getNamesForCore(this), getMetricTag(), true, "aliases", Category.CORE.toString()); + parentContext.gauge(this, () -> coreContainer.getNamesForCore(this), true, "aliases", Category.CORE.toString()); final CloudDescriptor cd = getCoreDescriptor().getCloudDescriptor(); if (cd != null) { - manager.registerGauge(this, registry, () -> { + parentContext.gauge(this, () -> { if (cd.getCollectionName() != null) { return cd.getCollectionName(); } else { return "_notset_"; } - }, getMetricTag(), true, "collection", Category.CORE.toString()); + }, true, "collection", Category.CORE.toString()); - manager.registerGauge(this, registry, () -> { + parentContext.gauge(this, () -> { if (cd.getShardId() != null) { return cd.getShardId(); } else { return "_auto_"; } - }, getMetricTag(), true, "shard", Category.CORE.toString()); + }, true, "shard", Category.CORE.toString()); } } // initialize disk total / free metrics Path dataDirPath = Paths.get(dataDir); File dataDirFile = dataDirPath.toFile(); - manager.registerGauge(this, registry, () -> dataDirFile.getTotalSpace(), getMetricTag(), true, "totalSpace", Category.CORE.toString(), "fs"); - manager.registerGauge(this, registry, () -> dataDirFile.getUsableSpace(), getMetricTag(), true, "usableSpace", Category.CORE.toString(), "fs"); - manager.registerGauge(this, registry, () -> dataDirPath.toAbsolutePath().toString(), getMetricTag(), true, "path", Category.CORE.toString(), "fs"); - manager.registerGauge(this, registry, () -> { + parentContext.gauge(this, () -> dataDirFile.getTotalSpace(), true, "totalSpace", 
Category.CORE.toString(), "fs"); + parentContext.gauge(this, () -> dataDirFile.getUsableSpace(), true, "usableSpace", Category.CORE.toString(), "fs"); + parentContext.gauge(this, () -> dataDirPath.toAbsolutePath().toString(), true, "path", Category.CORE.toString(), "fs"); + parentContext.gauge(this, () -> { try { return org.apache.lucene.util.IOUtils.spins(dataDirPath.toAbsolutePath()); } catch (IOException e) { // default to spinning return true; } - }, getMetricTag(), true, "spins", Category.CORE.toString(), "fs"); + }, true, "spins", Category.CORE.toString(), "fs"); } public String getMetricTag() { return metricTag; } + @Override + public SolrMetricsContext getSolrMetricsContext() { + return solrMetricsContext; + } + private void checkVersionFieldExistsInSchema(IndexSchema schema, CoreDescriptor coreDescriptor) { if (null != coreDescriptor.getCloudDescriptor()) { // we are evidently running in cloud mode. @@ -1449,7 +1419,10 @@ public Codec getCodec() { return factory.getCodec(); } - private StatsCache initStatsCache() { + /** + * Create an instance of {@link StatsCache} using configured parameters. + */ + public StatsCache createStatsCache() { final StatsCache cache; PluginInfo pluginInfo = solrConfig.getPluginInfo(StatsCache.class.getName()); if (pluginInfo != null && pluginInfo.className != null && pluginInfo.className.length() > 0) { @@ -1463,13 +1436,6 @@ private StatsCache initStatsCache() { return cache; } - /** - * Get the StatsCache. 
- */ - public StatsCache getStatsCache() { - return statsCache; - } - /** * Load the request processors */ @@ -2448,6 +2414,7 @@ public RefCounted getSearcher(boolean forceNew, boolean retur if (!success) { newSearcherOtherErrorsCounter.inc(); + ; synchronized (searcherLock) { onDeckSearchers--; @@ -3150,7 +3117,8 @@ private static boolean checkStale(SolrZkClient zkClient, String zkPath, int curr try { Stat stat = zkClient.exists(zkPath, null, true); if (stat == null) { - return currentVersion > -1; + if (currentVersion > -1) return true; + return false; } if (stat.getVersion() > currentVersion) { log.debug("{} is stale will need an update from {} to {}", zkPath, currentVersion, stat.getVersion()); diff --git a/solr/core/src/java/org/apache/solr/core/SolrInfoBean.java b/solr/core/src/java/org/apache/solr/core/SolrInfoBean.java index bfb342889ed0..dc0f59910c5d 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrInfoBean.java +++ b/solr/core/src/java/org/apache/solr/core/SolrInfoBean.java @@ -21,6 +21,8 @@ import com.codahale.metrics.MetricRegistry; import org.apache.solr.metrics.SolrMetricManager; +import org.apache.solr.metrics.SolrMetricProducer; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.util.stats.MetricUtils; /** @@ -77,6 +79,10 @@ default Set getMetricNames() { * (default is null, which means no registry). */ default MetricRegistry getMetricRegistry() { + if (this instanceof SolrMetricProducer) { + SolrMetricsContext context = ((SolrMetricProducer)this).getSolrMetricsContext(); + return context != null ? 
context.getMetricRegistry() : null; + } return null; } diff --git a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java index b3dc5e450d98..f27edbc51495 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java +++ b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java @@ -576,8 +576,8 @@ public Class findClass(String cname, Class expectedType, Str } } } - - static final String[] empty = new String[0]; + + static final String empty[] = new String[0]; @Override public T newInstance(String name, Class expectedType) { @@ -808,7 +808,6 @@ public static Path locateSolrHome() { * manipulated using select Solr features (e.g. streaming expressions). */ public static final String USER_FILES_DIRECTORY = "userfiles"; - public static final String BLOBS_DIRECTORY = "blobs"; public static void ensureUserFilesDataDir(Path solrHome) { final Path userFilesPath = getUserFilesPath(solrHome); final File userFilesDirectory = new File(userFilesPath.toString()); @@ -824,28 +823,10 @@ public static void ensureUserFilesDataDir(Path solrHome) { } } - public static void ensureBlobsDir(Path solrHome) { - final Path blobsDir = getBlobsDirPath(solrHome); - final File blobsFilesDirectory = new File(blobsDir.toString()); - if (! blobsFilesDirectory.exists()) { - try { - final boolean created = blobsFilesDirectory.mkdir(); - if (! created) { - log.warn("Unable to create [{}] directory in SOLR_HOME [{}]. Features requiring this directory may fail.", BLOBS_DIRECTORY, solrHome); - } - } catch (Exception e) { - log.warn("Unable to create [" + BLOBS_DIRECTORY + "] directory in SOLR_HOME [" + solrHome + "]. 
Features requiring this directory may fail.", e); - } - } - } - - public static Path getBlobsDirPath(Path solrHome) { - return Paths.get(solrHome.toAbsolutePath().toString(), BLOBS_DIRECTORY).toAbsolutePath(); - } - public static Path getUserFilesPath(Path solrHome) { return Paths.get(solrHome.toAbsolutePath().toString(), USER_FILES_DIRECTORY).toAbsolutePath(); } + // Logs a message only once per startup private static void logOnceInfo(String key, String msg) { if (!loggedOnce.contains(key)) { @@ -942,7 +923,7 @@ public static void persistConfLocally(SolrResourceLoader loader, String resource throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg); } } - try (OutputStream out = new FileOutputStream(confFile)) { + try (OutputStream out = new FileOutputStream(confFile);) { out.write(content); } log.info("Written confile " + resourceName); diff --git a/solr/core/src/java/org/apache/solr/handler/BlobHandler.java b/solr/core/src/java/org/apache/solr/handler/BlobHandler.java index ee3adaee52a5..d2d0d8a46125 100644 --- a/solr/core/src/java/org/apache/solr/handler/BlobHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/BlobHandler.java @@ -20,7 +20,6 @@ import java.io.InputStream; import java.io.OutputStream; import java.lang.invoke.MethodHandles; -import java.math.BigInteger; import java.nio.ByteBuffer; import java.security.MessageDigest; import java.util.Collection; @@ -28,6 +27,7 @@ import java.util.List; import java.util.Map; +import org.apache.commons.codec.binary.Hex; import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; @@ -114,7 +114,7 @@ public void handleRequestBody(final SolrQueryRequest req, SolrQueryResponse rsp) } MessageDigest m = MessageDigest.getInstance("MD5"); m.update(payload.array(), payload.position(), payload.limit()); - String md5 = new BigInteger(1, m.digest()).toString(16); + String md5 = new String(Hex.encodeHex(m.digest())); int duplicateCount = 
req.getSearcher().count(new TermQuery(new Term("md5", md5))); if (duplicateCount > 0) { diff --git a/solr/core/src/java/org/apache/solr/handler/CatStream.java b/solr/core/src/java/org/apache/solr/handler/CatStream.java index 177475505825..6a4752e6673d 100644 --- a/solr/core/src/java/org/apache/solr/handler/CatStream.java +++ b/solr/core/src/java/org/apache/solr/handler/CatStream.java @@ -125,13 +125,14 @@ public void close() throws IOException {} @Override public Tuple read() throws IOException { if (maxLines >= 0 && linesReturned >= maxLines) { - if (currentFileLines != null) currentFileLines.close(); + closeCurrentFileIfSet(); return createEofTuple(); } else if (currentFileHasMoreLinesToRead()) { return fetchNextLineFromCurrentFile(); } else if (advanceToNextFileWithData()) { return fetchNextLineFromCurrentFile(); } else { // No more data + closeCurrentFileIfSet(); return createEofTuple(); } } @@ -187,9 +188,7 @@ private List validateAndSetFilepathsInSandbox() { private boolean advanceToNextFileWithData() throws IOException { while (allFilesToCrawl.hasNext()) { - if (currentFileLines != null) { - currentFileLines.close(); - } + closeCurrentFileIfSet(); currentFilePath = allFilesToCrawl.next(); currentFileLines = FileUtils.lineIterator(new File(currentFilePath.absolutePath), "UTF-8"); if (currentFileLines.hasNext()) return true; @@ -221,6 +220,14 @@ private String getAbsolutePath(String pathRelativeToChroot) { return Paths.get(chroot, pathRelativeToChroot).toString(); } + private void closeCurrentFileIfSet() { + if (currentFilePath != null) { + currentFileLines.close(); + currentFilePath = null; + currentFileLines = null; + } + } + private void findReadableFiles(CrawlFile seed, List foundFiles) { final File entry = new File(seed.absolutePath); diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java index fef45838b54f..436cd74e45cf 100644 --- 
a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java @@ -93,7 +93,7 @@ import org.apache.solr.core.backup.repository.LocalFileSystemRepository; import org.apache.solr.handler.IndexFetcher.IndexFetchResult; import org.apache.solr.metrics.MetricsMap; -import org.apache.solr.metrics.SolrMetrics; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.search.SolrIndexSearcher; @@ -865,20 +865,20 @@ private CommitVersionInfo getIndexVersion() { } @Override - public void initializeMetrics(SolrMetrics m) { - super.initializeMetrics(m); - solrMetrics.gauge(this, () -> (core != null && !core.isClosed() ? NumberUtils.readableSize(core.getIndexSize()) : ""), - true, "indexSize", getCategory().toString()); - solrMetrics.gauge(this, () -> (core != null && !core.isClosed() ? getIndexVersion().toString() : ""), - true, "indexVersion", getCategory().toString()); - solrMetrics.gauge(this, () -> (core != null && !core.isClosed() ? getIndexVersion().generation : 0), - true, GENERATION, getCategory().toString()); - solrMetrics.gauge(this, () -> (core != null && !core.isClosed() ? core.getIndexDir() : ""), - true, "indexPath", getCategory().toString()); - solrMetrics.gauge(this, () -> isMaster, - true, "isMaster", getCategory().toString()); - solrMetrics.gauge(this, () -> isSlave, - true, "isSlave", getCategory().toString()); + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + super.initializeMetrics(parentContext, scope); + solrMetricsContext.gauge(this, () -> (core != null && !core.isClosed() ? NumberUtils.readableSize(core.getIndexSize()) : ""), + true, "indexSize", getCategory().toString(), scope); + solrMetricsContext.gauge(this, () -> (core != null && !core.isClosed() ? 
getIndexVersion().toString() : ""), + true, "indexVersion", getCategory().toString(), scope); + solrMetricsContext.gauge(this, () -> (core != null && !core.isClosed() ? getIndexVersion().generation : 0), + true, GENERATION, getCategory().toString(), scope); + solrMetricsContext.gauge(this, () -> (core != null && !core.isClosed() ? core.getIndexDir() : ""), + true, "indexPath", getCategory().toString(), scope); + solrMetricsContext.gauge(this, () -> isMaster, + true, "isMaster", getCategory().toString(), scope); + solrMetricsContext.gauge(this, () -> isSlave, + true, "isSlave", getCategory().toString(), scope); final MetricsMap fetcherMap = new MetricsMap((detailed, map) -> { IndexFetcher fetcher = currentIndexFetcher; if (fetcher != null) { @@ -907,13 +907,13 @@ public void initializeMetrics(SolrMetrics m) { addVal(map, IndexFetcher.CONF_FILES_REPLICATED, props, String.class); } }); - solrMetrics.gauge(this , fetcherMap, true, "fetcher", getCategory().toString()); - solrMetrics.gauge(this, () -> isMaster && includeConfFiles != null ? includeConfFiles : "", - true, "confFilesToReplicate", getCategory().toString()); - solrMetrics.gauge(this, () -> isMaster ? getReplicateAfterStrings() : Collections.emptyList(), - true, REPLICATE_AFTER, getCategory().toString()); - solrMetrics.gauge(this, () -> isMaster && replicationEnabled.get(), - true, "replicationEnabled", getCategory().toString()); + solrMetricsContext.gauge(this , fetcherMap, true, "fetcher", getCategory().toString(), scope); + solrMetricsContext.gauge(this, () -> isMaster && includeConfFiles != null ? includeConfFiles : "", + true, "confFilesToReplicate", getCategory().toString(), scope); + solrMetricsContext.gauge(this, () -> isMaster ? 
getReplicateAfterStrings() : Collections.emptyList(), + true, REPLICATE_AFTER, getCategory().toString(), scope); + solrMetricsContext.gauge(this, () -> isMaster && replicationEnabled.get(), + true, "replicationEnabled", getCategory().toString(), scope); } //TODO Should a failure retrieving any piece of info mark the overall request as a failure? Is there a core set of values that are required to make a response here useful? diff --git a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java index 499db959e2e9..4d9e96b8b74b 100644 --- a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java +++ b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java @@ -24,7 +24,6 @@ import com.codahale.metrics.Counter; import com.codahale.metrics.Meter; -import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; import com.google.common.collect.ImmutableList; import org.apache.solr.api.Api; @@ -40,7 +39,7 @@ import org.apache.solr.core.SolrInfoBean; import org.apache.solr.metrics.MetricsMap; import org.apache.solr.metrics.SolrMetricProducer; -import org.apache.solr.metrics.SolrMetrics; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrRequestHandler; import org.apache.solr.response.SolrQueryResponse; @@ -79,6 +78,7 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo private PluginInfo pluginInfo; private Set metricNames = ConcurrentHashMap.newKeySet(); + protected SolrMetricsContext solrMetricsContext; @SuppressForbidden(reason = "Need currentTimeMillis, used only for stats output") @@ -140,27 +140,25 @@ public void init(NamedList args) { } - protected SolrMetrics solrMetrics; - @Override - public SolrMetrics getMetrics() { - return solrMetrics; + public SolrMetricsContext getSolrMetricsContext() { + return solrMetricsContext; } @Override - 
public void initializeMetrics(SolrMetrics m) { - this.solrMetrics = m.getChildInfo(this); - numErrors = solrMetrics.meter(this, "errors", getCategory().toString()); - numServerErrors = solrMetrics.meter(this, "serverErrors", getCategory().toString()); - numClientErrors = solrMetrics.meter(this, "clientErrors", getCategory().toString()); - numTimeouts = solrMetrics.meter(this, "timeouts", getCategory().toString()); - requests = solrMetrics.counter(this, "requests", getCategory().toString()); + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + this.solrMetricsContext = parentContext.getChildContext(this); + numErrors = solrMetricsContext.meter(this, "errors", getCategory().toString(), scope); + numServerErrors = solrMetricsContext.meter(this, "serverErrors", getCategory().toString(), scope); + numClientErrors = solrMetricsContext.meter(this, "clientErrors", getCategory().toString(), scope); + numTimeouts = solrMetricsContext.meter(this, "timeouts", getCategory().toString(), scope); + requests = solrMetricsContext.counter(this, "requests", getCategory().toString(), scope); MetricsMap metricsMap = new MetricsMap((detail, map) -> shardPurposes.forEach((k, v) -> map.put(k, v.getCount()))); - solrMetrics.gauge(this, metricsMap, true, "shardRequests", getCategory().toString()); - requestTimes = solrMetrics.timer(this,"requestTimes", getCategory().toString()); - totalTime = solrMetrics.counter(this, "totalTime", getCategory().toString()); - solrMetrics.gauge(this, () -> handlerStart, true, "handlerStart", getCategory().toString()); + solrMetricsContext.gauge(this, metricsMap, true, "shardRequests", getCategory().toString(), scope); + requestTimes = solrMetricsContext.timer(this,"requestTimes", getCategory().toString(), scope); + totalTime = solrMetricsContext.counter(this, "totalTime", getCategory().toString(), scope); + solrMetricsContext.gauge(this, () -> handlerStart, true, "handlerStart", getCategory().toString(), scope); } public static 
SolrParams getSolrParamsFromNamedList(NamedList args, String key) { @@ -274,11 +272,6 @@ public Set getMetricNames() { return metricNames; } - @Override - public MetricRegistry getMetricRegistry() { - return solrMetrics.getRegistry(); - } - @Override public SolrRequestHandler getSubHandler(String subPath) { return null; diff --git a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java index 789526e476ce..11c64048e694 100644 --- a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java @@ -36,7 +36,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; -import java.util.function.Consumer; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -48,7 +47,6 @@ import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.cloud.ZkController; import org.apache.solr.cloud.ZkSolrResourceLoader; -import org.apache.solr.common.MapWriter; import org.apache.solr.common.SolrException; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; @@ -64,9 +62,9 @@ import org.apache.solr.common.util.StrUtils; import org.apache.solr.common.util.Utils; import org.apache.solr.core.ConfigOverlay; +import org.apache.solr.core.PluginBag; import org.apache.solr.core.PluginInfo; import org.apache.solr.core.RequestParams; -import org.apache.solr.core.RuntimeLib; import org.apache.solr.core.SolrConfig; import org.apache.solr.core.SolrCore; import org.apache.solr.core.SolrResourceLoader; @@ -152,262 +150,11 @@ public void inform(SolrCore core) { public static boolean getImmutable(SolrCore core) { NamedList configSetProperties = core.getConfigSetProperties(); - if (configSetProperties == null) return false; + if(configSetProperties == null) return false; 
Object immutable = configSetProperties.get(IMMUTABLE_CONFIGSET_ARG); - return immutable != null && Boolean.parseBoolean(immutable.toString()); + return immutable != null ? Boolean.parseBoolean(immutable.toString()) : false; } - public static String validateName(String s) { - for (int i = 0; i < s.length(); i++) { - char c = s.charAt(i); - if ((c >= 'A' && c <= 'Z') || - (c >= 'a' && c <= 'z') || - (c >= '0' && c <= '9') || - c == '_' || - c == '-' || - c == '.' - ) continue; - else { - return formatString("''{0}'' name should only have chars [a-zA-Z_-.0-9] ", s); - } - } - return null; - } - - /** - * Block up to a specified maximum time until we see agreement on the schema - * version in ZooKeeper across all replicas for a collection. - */ - public static void waitForAllReplicasState(String collection, - ZkController zkController, - String prop, - int expectedVersion, - int maxWaitSecs) { - final RTimer timer = new RTimer(); - // get a list of active replica cores to query for the schema zk version (skipping this core of course) - List concurrentTasks = new ArrayList<>(); - - for (String coreUrl : getActiveReplicaCoreUrls(zkController, collection)) { - PerReplicaCallable e = new PerReplicaCallable(coreUrl, prop, expectedVersion, maxWaitSecs); - concurrentTasks.add(e); - } - if (concurrentTasks.isEmpty()) return; // nothing to wait for ... 
- - log.info(formatString("Waiting up to {0} secs for {1} replicas to set the property {2} to be of version {3} for collection {4}", - maxWaitSecs, concurrentTasks.size(), prop, expectedVersion, collection)); - - // use an executor service to invoke schema zk version requests in parallel with a max wait time - execInparallel(concurrentTasks, parallelExecutor -> { - try { - List failedList = executeAll(expectedVersion, maxWaitSecs, concurrentTasks, parallelExecutor); - // if any tasks haven't completed within the specified timeout, it's an error - if (failedList != null) - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, - formatString("{0} out of {1} the property {2} to be of version {3} within {4} seconds! Failed cores: {5}", - failedList.size(), concurrentTasks.size() + 1, prop, expectedVersion, maxWaitSecs, failedList)); - } catch (InterruptedException e) { - log.warn(formatString( - "Core was interrupted . trying to set the property {0} to version {1} to propagate to {2} replicas for collection {3}", - prop, expectedVersion, concurrentTasks.size(), collection)); - Thread.currentThread().interrupt(); - } - }); - - log.info("Took {}ms to set the property {} to be of version {} for collection {}", - timer.getTime(), prop, expectedVersion, collection); - } - - public static void execInparallel(List concurrentTasks, Consumer fun) { - int poolSize = Math.min(concurrentTasks.size(), 10); - ExecutorService parallelExecutor = - ExecutorUtil.newMDCAwareFixedThreadPool(poolSize, new DefaultSolrThreadFactory("solrHandlerExecutor")); - try { - - fun.accept(parallelExecutor); - - } finally { - ExecutorUtil.shutdownAndAwaitTermination(parallelExecutor); - } - } - - @Override - public SolrRequestHandler getSubHandler(String path) { - if (subPaths.contains(path)) return this; - if (path.startsWith("/params/")) return this; - List p = StrUtils.splitSmart(path, '/', true); - if (p.size() > 1) { - if (subPaths.contains("/" + p.get(0))) return this; - } - return null; 
- } - - - private static Set subPaths = new HashSet<>(Arrays.asList("/overlay", "/params", "/updateHandler", - "/query", "/jmx", "/requestDispatcher", "/znodeVersion")); - - static { - for (SolrConfig.SolrPluginInfo solrPluginInfo : SolrConfig.plugins) - subPaths.add("/" + solrPluginInfo.getCleanTag()); - - } - - //////////////////////// SolrInfoMBeans methods ////////////////////// - - - @Override - public String getDescription() { - return "Edit solrconfig.xml"; - } - - @Override - public Category getCategory() { - return Category.ADMIN; - } - - - public static final String SET_PROPERTY = "set-property"; - public static final String UNSET_PROPERTY = "unset-property"; - public static final String SET_USER_PROPERTY = "set-user-property"; - public static final String UNSET_USER_PROPERTY = "unset-user-property"; - public static final String SET = "set"; - public static final String UPDATE = "update"; - public static final String CREATE = "create"; - private static Set cmdPrefixes = ImmutableSet.of(CREATE, UPDATE, "delete", "add"); - - public static List executeAll(int expectedVersion, int maxWaitSecs, List concurrentTasks, ExecutorService parallelExecutor) throws InterruptedException { - List> results = - parallelExecutor.invokeAll(concurrentTasks, maxWaitSecs, TimeUnit.SECONDS); - - // determine whether all replicas have the update - List failedList = null; // lazily init'd - for (int f = 0; f < results.size(); f++) { - Boolean success = false; - Future next = results.get(f); - if (next.isDone() && !next.isCancelled()) { - // looks to have finished, but need to check if it succeeded - try { - success = next.get(); - } catch (ExecutionException e) { - // shouldn't happen since we checked isCancelled - } - } - - if (!success) { - String coreUrl = concurrentTasks.get(f).coreUrl; - log.warn("Core " + coreUrl + "could not get the expected version " + expectedVersion); - if (failedList == null) failedList = new ArrayList<>(); - failedList.add(coreUrl); - } - } - return 
failedList; - } - - public static class PerReplicaCallable extends SolrRequest implements Callable { - protected String coreUrl; - String prop; - protected int expectedZkVersion; - protected Number remoteVersion = null; - int maxWait; - - public PerReplicaCallable(String coreUrl, String prop, int expectedZkVersion, int maxWait) { - super(METHOD.GET, "/config/" + ZNODEVER); - this.coreUrl = coreUrl; - this.expectedZkVersion = expectedZkVersion; - this.prop = prop; - this.maxWait = maxWait; - } - - @Override - public SolrParams getParams() { - return new ModifiableSolrParams() - .set(prop, expectedZkVersion) - .set(CommonParams.WT, CommonParams.JAVABIN); - } - - @Override - public Boolean call() throws Exception { - final RTimer timer = new RTimer(); - int attempts = 0; - try (HttpSolrClient solr = new HttpSolrClient.Builder(coreUrl).build()) { - // eventually, this loop will get killed by the ExecutorService's timeout - while (true) { - try { - long timeElapsed = (long) timer.getTime() / 1000; - if (timeElapsed >= maxWait) { - return false; - } - log.info("Time elapsed : {} secs, maxWait {}", timeElapsed, maxWait); - Thread.sleep(100); - MapWriter resp = solr.httpUriRequest(this).future.get(); - if (verifyResponse(resp, attempts)) break; - attempts++; - } catch (Exception e) { - if (e instanceof InterruptedException) { - break; // stop looping - } else { - log.warn("Failed to get /schema/zkversion from " + coreUrl + " due to: " + e); - } - } - } - } - return true; - } - - protected boolean verifyResponse(MapWriter mw, int attempts) { - NamedList resp = (NamedList) mw; - if (resp != null) { - Map m = (Map) resp.get(ZNODEVER); - if (m != null) { - remoteVersion = (Number) m.get(prop); - if (remoteVersion != null && remoteVersion.intValue() >= expectedZkVersion) return true; - log.info(formatString("Could not get expectedVersion {0} from {1} for prop {2} after {3} attempts", expectedZkVersion, coreUrl, prop, attempts)); - - } - } - return false; - } - - - @Override - 
protected SolrResponse createResponse(SolrClient client) { - return null; - } - } - - public static List getActiveReplicaCoreUrls(ZkController zkController, - String collection) { - List activeReplicaCoreUrls = new ArrayList<>(); - ClusterState clusterState = zkController.getZkStateReader().getClusterState(); - Set liveNodes = clusterState.getLiveNodes(); - final DocCollection docCollection = clusterState.getCollectionOrNull(collection); - if (docCollection != null && docCollection.getActiveSlices() != null && docCollection.getActiveSlices().size() > 0) { - final Collection activeSlices = docCollection.getActiveSlices(); - for (Slice next : activeSlices) { - Map replicasMap = next.getReplicasMap(); - if (replicasMap != null) { - for (Map.Entry entry : replicasMap.entrySet()) { - Replica replica = entry.getValue(); - if (replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName())) { - activeReplicaCoreUrls.add(replica.getCoreUrl()); - } - } - } - } - } - return activeReplicaCoreUrls; - } - - @Override - public Name getPermissionName(AuthorizationContext ctx) { - switch (ctx.getHttpMethod()) { - case "GET": - return Name.CONFIG_READ_PERM; - case "POST": - return Name.CONFIG_EDIT_PERM; - default: - return null; - } - } private class Command { private final SolrQueryRequest req; @@ -510,54 +257,25 @@ private void handleGET() { private Map getConfigDetails(String componentType, SolrQueryRequest req) { String componentName = componentType == null ? 
null : req.getParams().get("componentName"); - if(componentName == null && parts.size() > 2){ - componentName = parts.get(2); - if(SolrRequestHandler.TYPE.equals(componentType)){ - componentName = "/"+componentName; - } - } - boolean showParams = req.getParams().getBool("expandParams", false); Map map = this.req.getCore().getSolrConfig().toMap(new LinkedHashMap<>()); - if (SolrRequestHandler.TYPE.equals(componentType) || componentType == null) { - Map reqHandlers = (Map) map.get(SolrRequestHandler.TYPE); - if (reqHandlers == null) map.put(SolrRequestHandler.TYPE, reqHandlers = new LinkedHashMap<>()); - List plugins = this.req.getCore().getImplicitHandlers(); - for (PluginInfo plugin : plugins) { - if (SolrRequestHandler.TYPE.equals(plugin.type)) { - if (!reqHandlers.containsKey(plugin.name)) { - reqHandlers.put(plugin.name, plugin); - } + if (componentType != null && !SolrRequestHandler.TYPE.equals(componentType)) return map; + Map reqHandlers = (Map) map.get(SolrRequestHandler.TYPE); + if (reqHandlers == null) map.put(SolrRequestHandler.TYPE, reqHandlers = new LinkedHashMap<>()); + List plugins = this.req.getCore().getImplicitHandlers(); + for (PluginInfo plugin : plugins) { + if (SolrRequestHandler.TYPE.equals(plugin.type)) { + if (!reqHandlers.containsKey(plugin.name)) { + reqHandlers.put(plugin.name, plugin); } } - if (showParams) { - for (Object o : reqHandlers.entrySet()) { - Map.Entry e = (Map.Entry) o; - if (componentName == null || e.getKey().equals(componentName)) { - Map m = expandUseParams(req, e.getValue()); - e.setValue(m); - } - } - } - } - - if (req.getParams().getBool("meta", false)) { - for (SolrCore.PkgListener pkgListener : req.getCore().getPackageListeners()) { - PluginInfo meta = pkgListener.pluginInfo(); - if (meta.pathInConfig != null) { - Object obj = Utils.getObjectByPath(map, false, meta.pathInConfig); - if (obj instanceof Map) { - Map m = (Map) obj; - m.put("_packageinfo_", pkgListener.lib()); - } else if(obj instanceof MapWriter){ - 
MapWriter mw = (MapWriter) obj; - Utils.setObjectByPath(map, meta.pathInConfig, (MapWriter) ew -> { - mw.writeMap(ew); - ew.put("_packageinfo_", pkgListener.lib()); - }, false); - } - } + if (!showParams) return map; + for (Object o : reqHandlers.entrySet()) { + Map.Entry e = (Map.Entry) o; + if (componentName == null || e.getKey().equals(componentName)) { + Map m = expandUseParams(req, e.getValue()); + e.setValue(m); } } @@ -633,8 +351,6 @@ private void handlePOST() throws IOException { } } } catch (Exception e) { - - log.error("error executing commands " + Utils.toJSONString(ops), e); resp.setException(e); resp.add(CommandOperation.ERR_MSGS, singletonList(SchemaManager.getErrorStr(e))); } @@ -709,7 +425,7 @@ private void handleParams(ArrayList ops, RequestParams params) List errs = CommandOperation.captureErrors(ops); if (!errs.isEmpty()) { - throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "error processing params", errs); + throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST,"error processing params", errs); } SolrResourceLoader loader = req.getCore().getResourceLoader(); @@ -772,7 +488,7 @@ private void handleCommands(List ops, ConfigOverlay overlay) t } List errs = CommandOperation.captureErrors(ops); if (!errs.isEmpty()) { - throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "error processing commands", errs); + throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST,"error processing commands", errs); } SolrResourceLoader loader = req.getCore().getResourceLoader(); @@ -810,20 +526,20 @@ private ConfigOverlay updateNamedPlugin(SolrConfig.SolrPluginInfo info, CommandO op.getMap(PluginInfo.INVARIANTS, null); op.getMap(PluginInfo.APPENDS, null); if (op.hasError()) return overlay; - if (info.clazz == RuntimeLib.class) { - if (!RuntimeLib.isEnabled()) { + if(info.clazz == PluginBag.RuntimeLib.class) { + if(!PluginBag.RuntimeLib.isEnabled()){ op.addError("Solr not 
started with -Denable.runtime.lib=true"); return overlay; } try { - new RuntimeLib(req.getCore().getCoreContainer()).init(new PluginInfo(info.tag, op.getDataMap())); + new PluginBag.RuntimeLib(req.getCore()).init(new PluginInfo(info.tag, op.getDataMap())); } catch (Exception e) { op.addError(e.getMessage()); log.error("can't load this plugin ", e); return overlay; } } - if (!verifyClass(op, clz, info)) return overlay; + if (!verifyClass(op, clz, info.clazz)) return overlay; if (pluginExists(info, overlay, name)) { if (isCeate) { op.addError(formatString(" ''{0}'' already exists . Do an ''{1}'' , if you want to change it ", name, "update-" + info.getTagCleanLower())); @@ -843,23 +559,16 @@ private ConfigOverlay updateNamedPlugin(SolrConfig.SolrPluginInfo info, CommandO private boolean pluginExists(SolrConfig.SolrPluginInfo info, ConfigOverlay overlay, String name) { List l = req.getCore().getSolrConfig().getPluginInfos(info.clazz.getName()); - for (PluginInfo pluginInfo : l) if (name.equals(pluginInfo.name)) return true; + for (PluginInfo pluginInfo : l) if(name.equals( pluginInfo.name)) return true; return overlay.getNamedPlugins(info.getCleanTag()).containsKey(name); } - private boolean verifyClass(CommandOperation op, String clz, SolrConfig.SolrPluginInfo pluginMeta) { + private boolean verifyClass(CommandOperation op, String clz, Class expected) { if (clz == null) return true; - PluginInfo info = new PluginInfo(pluginMeta.getCleanTag(), op.getDataMap()); - - if (info.isRuntimePlugin() && !RuntimeLib.isEnabled()) { - op.addError("node not started with enable.runtime.lib=true"); - return false; - } - - if (!"true".equals(String.valueOf(op.getStr(RuntimeLib.TYPE, null)))) { + if (!"true".equals(String.valueOf(op.getStr("runtimeLib", null)))) { //this is not dynamically loaded so we can verify the class right away try { - req.getCore().createInitInstance(new PluginInfo(SolrRequestHandler.TYPE, op.getDataMap()), pluginMeta.clazz, clz, ""); + 
req.getCore().createInitInstance(new PluginInfo(SolrRequestHandler.TYPE, op.getDataMap()), expected, clz, ""); } catch (Exception e) { op.addError(e.getMessage()); return false; @@ -957,6 +666,235 @@ private ConfigOverlay applySetProp(CommandOperation op, ConfigOverlay overlay) { } + public static String validateName(String s) { + for (int i = 0; i < s.length(); i++) { + char c = s.charAt(i); + if ((c >= 'A' && c <= 'Z') || + (c >= 'a' && c <= 'z') || + (c >= '0' && c <= '9') || + c == '_' || + c == '-' || + c == '.' + ) continue; + else { + return formatString("''{0}'' name should only have chars [a-zA-Z_-.0-9] ", s); + } + } + return null; + } + + @Override + public SolrRequestHandler getSubHandler(String path) { + if (subPaths.contains(path)) return this; + if (path.startsWith("/params/")) return this; + return null; + } + + + private static Set subPaths = new HashSet<>(Arrays.asList("/overlay", "/params", "/updateHandler", + "/query", "/jmx", "/requestDispatcher", "/znodeVersion")); + + static { + for (SolrConfig.SolrPluginInfo solrPluginInfo : SolrConfig.plugins) + subPaths.add("/" + solrPluginInfo.getCleanTag()); + + } + + //////////////////////// SolrInfoMBeans methods ////////////////////// + + + @Override + public String getDescription() { + return "Edit solrconfig.xml"; + } + + @Override + public Category getCategory() { + return Category.ADMIN; + } + + + public static final String SET_PROPERTY = "set-property"; + public static final String UNSET_PROPERTY = "unset-property"; + public static final String SET_USER_PROPERTY = "set-user-property"; + public static final String UNSET_USER_PROPERTY = "unset-user-property"; + public static final String SET = "set"; + public static final String UPDATE = "update"; + public static final String CREATE = "create"; + private static Set cmdPrefixes = ImmutableSet.of(CREATE, UPDATE, "delete", "add"); + + /** + * Block up to a specified maximum time until we see agreement on the schema + * version in ZooKeeper across all 
replicas for a collection. + */ + private static void waitForAllReplicasState(String collection, + ZkController zkController, + String prop, + int expectedVersion, + int maxWaitSecs) { + final RTimer timer = new RTimer(); + // get a list of active replica cores to query for the schema zk version (skipping this core of course) + List concurrentTasks = new ArrayList<>(); + + for (String coreUrl : getActiveReplicaCoreUrls(zkController, collection)) { + PerReplicaCallable e = new PerReplicaCallable(coreUrl, prop, expectedVersion, maxWaitSecs); + concurrentTasks.add(e); + } + if (concurrentTasks.isEmpty()) return; // nothing to wait for ... + + log.info(formatString("Waiting up to {0} secs for {1} replicas to set the property {2} to be of version {3} for collection {4}", + maxWaitSecs, concurrentTasks.size(), prop, expectedVersion, collection)); + + // use an executor service to invoke schema zk version requests in parallel with a max wait time + int poolSize = Math.min(concurrentTasks.size(), 10); + ExecutorService parallelExecutor = + ExecutorUtil.newMDCAwareFixedThreadPool(poolSize, new DefaultSolrThreadFactory("solrHandlerExecutor")); + try { + List> results = + parallelExecutor.invokeAll(concurrentTasks, maxWaitSecs, TimeUnit.SECONDS); + + // determine whether all replicas have the update + List failedList = null; // lazily init'd + for (int f = 0; f < results.size(); f++) { + Boolean success = false; + Future next = results.get(f); + if (next.isDone() && !next.isCancelled()) { + // looks to have finished, but need to check if it succeeded + try { + success = next.get(); + } catch (ExecutionException e) { + // shouldn't happen since we checked isCancelled + } + } + + if (!success) { + String coreUrl = concurrentTasks.get(f).coreUrl; + log.warn("Core " + coreUrl + "could not get the expected version " + expectedVersion); + if (failedList == null) failedList = new ArrayList<>(); + failedList.add(coreUrl); + } + } + + // if any tasks haven't completed within the 
specified timeout, it's an error + if (failedList != null) + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, + formatString("{0} out of {1} the property {2} to be of version {3} within {4} seconds! Failed cores: {5}", + failedList.size(), concurrentTasks.size() + 1, prop, expectedVersion, maxWaitSecs, failedList)); + + } catch (InterruptedException ie) { + log.warn(formatString( + "Core was interrupted . trying to set the property {1} to version {2} to propagate to {3} replicas for collection {4}", + prop, expectedVersion, concurrentTasks.size(), collection)); + Thread.currentThread().interrupt(); + } finally { + ExecutorUtil.shutdownAndAwaitTermination(parallelExecutor); + } + + log.info("Took {}ms to set the property {} to be of version {} for collection {}", + timer.getTime(), prop, expectedVersion, collection); + } + + public static List getActiveReplicaCoreUrls(ZkController zkController, + String collection) { + List activeReplicaCoreUrls = new ArrayList<>(); + ClusterState clusterState = zkController.getZkStateReader().getClusterState(); + Set liveNodes = clusterState.getLiveNodes(); + final DocCollection docCollection = clusterState.getCollectionOrNull(collection); + if (docCollection != null && docCollection.getActiveSlices() != null && docCollection.getActiveSlices().size() > 0) { + final Collection activeSlices = docCollection.getActiveSlices(); + for (Slice next : activeSlices) { + Map replicasMap = next.getReplicasMap(); + if (replicasMap != null) { + for (Map.Entry entry : replicasMap.entrySet()) { + Replica replica = entry.getValue(); + if (replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName())) { + activeReplicaCoreUrls.add(replica.getCoreUrl()); + } + } + } + } + } + return activeReplicaCoreUrls; + } + + @Override + public Name getPermissionName(AuthorizationContext ctx) { + switch (ctx.getHttpMethod()) { + case "GET": + return Name.CONFIG_READ_PERM; + case "POST": + return Name.CONFIG_EDIT_PERM; + 
default: + return null; + } + } + + private static class PerReplicaCallable extends SolrRequest implements Callable { + String coreUrl; + String prop; + int expectedZkVersion; + Number remoteVersion = null; + int maxWait; + + PerReplicaCallable(String coreUrl, String prop, int expectedZkVersion, int maxWait) { + super(METHOD.GET, "/config/" + ZNODEVER); + this.coreUrl = coreUrl; + this.expectedZkVersion = expectedZkVersion; + this.prop = prop; + this.maxWait = maxWait; + } + + @Override + public SolrParams getParams() { + return new ModifiableSolrParams() + .set(prop, expectedZkVersion) + .set(CommonParams.WT, CommonParams.JAVABIN); + } + + @Override + public Boolean call() throws Exception { + final RTimer timer = new RTimer(); + int attempts = 0; + try (HttpSolrClient solr = new HttpSolrClient.Builder(coreUrl).build()) { + // eventually, this loop will get killed by the ExecutorService's timeout + while (true) { + try { + long timeElapsed = (long) timer.getTime() / 1000; + if (timeElapsed >= maxWait) { + return false; + } + log.info("Time elapsed : {} secs, maxWait {}", timeElapsed, maxWait); + Thread.sleep(100); + NamedList resp = solr.httpUriRequest(this).future.get(); + if (resp != null) { + Map m = (Map) resp.get(ZNODEVER); + if (m != null) { + remoteVersion = (Number) m.get(prop); + if (remoteVersion != null && remoteVersion.intValue() >= expectedZkVersion) break; + } + } + + attempts++; + log.info(formatString("Could not get expectedVersion {0} from {1} for prop {2} after {3} attempts", expectedZkVersion, coreUrl, prop, attempts)); + } catch (Exception e) { + if (e instanceof InterruptedException) { + break; // stop looping + } else { + log.warn("Failed to get /schema/zkversion from " + coreUrl + " due to: " + e); + } + } + } + } + return true; + } + + + @Override + protected SolrResponse createResponse(SolrClient client) { + return null; + } + } + @Override public Collection getApis() { return ApiBag.wrapRequestHandlers(this, diff --git 
a/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java index 2259a0e48678..d7d179ad56f5 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java @@ -17,71 +17,40 @@ package org.apache.solr.handler.admin; -import java.io.IOException; import java.lang.invoke.MethodHandles; -import java.nio.ByteBuffer; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.EnumMap; -import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Objects; -import org.apache.solr.api.ApiBag; -import org.apache.solr.client.solrj.SolrRequest; -import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.request.CollectionApiMapping; import org.apache.solr.client.solrj.request.CollectionApiMapping.CommandMeta; import org.apache.solr.client.solrj.request.CollectionApiMapping.Meta; import org.apache.solr.client.solrj.request.CollectionApiMapping.V2EndPoint; -import org.apache.solr.common.MapWriter; +import org.apache.solr.common.Callable; import org.apache.solr.common.SolrException; -import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.cloud.ClusterProperties; -import org.apache.solr.common.cloud.ZkStateReader; -import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.util.CommandOperation; -import org.apache.solr.common.util.StrUtils; -import org.apache.solr.common.util.Utils; -import org.apache.solr.core.ConfigOverlay; -import org.apache.solr.core.CoreContainer; -import org.apache.solr.core.PluginInfo; -import org.apache.solr.core.RuntimeLib; -import org.apache.solr.handler.SolrConfigHandler; import org.apache.solr.handler.admin.CollectionsHandler.CollectionOperation; import org.apache.solr.request.SolrQueryRequest; 
-import org.apache.solr.request.SolrRequestHandler; import org.apache.solr.response.SolrQueryResponse; -import org.apache.solr.util.RTimer; -import org.apache.zookeeper.data.Stat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static java.util.Arrays.asList; -import static java.util.Collections.singletonList; -import static org.apache.solr.common.util.CommandOperation.captureErrors; -import static org.apache.solr.common.util.StrUtils.formatString; -import static org.apache.solr.core.RuntimeLib.SHA256; - public class CollectionHandlerApi extends BaseHandlerApiSupport { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); final CollectionsHandler handler; static Collection apiCommands = createCollMapping(); - public CollectionHandlerApi(CollectionsHandler handler) { - this.handler = handler; - } - private static Collection createCollMapping() { - Map apiMapping = new EnumMap<>(Meta.class); + Map result = new EnumMap<>(Meta.class); for (Meta meta : Meta.values()) { for (CollectionOperation op : CollectionOperation.values()) { if (op.action == meta.action) { - apiMapping.put(meta, new ApiCommand() { + result.put(meta, new ApiCommand() { @Override public CommandMeta meta() { return meta; @@ -96,209 +65,30 @@ public void invoke(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSu } } //The following APIs have only V2 implementations - addApi(apiMapping, Meta.GET_NODES, CollectionHandlerApi::getNodes); - addApi(apiMapping, Meta.SET_CLUSTER_PROPERTY_OBJ, CollectionHandlerApi::setClusterObj); - addApi(apiMapping, Meta.ADD_PACKAGE, wrap(CollectionHandlerApi::addUpdatePackage)); - addApi(apiMapping, Meta.UPDATE_PACKAGE, wrap(CollectionHandlerApi::addUpdatePackage)); - addApi(apiMapping, Meta.DELETE_RUNTIME_LIB, wrap(CollectionHandlerApi::deletePackage)); - addApi(apiMapping, Meta.ADD_REQ_HANDLER, wrap(CollectionHandlerApi::addRequestHandler)); - addApi(apiMapping, Meta.DELETE_REQ_HANDLER, 
wrap(CollectionHandlerApi::deleteReqHandler)); - - for (Meta meta : Meta.values()) { - if (apiMapping.get(meta) == null) { - log.error("ERROR_INIT. No corresponding API implementation for : " + meta.commandName); - } - } - - return apiMapping.values(); - } - - static Command wrap(Command cmd) { - return info -> { - CoreContainer cc = ((CollectionHandlerApi) info.apiHandler).handler.coreContainer; - boolean modified = cmd.call(info); - if (modified) { - Stat stat = new Stat(); - Map clusterProperties = new ClusterProperties(cc.getZkController().getZkClient()).getClusterProperties(stat); - try { - cc.getPackageManager().onChange(clusterProperties); - } catch (SolrException e) { - log.error("error executing command : " + info.op.jsonStr(), e); - throw e; - } catch (Exception e) { - log.error("error executing command : " + info.op.jsonStr(), e); - throw new SolrException(ErrorCode.SERVER_ERROR, "error executing command : ", e); - } - log.info("current version of clusterprops.json is {} , trying to get every node to update ", stat.getVersion()); - log.debug("The current clusterprops.json: {}", clusterProperties); - ((CollectionHandlerApi) info.apiHandler).waitForStateSync(stat.getVersion(), cc); - - } - if (info.op != null && info.op.hasError()) { - log.error("Error in running command {} , current clusterprops.json : {}", Utils.toJSONString(info.op), Utils.toJSONString(new ClusterProperties(cc.getZkController().getZkClient()).getClusterProperties())); - } - return modified; - - }; - } - - private static boolean getNodes(ApiInfo params) { - params.rsp.add("nodes", ((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getClusterState().getLiveNodes()); - return false; - } - - private static boolean deleteReqHandler(ApiInfo params) throws Exception { - String name = params.op.getStr(""); - ClusterProperties clusterProperties = new ClusterProperties(((CollectionHandlerApi) 
params.apiHandler).handler.coreContainer.getZkController().getZkClient()); - Map map = clusterProperties.getClusterProperties(); - if (Utils.getObjectByPath(map, false, asList(SolrRequestHandler.TYPE, name)) == null) { - params.op.addError("NO such requestHandler with name :"); - return false; - } - Map m = new LinkedHashMap(); - Utils.setObjectByPath(m, asList(SolrRequestHandler.TYPE, name), null, true); - clusterProperties.setClusterProperties(m); - return true; - } + addApi(result, Meta.GET_NODES, params -> params.rsp.add("nodes", ((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getClusterState().getLiveNodes())); + addApi(result, Meta.SET_CLUSTER_PROPERTY_OBJ, params -> { + List commands = params.req.getCommands(true); + if (commands == null || commands.isEmpty()) throw new RuntimeException("Empty commands"); + ClusterProperties clusterProperties = new ClusterProperties(((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getZkClient()); - private static boolean addRequestHandler(ApiInfo params) throws Exception { - Map data = params.op.getDataMap(); - String name = (String) data.get("name"); - CoreContainer coreContainer = ((CollectionHandlerApi) params.apiHandler).handler.coreContainer; - ClusterProperties clusterProperties = new ClusterProperties(coreContainer.getZkController().getZkClient()); - Map map = clusterProperties.getClusterProperties(); - if (Utils.getObjectByPath(map, false, asList(SolrRequestHandler.TYPE, name)) != null) { - params.op.addError("A requestHandler already exists with the said name"); - return false; - } - Map m = new LinkedHashMap(); - Utils.setObjectByPath(m, asList(SolrRequestHandler.TYPE, name), data, true); - clusterProperties.setClusterProperties(m); - return true; - } - - private static boolean deletePackage(ApiInfo params) throws Exception { - if (!RuntimeLib.isEnabled()) { - params.op.addError("node not started with enable.runtime.lib=true"); - return false; - } 
- String name = params.op.getStr(CommandOperation.ROOT_OBJ); - ClusterProperties clusterProperties = new ClusterProperties(((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getZkClient()); - Map props = clusterProperties.getClusterProperties(); - List pathToLib = asList(CommonParams.PACKAGE, name); - Map existing = (Map) Utils.getObjectByPath(props, false, pathToLib); - if (existing == null) { - params.op.addError("No such runtimeLib : " + name); - return false; - } - Map delta = new LinkedHashMap(); - Utils.setObjectByPath(delta, pathToLib, null, true); - clusterProperties.setClusterProperties(delta); - return true; - } - - private static boolean addUpdatePackage(ApiInfo params) throws Exception { - if (!RuntimeLib.isEnabled()) { - params.op.addError("node not started with enable.runtime.lib=true"); - return false; - } - - CollectionHandlerApi handler = (CollectionHandlerApi) params.apiHandler; - RuntimeLib lib = new RuntimeLib(handler.handler.coreContainer); - CommandOperation op = params.op; - String name = op.getStr("name"); - ClusterProperties clusterProperties = new ClusterProperties(((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getZkClient()); - Map props = clusterProperties.getClusterProperties(); - List pathToLib = asList(CommonParams.PACKAGE, name); - Map existing = (Map) Utils.getObjectByPath(props, false, pathToLib); - if (Meta.ADD_PACKAGE.commandName.equals(op.name)) { - if (existing != null) { - op.addError(StrUtils.formatString("The jar with a name ''{0}'' already exists ", name)); - return false; - } - } else { - if (existing == null) { - op.addError(StrUtils.formatString("The jar with a name ''{0}'' does not exist", name)); - return false; - } - if (Objects.equals(existing.get(SHA256), op.getDataMap().get(SHA256))) { - op.addError("Trying to update a jar with the same sha256"); - return false; - } - } - try { - lib.init(new PluginInfo(SolrRequestHandler.TYPE, op.getDataMap())); - 
} catch (SolrException e) { - log.error("Error loading runtimelib ", e); - op.addError(e.getMessage()); - return false; - } - - Map delta = new LinkedHashMap(); - Utils.setObjectByPath(delta, pathToLib, op.getDataMap(), true); - clusterProperties.setClusterProperties(delta); - return true; - - } - - private static boolean setClusterObj(ApiInfo params) { - ClusterProperties clusterProperties = new ClusterProperties(((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getZkClient()); - try { - clusterProperties.setClusterProperties(params.op.getDataMap()); - } catch (Exception e) { - throw new SolrException(ErrorCode.SERVER_ERROR, "Error in API", e); - } - return false; - } - - private void waitForStateSync(int expectedVersion, CoreContainer coreContainer) { - final RTimer timer = new RTimer(); - int waitTimeSecs = 30; - // get a list of active replica cores to query for the schema zk version (skipping this core of course) - List concurrentTasks = new ArrayList<>(); - - ZkStateReader zkStateReader = coreContainer.getZkController().getZkStateReader(); - for (String nodeName : zkStateReader.getClusterState().getLiveNodes()) { - PerNodeCallable e = new PerNodeCallable(zkStateReader.getBaseUrlForNodeName(nodeName), expectedVersion, waitTimeSecs); - concurrentTasks.add(e); - } - if (concurrentTasks.isEmpty()) return; // nothing to wait for ... - - log.info("Waiting up to {} secs for {} nodes to update clusterprops to be of version {} ", - waitTimeSecs, concurrentTasks.size(), expectedVersion); - SolrConfigHandler.execInparallel(concurrentTasks, parallelExecutor -> { try { - List failedList = SolrConfigHandler.executeAll(expectedVersion, waitTimeSecs, concurrentTasks, parallelExecutor); - - // if any tasks haven't completed within the specified timeout, it's an error - if (failedList != null) - throw new SolrException(ErrorCode.SERVER_ERROR, - formatString("{0} out of {1} the property {2} to be of version {3} within {4} seconds! 
Failed cores: {5}", - failedList.size(), concurrentTasks.size() + 1, expectedVersion, 30, failedList)); - } catch (InterruptedException e) { - log.warn(formatString( - "Request was interrupted . trying to set the clusterprops to version {0} to propagate to {1} nodes ", - expectedVersion, concurrentTasks.size())); - Thread.currentThread().interrupt(); - + clusterProperties.setClusterProperties(commands.get(0).getDataMap()); + } catch (Exception e) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error in API", e); } }); - log.info("Took {}ms to update the clusterprops to be of version {} on {} nodes", - timer.getTime(), expectedVersion, concurrentTasks.size()); - - } - - interface Command { - - - boolean call(ApiInfo info) throws Exception; + for (Meta meta : Meta.values()) { + if (result.get(meta) == null) { + log.error("ERROR_INIT. No corresponding API implementation for : " + meta.commandName); + } + } + return result.values(); } - private static void addApi(Map mapping, Meta metaInfo, Command fun) { - mapping.put(metaInfo, new ApiCommand() { - + private static void addApi(Map result, Meta metaInfo, Callable fun) { + result.put(metaInfo, new ApiCommand() { @Override public CommandMeta meta() { return metaInfo; @@ -306,72 +96,35 @@ public CommandMeta meta() { @Override public void invoke(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler) throws Exception { - CommandOperation op = null; - if (metaInfo.method == SolrRequest.METHOD.POST) { - List commands = req.getCommands(true); - if (commands == null || commands.size() != 1) - throw new SolrException(ErrorCode.BAD_REQUEST, "should have exactly one command"); - op = commands.get(0); - } - - fun.call(new ApiInfo(req, rsp, apiHandler, op)); - if (op != null && op.hasError()) { - throw new ApiBag.ExceptionWithErrObject(ErrorCode.BAD_REQUEST, "error processing commands", captureErrors(singletonList(op))); - } + fun.call(new ApiParams(req, rsp, apiHandler)); } }); } - 
@Override - protected List getEndPoints() { - return asList(CollectionApiMapping.EndPoint.values()); - } - - @Override - protected Collection getCommands() { - return apiCommands; - } - - public static class PerNodeCallable extends SolrConfigHandler.PerReplicaCallable { - - static final List path = Arrays.asList("metadata", CommonParams.VERSION); - - PerNodeCallable(String baseUrl, int expectedversion, int waitTime) { - super(baseUrl, ConfigOverlay.ZNODEVER, expectedversion, waitTime); - } - - @Override - protected boolean verifyResponse(MapWriter mw, int attempts) { - remoteVersion = (Number) mw._get(path, -1); - if (remoteVersion.intValue() >= expectedZkVersion) return true; - log.info(formatString("Could not get expectedVersion {0} from {1} , remote val= {2} after {3} attempts", expectedZkVersion, coreUrl, remoteVersion, attempts)); - - return false; - } - - public String getPath() { - return "/____v2/node/ext"; - } - } - - static class ApiInfo { + static class ApiParams { final SolrQueryRequest req; final SolrQueryResponse rsp; final BaseHandlerApiSupport apiHandler; - final CommandOperation op; - ApiInfo(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler, CommandOperation op) { + ApiParams(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler) { this.req = req; this.rsp = rsp; this.apiHandler = apiHandler; - this.op = op; } } - public static void postBlob(String baseUrl, ByteBuffer buf) throws IOException { - try(HttpSolrClient client = new HttpSolrClient.Builder(baseUrl+"/____v2/node/blob" ).build()){ + public CollectionHandlerApi(CollectionsHandler handler) { + this.handler = handler; + } - } + @Override + protected Collection getCommands() { + return apiCommands; + } + + @Override + protected List getEndPoints() { + return Arrays.asList(CollectionApiMapping.EndPoint.values()); } } diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java 
b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java index 900d08ba71c0..bbea0f892a04 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java @@ -151,10 +151,10 @@ import static org.apache.solr.common.params.CommonAdminParams.ASYNC; import static org.apache.solr.common.params.CommonAdminParams.IN_PLACE_MOVE; import static org.apache.solr.common.params.CommonAdminParams.NUM_SUB_SHARDS; -import static org.apache.solr.common.params.CommonAdminParams.SPLIT_BY_PREFIX; import static org.apache.solr.common.params.CommonAdminParams.SPLIT_FUZZ; import static org.apache.solr.common.params.CommonAdminParams.SPLIT_METHOD; import static org.apache.solr.common.params.CommonAdminParams.WAIT_FOR_FINAL_STATE; +import static org.apache.solr.common.params.CommonAdminParams.SPLIT_BY_PREFIX; import static org.apache.solr.common.params.CommonParams.NAME; import static org.apache.solr.common.params.CommonParams.TIMING; import static org.apache.solr.common.params.CommonParams.VALUE_LONG; @@ -1069,8 +1069,9 @@ public Map execute(SolrQueryRequest req, SolrQueryResponse rsp, copy(req.getParams().required(), m, COLLECTION_PROP); addMapObject(m, RULE); addMapObject(m, SNITCH); - for (String prop : m.keySet()) { - if ("".equals(m.get(prop))) { + for (Map.Entry entry : m.entrySet()) { + String prop = entry.getKey(); + if ("".equals(entry.getValue())) { // set to an empty string is equivalent to removing the property, see SOLR-12507 m.put(prop, null); } diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java index ae225556b132..9bb8701923dc 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java @@ -46,7 +46,7 @@ import org.apache.solr.handler.RequestHandlerBase; 
import org.apache.solr.logging.MDCLoggingContext; import org.apache.solr.metrics.SolrMetricManager; -import org.apache.solr.metrics.SolrMetrics; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.security.AuthorizationContext; @@ -121,10 +121,10 @@ final public void init(NamedList args) { } @Override - public void initializeMetrics(SolrMetrics m) { - super.initializeMetrics(m); - parallelExecutor = MetricUtils.instrumentedExecutorService(parallelExecutor, this, solrMetrics.getRegistry(), - SolrMetricManager.mkName("parallelCoreAdminExecutor", getCategory().name(), solrMetrics.scope, "threadPool")); + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + super.initializeMetrics(parentContext, scope); + parallelExecutor = MetricUtils.instrumentedExecutorService(parallelExecutor, this, solrMetricsContext.getMetricRegistry(), + SolrMetricManager.mkName("parallelCoreAdminExecutor", getCategory().name(), scope, "threadPool")); } @Override public Boolean registerV2() { diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java index 81e6c9241915..5739651c7faf 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java @@ -55,6 +55,7 @@ import static org.apache.solr.common.params.CoreAdminParams.COLLECTION; import static org.apache.solr.common.params.CoreAdminParams.CoreAdminAction.*; import static org.apache.solr.common.params.CoreAdminParams.REPLICA; +import static org.apache.solr.common.params.CoreAdminParams.REPLICA_TYPE; import static org.apache.solr.common.params.CoreAdminParams.SHARD; import static org.apache.solr.handler.admin.CoreAdminHandler.COMPLETED; import static 
org.apache.solr.handler.admin.CoreAdminHandler.CallInfo; @@ -333,6 +334,7 @@ static NamedList getCoreStatus(CoreContainer cores, String cname, boolea cloudInfo.add(COLLECTION, core.getCoreDescriptor().getCloudDescriptor().getCollectionName()); cloudInfo.add(SHARD, core.getCoreDescriptor().getCloudDescriptor().getShardId()); cloudInfo.add(REPLICA, core.getCoreDescriptor().getCloudDescriptor().getCoreNodeName()); + cloudInfo.add(REPLICA_TYPE, core.getCoreDescriptor().getCloudDescriptor().getReplicaType().name()); info.add("cloud", cloudInfo); } if (isIndexInfoNeeded) { diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ZookeeperInfoHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/ZookeeperInfoHandler.java index 45ef0b907746..98a6e56d4331 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/ZookeeperInfoHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/ZookeeperInfoHandler.java @@ -183,12 +183,12 @@ final boolean matchesStatusFilter(Map collectionState, Set shards = (Map) collectionState.get("shards"); - for (String shardId : shards.keySet()) { + for (Object o : shards.values()) { boolean hasActive = false; - Map shard = (Map) shards.get(shardId); + Map shard = (Map) o; Map replicas = (Map) shard.get("replicas"); - for (String replicaId : replicas.keySet()) { - Map replicaState = (Map) replicas.get(replicaId); + for (Object value : replicas.values()) { + Map replicaState = (Map) value; Replica.State coreState = Replica.State.getState((String) replicaState.get(ZkStateReader.STATE_PROP)); String nodeName = (String) replicaState.get("node_name"); diff --git a/solr/core/src/java/org/apache/solr/handler/component/DebugComponent.java b/solr/core/src/java/org/apache/solr/handler/component/DebugComponent.java index 87076a0b00ba..be2a84ed7f0e 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/DebugComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/DebugComponent.java @@ -39,7 +39,9 @@ 
import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.search.DocList; import org.apache.solr.search.QueryParsing; +import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.search.facet.FacetDebugInfo; +import org.apache.solr.search.stats.StatsCache; import org.apache.solr.util.SolrPluginUtils; import static org.apache.solr.common.params.CommonParams.FQ; @@ -76,7 +78,7 @@ public class DebugComponent extends SearchComponent map.put(ResponseBuilder.STAGE_DONE, "DONE"); stages = Collections.unmodifiableMap(map); } - + @Override public void prepare(ResponseBuilder rb) throws IOException { @@ -91,6 +93,9 @@ public void prepare(ResponseBuilder rb) throws IOException public void process(ResponseBuilder rb) throws IOException { if( rb.isDebug() ) { + SolrQueryRequest req = rb.req; + StatsCache statsCache = req.getSearcher().getStatsCache(); + req.getContext().put(SolrIndexSearcher.STATS_SOURCE, statsCache.get(req)); DocList results = null; //some internal grouping requests won't have results value set if(rb.getResults() != null) { @@ -175,6 +180,11 @@ public void modifyRequest(ResponseBuilder rb, SearchComponent who, ShardRequest // Turn on debug to get explain only when retrieving fields if ((sreq.purpose & ShardRequest.PURPOSE_GET_FIELDS) != 0) { sreq.purpose |= ShardRequest.PURPOSE_GET_DEBUG; + // always distribute the latest version of global stats + sreq.purpose |= ShardRequest.PURPOSE_SET_TERM_STATS; + StatsCache statsCache = rb.req.getSearcher().getStatsCache(); + statsCache.sendGlobalStats(rb, sreq); + if (rb.isDebugAll()) { sreq.params.set(CommonParams.DEBUG_QUERY, "true"); } else { diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java index 32f2e403befc..7ebe7d1490f0 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java +++ 
b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java @@ -330,11 +330,11 @@ public void process(ResponseBuilder rb) throws IOException return; } - StatsCache statsCache = req.getCore().getStatsCache(); + SolrIndexSearcher searcher = req.getSearcher(); + StatsCache statsCache = searcher.getStatsCache(); int purpose = params.getInt(ShardParams.SHARDS_PURPOSE, ShardRequest.PURPOSE_GET_TOP_IDS); if ((purpose & ShardRequest.PURPOSE_GET_TERM_STATS) != 0) { - SolrIndexSearcher searcher = req.getSearcher(); statsCache.returnLocalStats(rb, searcher); return; } @@ -686,7 +686,7 @@ protected void regularFinishStage(ResponseBuilder rb) { } protected void createDistributedStats(ResponseBuilder rb) { - StatsCache cache = rb.req.getCore().getStatsCache(); + StatsCache cache = rb.req.getSearcher().getStatsCache(); if ( (rb.getFieldFlags() & SolrIndexSearcher.GET_SCORES)!=0 || rb.getSortSpec().includesScore()) { ShardRequest sreq = cache.retrieveStatsRequest(rb); if (sreq != null) { @@ -696,7 +696,7 @@ protected void createDistributedStats(ResponseBuilder rb) { } protected void updateStats(ResponseBuilder rb, ShardRequest sreq) { - StatsCache cache = rb.req.getCore().getStatsCache(); + StatsCache cache = rb.req.getSearcher().getStatsCache(); cache.mergeToGlobalStats(rb.req, sreq.responses); } @@ -776,8 +776,9 @@ protected void createMainQuery(ResponseBuilder rb) { // TODO: should this really sendGlobalDfs if just includeScore? 
- if (shardQueryIncludeScore) { - StatsCache statsCache = rb.req.getCore().getStatsCache(); + if (shardQueryIncludeScore || rb.isDebug()) { + StatsCache statsCache = rb.req.getSearcher().getStatsCache(); + sreq.purpose |= ShardRequest.PURPOSE_SET_TERM_STATS; statsCache.sendGlobalStats(rb, sreq); } diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java index b44320c7415b..3ce531b868f2 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java @@ -79,8 +79,8 @@ import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SimpleOrderedMap; import org.apache.solr.common.util.StrUtils; -import org.apache.solr.core.SolrCore; import org.apache.solr.core.XmlConfigFile; +import org.apache.solr.core.SolrCore; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.response.transform.ElevatedMarkerFactory; import org.apache.solr.response.transform.ExcludedMarkerFactory; diff --git a/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java b/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java index 61b10139a214..40af722c8a88 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java +++ b/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java @@ -166,8 +166,6 @@ public void addRequest(SearchComponent me, ShardRequest sreq) { } } - public GlobalCollectionStat globalCollectionStat; - public Map resultIds; // Maps uniqueKeyValue to ShardDoc, which may be used to // determine order of the doc or uniqueKey in the final @@ -417,18 +415,6 @@ public void setTimer(RTimer timer) { this.timer = timer; } - - public static class GlobalCollectionStat { - public final long numDocs; - - public final Map dfMap; - - public 
GlobalCollectionStat(int numDocs, Map dfMap) { - this.numDocs = numDocs; - this.dfMap = dfMap; - } - } - /** * Creates a SolrIndexSearcher.QueryCommand from this * ResponseBuilder. TimeAllowed is left unset. diff --git a/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java b/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java index 00d86970e510..d70cf9928e45 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java @@ -49,7 +49,7 @@ import org.apache.solr.core.SolrEventListener; import org.apache.solr.metrics.MetricsMap; import org.apache.solr.metrics.SolrMetricProducer; -import org.apache.solr.metrics.SolrMetrics; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.spelling.suggest.SolrSuggester; import org.apache.solr.spelling.suggest.SuggesterOptions; @@ -88,6 +88,8 @@ public class SuggestComponent extends SearchComponent implements SolrCoreAware, @SuppressWarnings("unchecked") protected NamedList initParams; + protected SolrMetricsContext metricsContext; + /** * Key is the dictionary name used in SolrConfig, value is the corresponding {@link SolrSuggester} */ @@ -347,25 +349,23 @@ public String getDescription() { return "Suggester component"; } - protected SolrMetrics metricsInfo; - @Override - public SolrMetrics getMetrics() { - return metricsInfo; + public SolrMetricsContext getSolrMetricsContext() { + return metricsContext; } @Override - public void initializeMetrics(SolrMetrics info) { - this.metricsInfo = info.getChildInfo(this); + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + this.metricsContext = parentContext.getChildContext(this); - metricsInfo.metricManager.registerGauge(this, info.registry, () -> ramBytesUsed(), metricsInfo.tag, true, "totalSizeInBytes", getCategory().toString(), 
metricsInfo.scope); + this.metricsContext.gauge(this, () -> ramBytesUsed(), true, "totalSizeInBytes", getCategory().toString()); MetricsMap suggestersMap = new MetricsMap((detailed, map) -> { for (Map.Entry entry : suggesters.entrySet()) { SolrSuggester suggester = entry.getValue(); map.put(entry.getKey(), suggester.toString()); } }); - metricsInfo.metricManager.registerGauge(this, metricsInfo.registry, suggestersMap, metricsInfo.tag, true, "suggesters", getCategory().toString(), metricsInfo.scope); + this.metricsContext.gauge(this, suggestersMap, true, "suggesters", getCategory().toString(), scope); } @Override diff --git a/solr/core/src/java/org/apache/solr/logging/log4j2/Log4j2Watcher.java b/solr/core/src/java/org/apache/solr/logging/log4j2/Log4j2Watcher.java index de79991f0c0a..496350354256 100644 --- a/solr/core/src/java/org/apache/solr/logging/log4j2/Log4j2Watcher.java +++ b/solr/core/src/java/org/apache/solr/logging/log4j2/Log4j2Watcher.java @@ -280,8 +280,8 @@ public SolrDocument toSolrDocument(LogEvent event) { Map contextMap = event.getContextMap(); if (contextMap != null) { - for (String key : contextMap.keySet()) - doc.setField(key, contextMap.get(key)); + for (Map.Entry entry : contextMap.entrySet()) + doc.setField(entry.getKey(), entry.getValue()); } if (!doc.containsKey("core")) diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java b/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java index c57a704ce175..c318b8cd7dcc 100644 --- a/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java +++ b/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java @@ -40,9 +40,8 @@ public class SolrCoreMetricManager implements Closeable { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private final SolrCore core; - private final String tag; - private final SolrMetricManager metricManager; - private String registryName; + private 
SolrMetricsContext solrMetricsContext; + private SolrMetricManager metricManager; private String collectionName; private String shardName; private String replicaName; @@ -56,10 +55,10 @@ public class SolrCoreMetricManager implements Closeable { */ public SolrCoreMetricManager(SolrCore core) { this.core = core; - this.tag = core.getMetricTag(); - this.metricManager = core.getCoreContainer().getMetricManager(); initCloudMode(); - registryName = createRegistryName(cloudMode, collectionName, shardName, replicaName, core.getName()); + metricManager = core.getCoreContainer().getMetricManager(); + String registryName = createRegistryName(cloudMode, collectionName, shardName, replicaName, core.getName()); + solrMetricsContext = new SolrMetricsContext(metricManager, registryName, core.getMetricTag()); leaderRegistryName = createLeaderRegistryName(cloudMode, collectionName, shardName); } @@ -86,8 +85,8 @@ public void loadReporters() { CoreContainer coreContainer = core.getCoreContainer(); NodeConfig nodeConfig = coreContainer.getConfig(); PluginInfo[] pluginInfos = nodeConfig.getMetricsConfig().getMetricReporters(); - metricManager.loadReporters(pluginInfos, core.getResourceLoader(), coreContainer, core, tag, - SolrInfoBean.Group.core, registryName); + metricManager.loadReporters(pluginInfos, core.getResourceLoader(), coreContainer, core, solrMetricsContext.tag, + SolrInfoBean.Group.core, solrMetricsContext.registry); if (cloudMode) { metricManager.loadShardReporters(pluginInfos, core); } @@ -99,19 +98,20 @@ public void loadReporters() { * This method also reloads reporters so that they use the new core name. 
*/ public void afterCoreSetName() { - String oldRegistryName = registryName; + String oldRegistryName = solrMetricsContext.registry; String oldLeaderRegistryName = leaderRegistryName; initCloudMode(); - registryName = createRegistryName(cloudMode, collectionName, shardName, replicaName, core.getName()); + String newRegistryName = createRegistryName(cloudMode, collectionName, shardName, replicaName, core.getName()); leaderRegistryName = createLeaderRegistryName(cloudMode, collectionName, shardName); - if (oldRegistryName.equals(registryName)) { + if (oldRegistryName.equals(newRegistryName)) { return; } // close old reporters - metricManager.closeReporters(oldRegistryName, tag); + metricManager.closeReporters(oldRegistryName, solrMetricsContext.tag); if (oldLeaderRegistryName != null) { - metricManager.closeReporters(oldLeaderRegistryName, tag); + metricManager.closeReporters(oldLeaderRegistryName, solrMetricsContext.tag); } + solrMetricsContext = new SolrMetricsContext(metricManager, newRegistryName, solrMetricsContext.tag); // load reporters again, using the new core name loadReporters(); } @@ -127,15 +127,16 @@ public void registerMetricProducer(String scope, SolrMetricProducer producer) { throw new IllegalArgumentException("registerMetricProducer() called with illegal arguments: " + "scope = " + scope + ", producer = " + producer); } - producer.initializeMetrics(metricManager, getRegistryName(), tag, scope); + // use deprecated method for back-compat, remove in 9.0 + producer.initializeMetrics(solrMetricsContext.metricManager, solrMetricsContext.registry, solrMetricsContext.tag, scope); } /** * Return the registry used by this SolrCore. 
*/ public MetricRegistry getRegistry() { - if (registryName != null) { - return metricManager.registry(registryName); + if (solrMetricsContext != null) { + return solrMetricsContext.getMetricRegistry(); } else { return null; } @@ -146,11 +147,15 @@ public MetricRegistry getRegistry() { */ @Override public void close() throws IOException { - metricManager.closeReporters(getRegistryName(), tag); + metricManager.closeReporters(solrMetricsContext.registry, solrMetricsContext.tag); if (getLeaderRegistryName() != null) { - metricManager.closeReporters(getLeaderRegistryName(), tag); + metricManager.closeReporters(getLeaderRegistryName(), solrMetricsContext.tag); } - metricManager.unregisterGauges(getRegistryName(), tag); + metricManager.unregisterGauges(solrMetricsContext.registry, solrMetricsContext.tag); + } + + public SolrMetricsContext getSolrMetricsContext() { + return solrMetricsContext; } public SolrCore getCore() { @@ -175,7 +180,7 @@ public SolrCore getCore() { * @return the metric registry name of the manager. */ public String getRegistryName() { - return registryName; + return solrMetricsContext != null ? solrMetricsContext.registry : null; } /** @@ -190,7 +195,7 @@ public String getLeaderRegistryName() { * Return a tag specific to this instance. */ public String getTag() { - return tag; + return solrMetricsContext.tag; } public static String createRegistryName(boolean cloud, String collectionName, String shardName, String replicaName, String coreName) { diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java index 187598d30cae..977b0ca66be6 100644 --- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java +++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java @@ -607,14 +607,10 @@ public Map getMetrics(String registry, MetricFilter metricFilter */ public Meter meter(SolrInfoBean info, String registry, String metricName, String... 
metricPath) { final String name = mkName(metricName, metricPath); - return meter(info, registry(registry), name); - } - - public Meter meter(SolrInfoBean info, MetricRegistry registry, String metricsPath) { if (info != null) { - info.registerMetricName(metricsPath); + info.registerMetricName(name); } - return registry.meter(metricsPath, meterSupplier); + return registry(registry).meter(name, meterSupplier); } /** @@ -634,14 +630,6 @@ public Timer timer(SolrInfoBean info, String registry, String metricName, String return registry(registry).timer(name, timerSupplier); } - public Timer timer(SolrInfoBean info, MetricRegistry registry, String name) { - if (info != null) { - info.registerMetricName(name); - } - return registry.timer(name, timerSupplier); - - } - /** * Create or get an existing named {@link Counter} * @@ -653,15 +641,10 @@ public Timer timer(SolrInfoBean info, MetricRegistry registry, String name) { */ public Counter counter(SolrInfoBean info, String registry, String metricName, String... metricPath) { final String name = mkName(metricName, metricPath); - return counter(info, registry(registry), name); - } - - public Counter counter(SolrInfoBean info, MetricRegistry registry, String name) { if (info != null) { info.registerMetricName(name); } - return registry.counter(name, counterSupplier); - + return registry(registry).counter(name, counterSupplier); } /** @@ -707,27 +690,13 @@ public void registerMetric(SolrInfoBean info, String registry, Metric metric, bo } } - public void registerGauge(SolrInfoBean info, MetricRegistry registry, Gauge g, boolean force, String name) { - if (info != null) { - info.registerMetricName(name); - } - synchronized (registry) { - if (force && registry.getMetrics().containsKey(name)) { - registry.remove(name); - } - registry.register(name, g); - } - - } - - - /** - * This is a wrapper for {@link Gauge} metrics, which are usually implemented as - * lambdas that often keep a reference to their parent instance. 
In order to make sure that - * all such metrics are removed when their parent instance is removed / closed the - * metric is associated with an instance tag, which can be used then to remove - * wrappers with the matching tag using {@link #unregisterGauges(String, String)}. - */ + /** + * This is a wrapper for {@link Gauge} metrics, which are usually implemented as + * lambdas that often keep a reference to their parent instance. In order to make sure that + * all such metrics are removed when their parent instance is removed / closed the + * metric is associated with an instance tag, which can be used then to remove + * wrappers with the matching tag using {@link #unregisterGauges(String, String)}. + */ public static class GaugeWrapper implements Gauge { private final Gauge gauge; private final String tag; @@ -765,8 +734,10 @@ public int unregisterGauges(String registryName, String tagSegment) { registry.removeMatching((name, metric) -> { if (metric instanceof GaugeWrapper) { GaugeWrapper wrapper = (GaugeWrapper) metric; - boolean toRemove = tagSegment.equals(wrapper.getTag()) || wrapper.getTag().contains(tagSegment); - if (toRemove) removed.incrementAndGet(); + boolean toRemove = wrapper.getTag().contains(tagSegment); + if (toRemove) { + removed.incrementAndGet(); + } return toRemove; } return false; @@ -813,7 +784,6 @@ public static String makeName(List path, String name) { sb.append(name); return sb.toString(); } - } /** diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java index cb534aca5dcc..29c14cefbcde 100644 --- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java +++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java @@ -17,25 +17,29 @@ package org.apache.solr.metrics; /** - * Used by objects that expose metrics through {@link SolrCoreMetricManager}. + * Used by objects that expose metrics through {@link SolrMetricManager}. 
*/ public interface SolrMetricProducer extends AutoCloseable { /** - * Unique metric name is in the format of A.B.C + * Unique metric tag identifies components with the same life-cycle, which should + * be registered / unregistered together. It is in the format of A:B:C, where * A is the parent of B is the parent of C and so on. - * If object "B" is unregistered , C also must get unregistered. - * If object "A" is unregistered , B , C also must get unregistered. + * If object "B" is unregistered C also must get unregistered. + * If object "A" is unregistered B and C also must get unregistered. + * @param o object to create a tag for + * @param parentName parent object name, or null if no parent exists */ - default String getUniqueMetricTag(String parentName) { - String name = getClass().getSimpleName() + "@" + Integer.toHexString(hashCode()); - if (parentName != null && parentName.contains(name)) return parentName; + static String getUniqueMetricTag(Object o, String parentName) { + String name = o.getClass().getSimpleName() + "@" + Integer.toHexString(o.hashCode()); + if (parentName != null && parentName.contains(name)) { + throw new RuntimeException("Parent already includes this component! parent=" + parentName + ", this=" + name); + } return parentName == null ? name : parentName + ":" + name; } - /** * Initializes metrics specific to this producer * @@ -43,29 +47,55 @@ default String getUniqueMetricTag(String parentName) { * @param registry registry name where metrics are registered * @param tag a symbolic tag that represents this instance of the producer, * or a group of related instances that have the same life-cycle. This tag is - * used when managing life-cycle of some metrics and is set when - * {@link #initializeMetrics(SolrMetricManager, String, String, String)} is called. - * @param scope scope of the metrics (eg. handler name) to separate metrics of + * used when managing life-cycle of some metrics. + * @param scope scope of the metrics (eg. 
handler name) to separate metrics of components with + * the same implementation but different scope. + * @deprecated use {@link #initializeMetrics(SolrMetricsContext, String)} instead */ + @Deprecated default void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) { - initializeMetrics(new SolrMetrics(manager, registry, tag, scope)); + initializeMetrics(new SolrMetricsContext(manager, registry, tag), scope); } - default void initializeMetrics(SolrMetrics info) { - throw new RuntimeException("This means , the class has not implemented both of these methods"); + /** + * Initialize metrics specific to this producer. + * @param parentContext parent metrics context. If this component has the same life-cycle as the parent + * it can simply use the parent context, otherwise it should obtain a child context + * using {@link SolrMetricsContext#getChildContext(Object)} passing this + * as the child. + * @param scope component scope + */ + default void initializeMetrics(SolrMetricsContext parentContext, String scope) { + throw new RuntimeException("In class " + getClass().getName() + + " you must implement either initializeMetrics(SolrMetricsContext, String) or " + + "initializeMetrics(SolrMetricManager, String, String, String)"); } - default SolrMetrics getMetrics() { + /** + * Implementing classes should override this method to provide the context obtained in + * {@link #initializeMetrics(SolrMetricsContext, String)} to ensure proper cleanup of metrics + * at the end of the life-cycle of this component. + */ + default SolrMetricsContext getSolrMetricsContext() { return null; } + /** + * Implementations should always call SolrMetricProducer.super.close() to ensure that + * metrics with the same life-cycle as this component are properly unregistered. This prevents + * obscure memory leaks. 
+ */ @Override default void close() throws Exception { - SolrMetrics info = getMetrics(); - if (info == null || info.tag.indexOf(':') == -1) return;//this will end up unregistering the root itself - info.unregister(); + SolrMetricsContext context = getSolrMetricsContext(); + if (context == null) { + return; + } else { + context.unregister(); + } + // ??? (ab) no idea what this was supposed to avoid + //if (info == null || info.tag.indexOf(':') == -1) return;//this will end up unregistering the root itself } - } diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetrics.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetrics.java deleted file mode 100644 index d73b04b33c98..000000000000 --- a/solr/core/src/java/org/apache/solr/metrics/SolrMetrics.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.solr.metrics; - -import java.util.ArrayList; -import java.util.Collections; - -import com.codahale.metrics.Counter; -import com.codahale.metrics.Gauge; -import com.codahale.metrics.Meter; -import com.codahale.metrics.MetricRegistry; -import com.codahale.metrics.Timer; -import org.apache.solr.core.SolrInfoBean; - -import static org.apache.solr.metrics.SolrMetricManager.makeName; - -public class SolrMetrics { - public final String registry; - public final SolrMetricManager metricManager; - public final String tag; - public final String scope; - private SolrMetrics parent; - - public SolrMetrics(SolrMetricManager metricManager, String registry, String tag, String scope) { - this.registry = registry; - this.metricManager = metricManager; - this.tag = tag; - this.scope = scope; - } - - public String getTag() { - return tag; - } - - public void unregister() { - metricManager.unregisterGauges(registry, tag); - } - - public SolrMetrics getChildInfo(SolrMetricProducer producer) { - SolrMetrics metricsInfo = new SolrMetrics(metricManager, registry, producer.getUniqueMetricTag(tag), scope); - metricsInfo.parent = this; - return metricsInfo; - } - - public Meter meter(SolrInfoBean info, String metricName, String... metricpath) { - return metricManager.meter(info, getRegistry(), createName(metricName, metricpath)); - } - - private String createName(String metricName, String... metricpath) { - ArrayList l = new ArrayList<>(); - if (metricpath != null) { - Collections.addAll(l, metricpath); - } - l.add(scope); - return makeName(l, metricName); - } - - public Counter counter(SolrInfoBean info, String metricName, String... metricpath) { - return metricManager.counter(info, getRegistry(), createName(metricName, metricpath)); - - } - - public void gauge(SolrInfoBean info, Gauge gauge, boolean force, String metricName, String... metricpath) { - String name = metricpath == null || metricpath.length == 0 ? 
metricName : createName(metricName, metricpath); - metricManager.registerGauge(info, getRegistry(), new SolrMetricManager.GaugeWrapper<>(gauge, tag), force, name); - } - - public Timer timer(SolrInfoBean info, String metricName, String... metricpath) { - return metricManager.timer(info, getRegistry(), createName(metricName, metricpath)); - - } - - public SolrMetrics getParent() { - return parent; - } - - public MetricRegistry getRegistry() { - return metricManager.registry(registry); - } -} diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricsContext.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricsContext.java new file mode 100644 index 000000000000..dd37e1fabe86 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricsContext.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.solr.metrics; + +import com.codahale.metrics.Counter; +import com.codahale.metrics.Gauge; +import com.codahale.metrics.Histogram; +import com.codahale.metrics.Meter; +import com.codahale.metrics.MetricRegistry; +import com.codahale.metrics.Timer; +import org.apache.solr.core.SolrInfoBean; + +/** + * This class represents a metrics context that ties together components with the same life-cycle + * and provides convenient access to the metric registry. + */ +public class SolrMetricsContext { + public final String registry; + public final SolrMetricManager metricManager; + public final String tag; + + public SolrMetricsContext(SolrMetricManager metricManager, String registry, String tag) { + this.registry = registry; + this.metricManager = metricManager; + this.tag = tag; + } + + /** + * Metrics tag that represents objects with the same life-cycle. + */ + public String getTag() { + return tag; + } + + /** + * Unregister all {@link Gauge} metrics that use this context's tag. + * + *

NOTE: This method MUST be called at the end of a life-cycle (typically in close()) + * of components that register gauge metrics with references to the current object's instance. Failure to + * do so may result in hard-to-debug memory leaks.

+ */ + public void unregister() { + metricManager.unregisterGauges(registry, tag); + } + + /** + * Get a context with the same registry name but a tag that represents a parent-child relationship. + * Since it's a different tag than the parent's context it is assumed that the life-cycle of the parent + * and child are different. + * @param child child object that produces metrics with a different life-cycle than the parent. + */ + public SolrMetricsContext getChildContext(Object child) { + SolrMetricsContext childContext = new SolrMetricsContext(metricManager, registry, SolrMetricProducer.getUniqueMetricTag(child, tag)); + return childContext; + } + + /** + * Convenience method for {@link SolrMetricManager#meter(SolrInfoBean, String, String, String...)}. + */ + public Meter meter(SolrInfoBean info, String metricName, String... metricPath) { + return metricManager.meter(info, registry, metricName, metricPath); + } + + /** + * Convenience method for {@link SolrMetricManager#counter(SolrInfoBean, String, String, String...)}. + */ + public Counter counter(SolrInfoBean info, String metricName, String... metricPath) { + return metricManager.counter(info, registry, metricName, metricPath); + + } + + /** + * Convenience method for {@link SolrMetricManager#registerGauge(SolrInfoBean, String, Gauge, String, boolean, String, String...)}. + */ + public void gauge(SolrInfoBean info, Gauge gauge, boolean force, String metricName, String... metricPath) { + metricManager.registerGauge(info, registry, gauge, tag, force, metricName, metricPath); + } + + /** + * Convenience method for {@link SolrMetricManager#meter(SolrInfoBean, String, String, String...)}. + */ + public Timer timer(SolrInfoBean info, String metricName, String... metricPath) { + return metricManager.timer(info, registry, metricName, metricPath); + } + + /** + * Convenience method for {@link SolrMetricManager#histogram(SolrInfoBean, String, String, String...)}. 
+ */ + public Histogram histogram(SolrInfoBean info, String metricName, String... metricPath) { + return metricManager.histogram(info, registry, metricName, metricPath); + } + + /** + * Get the MetricRegistry instance that is used for registering metrics in this context. + */ + public MetricRegistry getMetricRegistry() { + return metricManager.registry(registry); + } +} diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/SolrJmxReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/SolrJmxReporter.java index 468ba602719a..54b4530fa38d 100644 --- a/solr/core/src/java/org/apache/solr/metrics/reporters/SolrJmxReporter.java +++ b/solr/core/src/java/org/apache/solr/metrics/reporters/SolrJmxReporter.java @@ -70,7 +70,7 @@ public SolrJmxReporter(SolrMetricManager metricManager, String registryName) { protected synchronized void doInit() { if (serviceUrl != null && agentId != null) { mBeanServer = JmxUtil.findFirstMBeanServer(); - log.warn("No more than one of serviceUrl({}) and agentId({}) should be configured, using first MBeanServer instead of configuration.", + log.warn("No more than one of serviceUrl({}) and agentId({}) should be configured, using first MBeanServer {} instead of configuration.", serviceUrl, agentId, mBeanServer); } else if (serviceUrl != null) { // reuse existing services diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/jmx/JmxMetricsReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/jmx/JmxMetricsReporter.java index 189d14d273c7..56f295fb2f3a 100644 --- a/solr/core/src/java/org/apache/solr/metrics/reporters/jmx/JmxMetricsReporter.java +++ b/solr/core/src/java/org/apache/solr/metrics/reporters/jmx/JmxMetricsReporter.java @@ -28,7 +28,6 @@ import javax.management.QueryExp; import java.io.Closeable; import java.lang.invoke.MethodHandles; -import java.lang.management.ManagementFactory; import java.util.HashMap; import java.util.Locale; import java.util.Map; @@ -157,9 +156,6 @@ public Builder 
withTag(String tag) { } public JmxMetricsReporter build() { - if (mBeanServer == null) { - mBeanServer = ManagementFactory.getPlatformMBeanServer(); - } if (tag == null) { tag = Integer.toHexString(this.hashCode()); } diff --git a/solr/core/src/java/org/apache/solr/request/IntervalFacets.java b/solr/core/src/java/org/apache/solr/request/IntervalFacets.java index 6e492e70034e..188c07a61cd2 100644 --- a/solr/core/src/java/org/apache/solr/request/IntervalFacets.java +++ b/solr/core/src/java/org/apache/solr/request/IntervalFacets.java @@ -558,7 +558,7 @@ public static class FacetInterval { } else if (intervalStr.charAt(lastNdx) == ']') { endOpen = false; } else { - throw new SyntaxError("Invalid end character " + intervalStr.charAt(0) + " in facet interval " + intervalStr); + throw new SyntaxError("Invalid end character " + intervalStr.charAt(lastNdx) + " in facet interval " + intervalStr); } StringBuilder startStr = new StringBuilder(lastNdx); diff --git a/solr/core/src/java/org/apache/solr/request/json/ObjectUtil.java b/solr/core/src/java/org/apache/solr/request/json/ObjectUtil.java index fc6778123887..b9c73bc4bc31 100644 --- a/solr/core/src/java/org/apache/solr/request/json/ObjectUtil.java +++ b/solr/core/src/java/org/apache/solr/request/json/ObjectUtil.java @@ -22,6 +22,8 @@ import java.util.List; import java.util.Map; +import org.apache.solr.common.SolrException; + public class ObjectUtil { public static class ConflictHandler { @@ -103,10 +105,14 @@ public static void mergeObjects(Map top, List path, Objec // OK, now we need to merge values handler.handleConflict(outer, path, key, val, existingVal); } - } else { + } else if (val instanceof Map) { // merging at top level... 
Map newMap = (Map)val; handler.mergeMap(outer, newMap, path); + } else { + // todo: find a way to return query param in error message + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Expected JSON Object but got " + val.getClass().getSimpleName() + "=" + val); } } diff --git a/solr/core/src/java/org/apache/solr/request/json/RequestUtil.java b/solr/core/src/java/org/apache/solr/request/json/RequestUtil.java index 6e7e02a69edc..e1ddfcfb549d 100644 --- a/solr/core/src/java/org/apache/solr/request/json/RequestUtil.java +++ b/solr/core/src/java/org/apache/solr/request/json/RequestUtil.java @@ -179,13 +179,14 @@ public static void processParams(SolrRequestHandler handler, SolrQueryRequest re } mergeJSON(json, JSON, jsonS, new ObjectUtil.ConflictHandler()); } - for (String key : newMap.keySet()) { + for (Map.Entry entry : newMap.entrySet()) { + String key = entry.getKey(); // json.nl, json.wrf are existing query parameters if (key.startsWith("json.") && !("json.nl".equals(key) || "json.wrf".equals(key))) { if (json == null) { json = new LinkedHashMap<>(); } - mergeJSON(json, key, newMap.get(key), new ObjectUtil.ConflictHandler()); + mergeJSON(json, key, entry.getValue(), new ObjectUtil.ConflictHandler()); } } @@ -269,6 +270,8 @@ private static void mergeJSON(Map json, String queryParamName, St ObjectUtil.mergeObjects(json, path, o, handler); } } + } catch (JSONParser.ParseException e ) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e); } catch (IOException e) { // impossible } diff --git a/solr/core/src/java/org/apache/solr/response/SchemaXmlWriter.java b/solr/core/src/java/org/apache/solr/response/SchemaXmlWriter.java index 1af663471c8f..82a530178fe8 100644 --- a/solr/core/src/java/org/apache/solr/response/SchemaXmlWriter.java +++ b/solr/core/src/java/org/apache/solr/response/SchemaXmlWriter.java @@ -219,6 +219,8 @@ private void writeAnalyzer(SimpleOrderedMap analyzerProperties, String a if ( ! 
"solr.TokenizerChain".equals(analyzerProperties.getVal(i))) { writeAttr(name, analyzerProperties.getVal(i).toString()); } + } else if (name.equals(IndexSchema.LUCENE_MATCH_VERSION_PARAM)) { + writeAttr(name, analyzerProperties.getVal(i).toString()); } } boolean isEmptyTag diff --git a/solr/core/src/java/org/apache/solr/rest/ManagedResource.java b/solr/core/src/java/org/apache/solr/rest/ManagedResource.java index 8668c9cd9048..2dc402817310 100644 --- a/solr/core/src/java/org/apache/solr/rest/ManagedResource.java +++ b/solr/core/src/java/org/apache/solr/rest/ManagedResource.java @@ -18,6 +18,7 @@ import java.io.FileNotFoundException; import java.io.IOException; import java.lang.invoke.MethodHandles; +import java.util.Collection; import java.util.Date; import java.util.LinkedHashMap; import java.util.List; @@ -79,7 +80,7 @@ protected ManagedResource(String resourceId, SolrResourceLoader loader, StorageI * Called once during core initialization to get the managed * data loaded from storage and notify observers. */ - public void loadManagedDataAndNotify(List observers) + public void loadManagedDataAndNotify(Collection observers) throws SolrException { // load managed data from storage @@ -101,8 +102,7 @@ public void loadManagedDataAndNotify(List observers) * reload the core to get updates applied to the analysis components that * depend on the ManagedResource data. 
*/ - @SuppressWarnings("unchecked") - protected void notifyObserversDuringInit(NamedList args, List observers) + protected void notifyObserversDuringInit(NamedList args, Collection observers) throws SolrException { if (observers == null || observers.isEmpty()) diff --git a/solr/core/src/java/org/apache/solr/rest/RestManager.java b/solr/core/src/java/org/apache/solr/rest/RestManager.java index 8450d9bd0477..abefc68f614b 100644 --- a/solr/core/src/java/org/apache/solr/rest/RestManager.java +++ b/solr/core/src/java/org/apache/solr/rest/RestManager.java @@ -25,6 +25,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -76,7 +77,7 @@ public class RestManager { private static class ManagedResourceRegistration { String resourceId; Class implClass; - List observers = new ArrayList<>(); + Set observers = new LinkedHashSet<>(); private ManagedResourceRegistration(String resourceId, Class implClass, @@ -229,7 +230,7 @@ public synchronized void registerManagedResource(String resourceId, } // there may be a RestManager, in which case, we want to add this new ManagedResource immediately - if (initializedRestManager != null) { + if (initializedRestManager != null && initializedRestManager.getManagedResourceOrNull(resourceId) == null) { initializedRestManager.addRegisteredResource(registered.get(resourceId)); } } diff --git a/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java b/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java index 050132823ac0..d25ab89c93e1 100644 --- a/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java +++ b/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java @@ -16,11 +16,12 @@ */ package org.apache.solr.rest.schema; +import java.util.List; +import java.util.Map; + import javax.xml.parsers.DocumentBuilder; import 
javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; -import java.util.List; -import java.util.Map; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; diff --git a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java index 80d6a7076a1d..6d7c1f570857 100644 --- a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java +++ b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java @@ -137,7 +137,8 @@ protected void onManagedDataLoadedFromStorage(NamedList managedInitArgs, Obje synonymMappings = new TreeMap<>(); if (managedData != null) { Map storedSyns = (Map)managedData; - for (String key : storedSyns.keySet()) { + for (Map.Entry entry : storedSyns.entrySet()) { + String key = entry.getKey(); String caseKey = applyCaseSetting(ignoreCase, key); CasePreservedSynonymMappings cpsm = synonymMappings.get(caseKey); @@ -148,7 +149,7 @@ protected void onManagedDataLoadedFromStorage(NamedList managedInitArgs, Obje // give the nature of our JSON parsing solution, we really have // no guarantees on what is in the file - Object mapping = storedSyns.get(key); + Object mapping = entry.getValue(); if (!(mapping instanceof List)) { throw new SolrException(ErrorCode.SERVER_ERROR, "Invalid synonym file format! 
Expected a list of synonyms for "+key+ @@ -156,7 +157,7 @@ protected void onManagedDataLoadedFromStorage(NamedList managedInitArgs, Obje } Set sortedVals = new TreeSet<>(); - sortedVals.addAll((List)storedSyns.get(key)); + sortedVals.addAll((List) entry.getValue()); cpsm.mappings.put(key, sortedVals); } } @@ -264,8 +265,8 @@ protected boolean applyMapUpdates(Map jsonMap, boolean ignoreCase protected Map> getStoredView() { Map> storedView = new TreeMap<>(); for (CasePreservedSynonymMappings cpsm : synonymMappings.values()) { - for (String key : cpsm.mappings.keySet()) { - storedView.put(key, cpsm.mappings.get(key)); + for (Map.Entry> entry : cpsm.mappings.entrySet()) { + storedView.put(entry.getKey(), entry.getValue()); } } return storedView; @@ -361,10 +362,10 @@ public ManagedSynonymParser(SynonymManager synonymManager, boolean dedup, Analyz public void parse(Reader in) throws IOException, ParseException { boolean ignoreCase = synonymManager.getIgnoreCase(); for (CasePreservedSynonymMappings cpsm : synonymManager.synonymMappings.values()) { - for (String term : cpsm.mappings.keySet()) { - for (String mapping : cpsm.mappings.get(term)) { + for (Map.Entry> entry : cpsm.mappings.entrySet()) { + for (String mapping : entry.getValue()) { // apply the case setting to match the behavior of the SynonymMap builder - CharsRef casedTerm = analyze(synonymManager.applyCaseSetting(ignoreCase, term), new CharsRefBuilder()); + CharsRef casedTerm = analyze(synonymManager.applyCaseSetting(ignoreCase, entry.getKey()), new CharsRefBuilder()); CharsRef casedMapping = analyze(synonymManager.applyCaseSetting(ignoreCase, mapping), new CharsRefBuilder()); add(casedTerm, casedMapping, false); } diff --git a/solr/core/src/java/org/apache/solr/schema/BoolField.java b/solr/core/src/java/org/apache/solr/schema/BoolField.java index 8cad7438bc87..5fb2d85fbc22 100644 --- a/solr/core/src/java/org/apache/solr/schema/BoolField.java +++ b/solr/core/src/java/org/apache/solr/schema/BoolField.java @@ 
-45,6 +45,7 @@ import org.apache.solr.search.QParser; import org.apache.solr.search.function.OrdFieldSource; import org.apache.solr.uninverting.UninvertingReader.Type; + /** * */ @@ -260,8 +261,8 @@ private int getOrdForDoc(int doc) throws IOException { return -1; } } + @Override - public boolean boolVal(int doc) throws IOException { return getOrdForDoc(doc) == trueOrd; } @@ -298,9 +299,10 @@ public boolean equals(Object o) { } private static final int hcode = OrdFieldSource.class.hashCode(); + @Override public int hashCode() { return hcode + field.hashCode(); - }; + } } diff --git a/solr/core/src/java/org/apache/solr/schema/FieldType.java b/solr/core/src/java/org/apache/solr/schema/FieldType.java index 69ef98170a0f..83748b4486d0 100644 --- a/solr/core/src/java/org/apache/solr/schema/FieldType.java +++ b/solr/core/src/java/org/apache/solr/schema/FieldType.java @@ -1004,9 +1004,10 @@ public SimpleOrderedMap getNamedPropertyValues(boolean showDefaults) { if (showDefaults) { Map fieldTypeArgs = getNonFieldPropertyArgs(); if (null != fieldTypeArgs) { - for (String key : fieldTypeArgs.keySet()) { - if ( ! CLASS_NAME.equals(key) && ! TYPE_NAME.equals(key)) { - namedPropertyValues.add(key, fieldTypeArgs.get(key)); + for (Map.Entry entry : fieldTypeArgs.entrySet()) { + String key = entry.getKey(); + if ( ! CLASS_NAME.equals(key) && ! 
TYPE_NAME.equals(key)) { + namedPropertyValues.add(key, entry.getValue()); } } } @@ -1048,11 +1049,12 @@ public SimpleOrderedMap getNamedPropertyValues(boolean showDefaults) { fieldProperties.add(propertyName); } - for (String key : args.keySet()) { + for (Map.Entry entry : args.entrySet()) { + String key = entry.getKey(); if (fieldProperties.contains(key)) { - namedPropertyValues.add(key, StrUtils.parseBool(args.get(key))); + namedPropertyValues.add(key, StrUtils.parseBool(entry.getValue())); } else if (!CLASS_NAME.equals(key) && !TYPE_NAME.equals(key)) { - namedPropertyValues.add(key, args.get(key)); + namedPropertyValues.add(key, entry.getValue()); } } } @@ -1112,14 +1114,15 @@ protected static SimpleOrderedMap getAnalyzerProperties(Analyzer analyze props.add(CLASS_NAME, charFilterFactory.getClassArg()); factoryArgs = charFilterFactory.getOriginalArgs(); if (null != factoryArgs) { - for (String key : factoryArgs.keySet()) { + for (Map.Entry entry : factoryArgs.entrySet()) { + String key = entry.getKey(); if ( ! CLASS_NAME.equals(key)) { if (LUCENE_MATCH_VERSION_PARAM.equals(key)) { if (charFilterFactory.isExplicitLuceneMatchVersion()) { - props.add(key, factoryArgs.get(key)); + props.add(key, entry.getValue()); } } else { - props.add(key, factoryArgs.get(key)); + props.add(key, entry.getValue()); } } } @@ -1134,14 +1137,15 @@ protected static SimpleOrderedMap getAnalyzerProperties(Analyzer analyze tokenizerProps.add(CLASS_NAME, tokenizerFactory.getClassArg()); factoryArgs = tokenizerFactory.getOriginalArgs(); if (null != factoryArgs) { - for (String key : factoryArgs.keySet()) { + for (Map.Entry entry : factoryArgs.entrySet()) { + String key = entry.getKey(); if ( ! 
CLASS_NAME.equals(key)) { if (LUCENE_MATCH_VERSION_PARAM.equals(key)) { if (tokenizerFactory.isExplicitLuceneMatchVersion()) { - tokenizerProps.add(key, factoryArgs.get(key)); + tokenizerProps.add(key, entry.getValue()); } } else { - tokenizerProps.add(key, factoryArgs.get(key)); + tokenizerProps.add(key, entry.getValue()); } } } @@ -1156,14 +1160,15 @@ protected static SimpleOrderedMap getAnalyzerProperties(Analyzer analyze props.add(CLASS_NAME, filterFactory.getClassArg()); factoryArgs = filterFactory.getOriginalArgs(); if (null != factoryArgs) { - for (String key : factoryArgs.keySet()) { + for (Map.Entry entry : factoryArgs.entrySet()) { + String key = entry.getKey(); if ( ! CLASS_NAME.equals(key)) { if (LUCENE_MATCH_VERSION_PARAM.equals(key)) { if (filterFactory.isExplicitLuceneMatchVersion()) { - props.add(key, factoryArgs.get(key)); + props.add(key, entry.getValue()); } } else { - props.add(key, factoryArgs.get(key)); + props.add(key, entry.getValue()); } } } diff --git a/solr/core/src/java/org/apache/solr/schema/FieldTypePluginLoader.java b/solr/core/src/java/org/apache/solr/schema/FieldTypePluginLoader.java index 25b54083a97b..701789a09616 100644 --- a/solr/core/src/java/org/apache/solr/schema/FieldTypePluginLoader.java +++ b/solr/core/src/java/org/apache/solr/schema/FieldTypePluginLoader.java @@ -233,10 +233,9 @@ private Analyzer readAnalyzer(Node node) throws XPathExpressionException { schema.getDefaultLuceneMatchVersion() : SolrConfig.parseLuceneVersionString(matchVersionStr); if (luceneMatchVersion == null) { - throw new SolrException - ( SolrException.ErrorCode.SERVER_ERROR, + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Configuration Error: Analyzer '" + clazz.getName() + - "' needs a 'luceneMatchVersion' parameter"); + "' needs a '" + IndexSchema.LUCENE_MATCH_VERSION_PARAM + "' parameter"); } analyzer.setVersion(luceneMatchVersion); return analyzer; diff --git a/solr/core/src/java/org/apache/solr/schema/IndexSchema.java 
b/solr/core/src/java/org/apache/solr/schema/IndexSchema.java index 980be8d60072..02168f180a7b 100644 --- a/solr/core/src/java/org/apache/solr/schema/IndexSchema.java +++ b/solr/core/src/java/org/apache/solr/schema/IndexSchema.java @@ -138,7 +138,7 @@ public class IndexSchema { protected List fieldsWithDefaultValue = new ArrayList<>(); protected Collection requiredFields = new HashSet<>(); - protected volatile DynamicField[] dynamicFields; + protected DynamicField[] dynamicFields = new DynamicField[] {}; public DynamicField[] getDynamicFields() { return dynamicFields; } protected Map dynamicFieldCache = new ConcurrentHashMap<>(); @@ -151,7 +151,7 @@ public class IndexSchema { protected Map> copyFieldsMap = new HashMap<>(); public Map> getCopyFieldsMap() { return Collections.unmodifiableMap(copyFieldsMap); } - protected DynamicCopy[] dynamicCopyFields; + protected DynamicCopy[] dynamicCopyFields = new DynamicCopy[] {}; public DynamicCopy[] getDynamicCopyFields() { return dynamicCopyFields; } private Map decoders = new HashMap<>(); // cache to avoid scanning token filters repeatedly, unnecessarily @@ -962,18 +962,12 @@ protected void registerExplicitSrcAndDestFields(String source, int maxChars, Sch private void incrementCopyFieldTargetCount(SchemaField dest) { copyFieldTargetCounts.put(dest, copyFieldTargetCounts.containsKey(dest) ? 
copyFieldTargetCounts.get(dest) + 1 : 1); } - - private void registerDynamicCopyField( DynamicCopy dcopy ) { - if( dynamicCopyFields == null ) { - dynamicCopyFields = new DynamicCopy[] {dcopy}; - } - else { - DynamicCopy[] temp = new DynamicCopy[dynamicCopyFields.length+1]; - System.arraycopy(dynamicCopyFields,0,temp,0,dynamicCopyFields.length); - temp[temp.length -1] = dcopy; - dynamicCopyFields = temp; - } - log.trace("Dynamic Copy Field:" + dcopy); + + private void registerDynamicCopyField(DynamicCopy dcopy) { + DynamicCopy[] temp = new DynamicCopy[dynamicCopyFields.length + 1]; + System.arraycopy(dynamicCopyFields, 0, temp, 0, dynamicCopyFields.length); + temp[temp.length - 1] = dcopy; + dynamicCopyFields = temp; } static SimilarityFactory readSimilarity(SolrResourceLoader loader, Node node) { @@ -1337,11 +1331,9 @@ public List getCopySources(String destField) { } } } - if (null != dynamicCopyFields) { - for (DynamicCopy dynamicCopy : dynamicCopyFields) { - if (dynamicCopy.getDestFieldName().equals(destField)) { - fieldNames.add(dynamicCopy.getRegex()); - } + for (DynamicCopy dynamicCopy : dynamicCopyFields) { + if (dynamicCopy.getDestFieldName().equals(destField)) { + fieldNames.add(dynamicCopy.getRegex()); } } return fieldNames; @@ -1356,11 +1348,9 @@ public List getCopySources(String destField) { // This is useful when we need the maxSize param of each CopyField public List getCopyFieldsList(final String sourceField){ final List result = new ArrayList<>(); - if (null != dynamicCopyFields) { - for (DynamicCopy dynamicCopy : dynamicCopyFields) { - if (dynamicCopy.matches(sourceField)) { - result.add(new CopyField(getField(sourceField), dynamicCopy.getTargetField(sourceField), dynamicCopy.maxChars)); - } + for (DynamicCopy dynamicCopy : dynamicCopyFields) { + if (dynamicCopy.matches(sourceField)) { + result.add(new CopyField(getField(sourceField), dynamicCopy.getTargetField(sourceField), dynamicCopy.maxChars)); } } List fixedCopyFields = 
copyFieldsMap.get(sourceField); @@ -1556,48 +1546,46 @@ public Map getNamedPropertyValues(String name, SolrParams params } } } - if (null != dynamicCopyFields) { - for (IndexSchema.DynamicCopy dynamicCopy : dynamicCopyFields) { - final String source = dynamicCopy.getRegex(); - final String destination = dynamicCopy.getDestFieldName(); - if ((null == requestedSourceFields || requestedSourceFields.contains(source)) - && (null == requestedDestinationFields || requestedDestinationFields.contains(destination))) { - SimpleOrderedMap dynamicCopyProps = new SimpleOrderedMap<>(); - - dynamicCopyProps.add(SOURCE, dynamicCopy.getRegex()); - if (showDetails) { - IndexSchema.DynamicField sourceDynamicBase = dynamicCopy.getSourceDynamicBase(); - if (null != sourceDynamicBase) { - dynamicCopyProps.add(SOURCE_DYNAMIC_BASE, sourceDynamicBase.getRegex()); - } else if (source.contains("*")) { - List sourceExplicitFields = new ArrayList<>(); - Pattern pattern = Pattern.compile(source.replace("*", ".*")); // glob->regex - for (String field : fields.keySet()) { - if (pattern.matcher(field).matches()) { - sourceExplicitFields.add(field); - } - } - if (sourceExplicitFields.size() > 0) { - Collections.sort(sourceExplicitFields); - dynamicCopyProps.add(SOURCE_EXPLICIT_FIELDS, sourceExplicitFields); + for (IndexSchema.DynamicCopy dynamicCopy : dynamicCopyFields) { + final String source = dynamicCopy.getRegex(); + final String destination = dynamicCopy.getDestFieldName(); + if ((null == requestedSourceFields || requestedSourceFields.contains(source)) + && (null == requestedDestinationFields || requestedDestinationFields.contains(destination))) { + SimpleOrderedMap dynamicCopyProps = new SimpleOrderedMap<>(); + + dynamicCopyProps.add(SOURCE, dynamicCopy.getRegex()); + if (showDetails) { + IndexSchema.DynamicField sourceDynamicBase = dynamicCopy.getSourceDynamicBase(); + if (null != sourceDynamicBase) { + dynamicCopyProps.add(SOURCE_DYNAMIC_BASE, sourceDynamicBase.getRegex()); + } else if 
(source.contains("*")) { + List sourceExplicitFields = new ArrayList<>(); + Pattern pattern = Pattern.compile(source.replace("*", ".*")); // glob->regex + for (String field : fields.keySet()) { + if (pattern.matcher(field).matches()) { + sourceExplicitFields.add(field); } } - } - - dynamicCopyProps.add(DESTINATION, dynamicCopy.getDestFieldName()); - if (showDetails) { - IndexSchema.DynamicField destDynamicBase = dynamicCopy.getDestDynamicBase(); - if (null != destDynamicBase) { - dynamicCopyProps.add(DESTINATION_DYNAMIC_BASE, destDynamicBase.getRegex()); + if (sourceExplicitFields.size() > 0) { + Collections.sort(sourceExplicitFields); + dynamicCopyProps.add(SOURCE_EXPLICIT_FIELDS, sourceExplicitFields); } } + } - if (0 != dynamicCopy.getMaxChars()) { - dynamicCopyProps.add(MAX_CHARS, dynamicCopy.getMaxChars()); + dynamicCopyProps.add(DESTINATION, dynamicCopy.getDestFieldName()); + if (showDetails) { + IndexSchema.DynamicField destDynamicBase = dynamicCopy.getDestDynamicBase(); + if (null != destDynamicBase) { + dynamicCopyProps.add(DESTINATION_DYNAMIC_BASE, destDynamicBase.getRegex()); } + } - copyFieldProperties.add(dynamicCopyProps); + if (0 != dynamicCopy.getMaxChars()) { + dynamicCopyProps.add(MAX_CHARS, dynamicCopy.getMaxChars()); } + + copyFieldProperties.add(dynamicCopyProps); } } return copyFieldProperties; diff --git a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java index c7fbf276be5e..57b0c90e90bb 100644 --- a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java +++ b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java @@ -81,7 +81,7 @@ /** Solr-managed schema - non-user-editable, but can be mutable via internal and external REST API requests. 
*/ public final class ManagedIndexSchema extends IndexSchema { - private boolean isMutable = false; + private final boolean isMutable; @Override public boolean isMutable() { return isMutable; } @@ -654,7 +654,7 @@ public ManagedIndexSchema deleteDynamicFields(Collection fieldNamePatter System.arraycopy(newSchema.dynamicFields, dfPos + 1, temp, dfPos, newSchema.dynamicFields.length - dfPos - 1); newSchema.dynamicFields = temp; } else { - newSchema.dynamicFields = new DynamicField[0]; + newSchema.dynamicFields = new DynamicField[] {}; } } // After removing all dynamic fields, rebuild affected dynamic copy fields. @@ -840,26 +840,24 @@ private void deleteCopyField(String source, String dest) { boolean found = false; if (null == destSchemaField || null == sourceSchemaField) { // Must be dynamic copy field - if (dynamicCopyFields != null) { - for (int i = 0 ; i < dynamicCopyFields.length ; ++i) { - DynamicCopy dynamicCopy = dynamicCopyFields[i]; - if (source.equals(dynamicCopy.getRegex()) && dest.equals(dynamicCopy.getDestFieldName())) { - found = true; - SchemaField destinationPrototype = dynamicCopy.getDestination().getPrototype(); - if (copyFieldTargetCounts.containsKey(destinationPrototype)) { - decrementCopyFieldTargetCount(destinationPrototype); - } - if (dynamicCopyFields.length > 1) { - DynamicCopy[] temp = new DynamicCopy[dynamicCopyFields.length - 1]; - System.arraycopy(dynamicCopyFields, 0, temp, 0, i); - // skip over the dynamic copy field to be deleted - System.arraycopy(dynamicCopyFields, i + 1, temp, i, dynamicCopyFields.length - i - 1); - dynamicCopyFields = temp; - } else { - dynamicCopyFields = null; - } - break; + for (int i = 0; i < dynamicCopyFields.length; ++i) { + DynamicCopy dynamicCopy = dynamicCopyFields[i]; + if (source.equals(dynamicCopy.getRegex()) && dest.equals(dynamicCopy.getDestFieldName())) { + found = true; + SchemaField destinationPrototype = dynamicCopy.getDestination().getPrototype(); + if 
(copyFieldTargetCounts.containsKey(destinationPrototype)) { + decrementCopyFieldTargetCount(destinationPrototype); + } + if (dynamicCopyFields.length > 1) { + DynamicCopy[] temp = new DynamicCopy[dynamicCopyFields.length - 1]; + System.arraycopy(dynamicCopyFields, 0, temp, 0, i); + // skip over the dynamic copy field to be deleted + System.arraycopy(dynamicCopyFields, i + 1, temp, i, dynamicCopyFields.length - i - 1); + dynamicCopyFields = temp; + } else { + dynamicCopyFields = new DynamicCopy[] {}; } + break; } } } else { // non-dynamic copy field directive diff --git a/solr/core/src/java/org/apache/solr/schema/SchemaManager.java b/solr/core/src/java/org/apache/solr/schema/SchemaManager.java index afc3b04fe893..31a7206ed8e6 100644 --- a/solr/core/src/java/org/apache/solr/schema/SchemaManager.java +++ b/solr/core/src/java/org/apache/solr/schema/SchemaManager.java @@ -138,6 +138,7 @@ private List doOperations(List operations) throws InterruptedE //only for non cloud stuff managedIndexSchema.persistManagedSchema(false); core.setLatestSchema(managedIndexSchema); + core.getCoreContainer().reload(core.getName()); } catch (SolrException e) { log.warn(errorMsg); errors = singletonList(errorMsg + e.getMessage()); diff --git a/solr/core/src/java/org/apache/solr/search/CacheConfig.java b/solr/core/src/java/org/apache/solr/search/CacheConfig.java index 753762a07c17..16a9d5725682 100644 --- a/solr/core/src/java/org/apache/solr/search/CacheConfig.java +++ b/solr/core/src/java/org/apache/solr/search/CacheConfig.java @@ -14,150 +14,148 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package org.apache.solr.search; import javax.xml.xpath.XPathConstants; -import java.io.IOException; +import java.lang.invoke.MethodHandles; +import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import org.apache.lucene.analysis.util.ResourceLoader; -import org.apache.solr.common.MapWriter; import org.apache.solr.common.SolrException; -import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.util.StrUtils; -import org.apache.solr.common.util.Utils; -import org.apache.solr.core.ConfigOverlay; -import org.apache.solr.core.MemClassLoader; -import org.apache.solr.core.PluginInfo; -import org.apache.solr.core.RuntimeLib; +import org.apache.solr.common.MapSerializable; import org.apache.solr.core.SolrConfig; -import org.apache.solr.core.SolrCore; +import org.apache.solr.core.SolrResourceLoader; import org.apache.solr.util.DOMUtil; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import static org.apache.solr.common.params.CommonParams.NAME; -public class CacheConfig implements MapWriter { - final PluginInfo args; - private CacheRegenerator defRegen; - private final String name; - private String cacheImpl, regenImpl; - Object[] persistence = new Object[1]; +/** + * Contains the knowledge of how cache config is + * stored in the solrconfig.xml file, and implements a + * factory to create caches. 
+ * + * + */ +public class CacheConfig implements MapSerializable{ + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + private String nodeName; + private Class clazz; + private Map args; + private CacheRegenerator regenerator; - public CacheConfig(Map args, String path) { - this.args = new PluginInfo(SolrCache.TYPE, (Map) copyValsAsString(args)); - this.name = args.get(NAME); - this.cacheImpl = args.getOrDefault("class", "solr.LRUCache"); - this.regenImpl = args.get("regenerator"); - this.args.pathInConfig = StrUtils.splitSmart(path, '/', true); - } + private String cacheImpl; - static Map copyValsAsString(Map m) { - Map copy = new LinkedHashMap(m.size()); - m.forEach((k, v) -> copy.put(String.valueOf(k), String.valueOf(v))); - return copy; - } + private Object[] persistence = new Object[1]; - public static CacheConfig getConfig(SolrConfig solrConfig, String xpath) { - Node node = solrConfig.getNode(xpath, false); - if (node == null || !"true".equals(DOMUtil.getAttrOrDefault(node, "enabled", "true"))) { - Map m = solrConfig.getOverlay().getEditableSubProperties(xpath); - if (m == null) return null; - List pieces = StrUtils.splitSmart(xpath, '/'); - String name = pieces.get(pieces.size() - 1); - m = Utils.getDeepCopy(m, 2); - m.put(NAME, name); - return new CacheConfig(m, xpath); - } else { - Map attrs = DOMUtil.toMap(node.getAttributes()); - attrs.put(NAME, node.getNodeName()); - return new CacheConfig(applyOverlay(xpath, solrConfig.getOverlay(), attrs), xpath); + private String regenImpl; - } + public CacheConfig() {} + public CacheConfig(Class clazz, Map args, CacheRegenerator regenerator) { + this.clazz = clazz; + this.args = args; + this.regenerator = regenerator; + } + public CacheRegenerator getRegenerator() { + return regenerator; } - private static Map applyOverlay(String xpath, ConfigOverlay overlay, Map args) { - Map map = xpath == null ? 
null : overlay.getEditableSubProperties(xpath); - if (map != null) { - HashMap mapCopy = new HashMap<>(args); - for (Map.Entry e : map.entrySet()) { - mapCopy.put(e.getKey(), String.valueOf(e.getValue())); - } - return mapCopy; - } - return args; + public void setRegenerator(CacheRegenerator regenerator) { + this.regenerator = regenerator; } - public static Map getConfigs(SolrConfig solrConfig, String configPath) { + public static Map getMultipleConfigs(SolrConfig solrConfig, String configPath) { NodeList nodes = (NodeList) solrConfig.evaluate(configPath, XPathConstants.NODESET); if (nodes == null || nodes.getLength() == 0) return new LinkedHashMap<>(); Map result = new HashMap<>(nodes.getLength()); for (int i = 0; i < nodes.getLength(); i++) { - Map args = DOMUtil.toMap(nodes.item(i).getAttributes()); - result.put(args.get(NAME), new CacheConfig(args, configPath+"/"+args.get(NAME))); + CacheConfig config = getConfig(solrConfig, nodes.item(i).getNodeName(), DOMUtil.toMap(nodes.item(i).getAttributes()), configPath); + result.put(config.args.get(NAME), config); } return result; } - public String getName() { - return name; + + public static CacheConfig getConfig(SolrConfig solrConfig, String xpath) { + Node node = solrConfig.getNode(xpath, false); + if(node == null || !"true".equals(DOMUtil.getAttrOrDefault(node, "enabled", "true"))) { + Map m = solrConfig.getOverlay().getEditableSubProperties(xpath); + if(m==null) return null; + List parts = StrUtils.splitSmart(xpath, '/'); + return getConfig(solrConfig,parts.get(parts.size()-1) , Collections.EMPTY_MAP,xpath); + } + return getConfig(solrConfig, node.getNodeName(),DOMUtil.toMap(node.getAttributes()), xpath); } - public SolrCacheHolder newInstance(SolrCore core) { - return new SolrCacheHolder(new CacheInfo(this, core)); - } + public static CacheConfig getConfig(SolrConfig solrConfig, String nodeName, Map attrs, String xpath) { + CacheConfig config = new CacheConfig(); + config.nodeName = nodeName; + Map attrsCopy = new 
LinkedHashMap<>(attrs.size()); + for (Map.Entry e : attrs.entrySet()) { + attrsCopy.put(e.getKey(), String.valueOf(e.getValue())); + } + attrs = attrsCopy; + config.args = attrs; - static class CacheInfo { - final CacheConfig cfg; - SolrCore core; - SolrCache cache = null; - String pkg; - RuntimeLib runtimeLib; - CacheRegenerator regen = null; - - - CacheInfo(CacheConfig cfg, SolrCore core) { - this.core = core; - this.cfg = cfg; - pkg = cfg.args.attributes.get(CommonParams.PACKAGE); - ResourceLoader loader = pkg == null ? core.getResourceLoader() : - core.getCoreContainer().getPackageManager().getResourceLoader(pkg); - - try { - cache = loader.findClass(cfg.cacheImpl, SolrCache.class).getConstructor().newInstance(); - regen = null; - if (cfg.regenImpl != null) { - regen = loader.findClass(cfg.regenImpl, CacheRegenerator.class).getConstructor().newInstance(); - } - } catch (Exception e) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error loading cache " + cfg.jsonStr(), e); - } - if (regen == null && cfg.defRegen != null) regen = cfg.defRegen; - cfg.persistence[0] = cache.init(cfg.args.attributes, cfg.persistence[0], regen); - if (pkg!=null && loader instanceof MemClassLoader) { - MemClassLoader memClassLoader = (MemClassLoader) loader; - runtimeLib = core.getCoreContainer().getPackageManager().getLib(pkg); + Map map = xpath == null ? 
null : solrConfig.getOverlay().getEditableSubProperties(xpath); + if(map != null){ + HashMap mapCopy = new HashMap<>(config.args); + for (Map.Entry e : map.entrySet()) { + mapCopy.put(e.getKey(),String.valueOf(e.getValue())); } + config.args = mapCopy; + } + String nameAttr = config.args.get(NAME); // OPTIONAL + if (nameAttr==null) { + config.args.put(NAME, config.nodeName); + } + SolrResourceLoader loader = solrConfig.getResourceLoader(); + config.cacheImpl = config.args.get("class"); + if(config.cacheImpl == null) config.cacheImpl = "solr.LRUCache"; + config.regenImpl = config.args.get("regenerator"); + config.clazz = loader.findClass(config.cacheImpl, SolrCache.class); + if (config.regenImpl != null) { + config.regenerator = loader.newInstance(config.regenImpl, CacheRegenerator.class); } + + return config; } - - public void setDefaultRegenerator(CacheRegenerator regen) { - this.defRegen = regen; + public SolrCache newInstance() { + try { + SolrCache cache = clazz.getConstructor().newInstance(); + persistence[0] = cache.init(args, persistence[0], regenerator); + return cache; + } catch (Exception e) { + SolrException.log(log,"Error instantiating cache",e); + // we can carry on without a cache... but should we? + // in some cases (like an OOM) we probably should try to continue. 
+ return null; + } } @Override - public void writeMap(EntryWriter ew) throws IOException { - args.attributes.forEach(ew.getBiConsumer()); + public Map toMap(Map map) { + Map result = Collections.unmodifiableMap(args); + return result; } + + public String getNodeName() { + return nodeName; + } + + } diff --git a/solr/core/src/java/org/apache/solr/search/CaffeineCache.java b/solr/core/src/java/org/apache/solr/search/CaffeineCache.java new file mode 100644 index 000000000000..f3c4c66f4229 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/search/CaffeineCache.java @@ -0,0 +1,367 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.search; + +import java.lang.invoke.MethodHandles; +import java.time.Duration; +import java.util.Collections; +import java.util.Locale; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.ForkJoinPool; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.LongAdder; + +import com.github.benmanes.caffeine.cache.RemovalCause; +import com.github.benmanes.caffeine.cache.RemovalListener; +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; +import org.apache.solr.common.SolrException; +import org.apache.solr.metrics.MetricsMap; +import org.apache.solr.metrics.SolrMetricsContext; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; +import com.github.benmanes.caffeine.cache.Policy.Eviction; +import com.github.benmanes.caffeine.cache.stats.CacheStats; +import com.google.common.annotations.VisibleForTesting; + +/** + * A SolrCache backed by the Caffeine caching library [1]. By default it uses the Window TinyLFU (W-TinyLFU) + * eviction policy. + *

This cache supports either maximum size limit (the number of items) or maximum ram bytes limit, but + * not both. If both values are set then only maxRamMB limit is used and maximum size limit is ignored.

+ *

+ * W-TinyLFU [2] is a near optimal policy that uses recency and frequency to determine which entry + * to evict in O(1) time. The estimated frequency is retained in a Count-Min Sketch and entries + * reside on LRU priority queues [3]. By capturing the historic frequency of an entry, the cache is + * able to outperform classic policies like LRU and LFU, as well as modern policies like ARC and + * LIRS. This policy performed particularly well in search workloads. + *

+ * [1] https://github.com/ben-manes/caffeine + * [2] http://arxiv.org/pdf/1512.00727.pdf + * [3] http://highscalability.com/blog/2016/1/25/design-of-a-modern-cache.html + */ +public class CaffeineCache extends SolrCacheBase implements SolrCache, Accountable, RemovalListener { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CaffeineCache.class) + + RamUsageEstimator.shallowSizeOfInstance(CacheStats.class) + + 2 * RamUsageEstimator.shallowSizeOfInstance(LongAdder.class); + + private Executor executor; + + private CacheStats priorStats; + private long priorInserts; + + private String description = "Caffeine Cache"; + private LongAdder inserts; + private Cache cache; + private long warmupTime; + private int maxSize; + private long maxRamBytes; + private int initialSize; + private int maxIdleTimeSec; + private boolean cleanupThread; + + private Set metricNames = ConcurrentHashMap.newKeySet(); + private MetricsMap cacheMap; + private SolrMetricsContext solrMetricsContext; + + private long initialRamBytes = 0; + private final LongAdder ramBytes = new LongAdder(); + + public CaffeineCache() { + this.priorStats = CacheStats.empty(); + } + + @Override + @SuppressWarnings({"unchecked", "rawtypes"}) + public Object init(Map args, Object persistence, CacheRegenerator regenerator) { + super.init(args, regenerator); + String str = (String) args.get(SIZE_PARAM); + maxSize = (str == null) ? 1024 : Integer.parseInt(str); + str = (String) args.get("initialSize"); + initialSize = Math.min((str == null) ? 1024 : Integer.parseInt(str), maxSize); + str = (String) args.get(MAX_IDLE_TIME_PARAM); + if (str == null) { + maxIdleTimeSec = -1; + } else { + maxIdleTimeSec = Integer.parseInt(str); + } + str = (String) args.get(MAX_RAM_MB_PARAM); + int maxRamMB = str == null ? -1 : Double.valueOf(str).intValue(); + maxRamBytes = maxRamMB < 0 ? 
Long.MAX_VALUE : maxRamMB * 1024L * 1024L; + str = (String) args.get(CLEANUP_THREAD_PARAM); + cleanupThread = str != null && Boolean.parseBoolean(str); + if (cleanupThread) { + executor = ForkJoinPool.commonPool(); + } else { + executor = Runnable::run; + } + + description = generateDescription(maxSize, initialSize); + + cache = buildCache(null); + inserts = new LongAdder(); + + initialRamBytes = + RamUsageEstimator.shallowSizeOfInstance(cache.getClass()) + + RamUsageEstimator.shallowSizeOfInstance(executor.getClass()) + + RamUsageEstimator.sizeOfObject(description); + + return persistence; + } + + private Cache buildCache(Cache prev) { + Caffeine builder = Caffeine.newBuilder() + .initialCapacity(initialSize) + .executor(executor) + .removalListener(this) + .recordStats(); + if (maxIdleTimeSec > 0) { + builder.expireAfterAccess(Duration.ofSeconds(maxIdleTimeSec)); + } + if (maxRamBytes != Long.MAX_VALUE) { + builder.maximumWeight(maxRamBytes); + builder.weigher((k, v) -> (int) (RamUsageEstimator.sizeOfObject(k) + RamUsageEstimator.sizeOfObject(v))); + } else { + builder.maximumSize(maxSize); + } + Cache newCache = builder.build(); + if (prev != null) { + newCache.putAll(prev.asMap()); + } + return newCache; + } + + @Override + public void onRemoval(K key, V value, RemovalCause cause) { + ramBytes.add( + - (RamUsageEstimator.sizeOfObject(key, RamUsageEstimator.QUERY_DEFAULT_RAM_BYTES_USED) + + RamUsageEstimator.sizeOfObject(value, RamUsageEstimator.QUERY_DEFAULT_RAM_BYTES_USED) + + RamUsageEstimator.LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY) + ); + } + + @Override + public long ramBytesUsed() { + return BASE_RAM_BYTES_USED + initialRamBytes + ramBytes.sum(); + } + + @Override + public V get(K key) { + return cache.getIfPresent(key); + } + + @Override + public V put(K key, V val) { + inserts.increment(); + V old = cache.asMap().put(key, val); + ramBytes.add(RamUsageEstimator.sizeOfObject(key, RamUsageEstimator.QUERY_DEFAULT_RAM_BYTES_USED) + + 
RamUsageEstimator.sizeOfObject(val, RamUsageEstimator.QUERY_DEFAULT_RAM_BYTES_USED)); + if (old != null) { + ramBytes.add(- RamUsageEstimator.sizeOfObject(old, RamUsageEstimator.QUERY_DEFAULT_RAM_BYTES_USED)); + } else { + ramBytes.add(RamUsageEstimator.LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY); + } + return old; + } + + @Override + public void clear() { + cache.invalidateAll(); + ramBytes.reset(); + } + + @Override + public int size() { + return cache.asMap().size(); + } + + @Override + public void close() throws Exception { + SolrCache.super.close(); + cache.invalidateAll(); + cache.cleanUp(); + if (executor instanceof ExecutorService) { + ((ExecutorService)executor).shutdownNow(); + } + ramBytes.reset(); + } + + @Override + public int getMaxSize() { + return maxSize; + } + + @Override + public void setMaxSize(int maxSize) { + if (this.maxSize == maxSize) { + return; + } + Optional> evictionOpt = cache.policy().eviction(); + if (evictionOpt.isPresent()) { + Eviction eviction = evictionOpt.get(); + eviction.setMaximum(maxSize); + this.maxSize = maxSize; + initialSize = Math.min(1024, this.maxSize); + description = generateDescription(this.maxSize, initialSize); + cache.cleanUp(); + } + } + + @Override + public int getMaxRamMB() { + return maxRamBytes != Long.MAX_VALUE ? (int) (maxRamBytes / 1024L / 1024L) : -1; + } + + @Override + public void setMaxRamMB(int maxRamMB) { + long newMaxRamBytes = maxRamMB < 0 ? 
Long.MAX_VALUE : maxRamMB * 1024L * 1024L; + if (newMaxRamBytes != maxRamBytes) { + maxRamBytes = newMaxRamBytes; + Optional> evictionOpt = cache.policy().eviction(); + if (evictionOpt.isPresent()) { + Eviction eviction = evictionOpt.get(); + if (!eviction.isWeighted()) { + // rebuild cache using weigher + cache = buildCache(cache); + return; + } else if (maxRamBytes == Long.MAX_VALUE) { + // rebuild cache using maxSize + cache = buildCache(cache); + return; + } + eviction.setMaximum(newMaxRamBytes); + description = generateDescription(this.maxSize, initialSize); + cache.cleanUp(); + } + } + } + + @Override + public void warm(SolrIndexSearcher searcher, SolrCache old) { + if (regenerator == null) { + return; + } + + long warmingStartTime = System.nanoTime(); + Map hottest = Collections.emptyMap(); + CaffeineCache other = (CaffeineCache)old; + + // warm entries + if (isAutowarmingOn()) { + Eviction policy = other.cache.policy().eviction().get(); + int size = autowarm.getWarmCount(other.cache.asMap().size()); + hottest = policy.hottest(size); + } + + for (Entry entry : hottest.entrySet()) { + try { + boolean continueRegen = regenerator.regenerateItem( + searcher, this, old, entry.getKey(), entry.getValue()); + if (!continueRegen) { + break; + } + } + catch (Exception e) { + SolrException.log(log, "Error during auto-warming of key:" + entry.getKey(), e); + } + } + + inserts.reset(); + priorStats = other.cache.stats().plus(other.priorStats); + priorInserts = other.inserts.sum() + other.priorInserts; + warmupTime = TimeUnit.MILLISECONDS.convert(System.nanoTime() - warmingStartTime, TimeUnit.NANOSECONDS); + } + + /** Returns the description of this cache. */ + private String generateDescription(int limit, int initialSize) { + return String.format(Locale.ROOT, "TinyLfu Cache(maxSize=%d, initialSize=%d%s)", + limit, initialSize, isAutowarmingOn() ? 
(", " + getAutowarmDescription()) : ""); + } + + //////////////////////// SolrInfoBean methods ////////////////////// + + @Override + public String getName() { + return CaffeineCache.class.getName(); + } + + @Override + public String getDescription() { + return description; + } + + // for unit tests only + @VisibleForTesting + MetricsMap getMetricsMap() { + return cacheMap; + } + + @Override + public SolrMetricsContext getSolrMetricsContext() { + return solrMetricsContext; + } + + @Override + public String toString() { + return name() + (cacheMap != null ? cacheMap.getValue().toString() : ""); + } + + @Override + public Set getMetricNames() { + return metricNames; + } + + @Override + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + solrMetricsContext = parentContext.getChildContext(this); + cacheMap = new MetricsMap((detailed, map) -> { + if (cache != null) { + CacheStats stats = cache.stats(); + long insertCount = inserts.sum(); + + map.put(LOOKUPS_PARAM, stats.requestCount()); + map.put(HITS_PARAM, stats.hitCount()); + map.put(HIT_RATIO_PARAM, stats.hitRate()); + map.put(INSERTS_PARAM, insertCount); + map.put(EVICTIONS_PARAM, stats.evictionCount()); + map.put(SIZE_PARAM, cache.asMap().size()); + map.put("warmupTime", warmupTime); + map.put(RAM_BYTES_USED_PARAM, ramBytesUsed()); + map.put(MAX_RAM_MB_PARAM, getMaxRamMB()); + + CacheStats cumulativeStats = priorStats.plus(stats); + map.put("cumulative_lookups", cumulativeStats.requestCount()); + map.put("cumulative_hits", cumulativeStats.hitCount()); + map.put("cumulative_hitratio", cumulativeStats.hitRate()); + map.put("cumulative_inserts", priorInserts + insertCount); + map.put("cumulative_evictions", cumulativeStats.evictionCount()); + } + }); + solrMetricsContext.gauge(this, cacheMap, true, scope, getCategory().toString()); + } +} diff --git a/solr/core/src/java/org/apache/solr/search/DisMaxQParser.java b/solr/core/src/java/org/apache/solr/search/DisMaxQParser.java index 
fa6100de7f86..04aa77cb8554 100644 --- a/solr/core/src/java/org/apache/solr/search/DisMaxQParser.java +++ b/solr/core/src/java/org/apache/solr/search/DisMaxQParser.java @@ -122,9 +122,9 @@ protected void addBoostFunctions(BooleanQuery.Builder query, SolrParams solrPara for (String boostFunc : boostFuncs) { if (null == boostFunc || "".equals(boostFunc)) continue; Map ff = SolrPluginUtils.parseFieldBoosts(boostFunc); - for (String f : ff.keySet()) { - Query fq = subQuery(f, FunctionQParserPlugin.NAME).getQuery(); - Float b = ff.get(f); + for (Map.Entry entry : ff.entrySet()) { + Query fq = subQuery(entry.getKey(), FunctionQParserPlugin.NAME).getQuery(); + Float b = entry.getValue(); if (null != b) { fq = new BoostQuery(fq, b); } diff --git a/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java b/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java index d23412173c04..584532a8301f 100644 --- a/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java +++ b/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java @@ -542,9 +542,9 @@ protected List getBoostFunctions() throws SyntaxError { for (String boostFunc : config.boostFuncs) { if(null == boostFunc || "".equals(boostFunc)) continue; Map ff = SolrPluginUtils.parseFieldBoosts(boostFunc); - for (String f : ff.keySet()) { - Query fq = subQuery(f, FunctionQParserPlugin.NAME).getQuery(); - Float b = ff.get(f); + for (Map.Entry entry : ff.entrySet()) { + Query fq = subQuery(entry.getKey(), FunctionQParserPlugin.NAME).getQuery(); + Float b = entry.getValue(); if (null != b && b.floatValue() != 1f) { fq = new BoostQuery(fq, b); } diff --git a/solr/core/src/java/org/apache/solr/search/FastLRUCache.java b/solr/core/src/java/org/apache/solr/search/FastLRUCache.java index 5a5ea01b1970..b74b63fc58e6 100644 --- a/solr/core/src/java/org/apache/solr/search/FastLRUCache.java +++ b/solr/core/src/java/org/apache/solr/search/FastLRUCache.java @@ -24,14 +24,11 @@ import 
java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.TimeUnit; -import com.codahale.metrics.MetricRegistry; -import com.google.common.collect.ImmutableList; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.RamUsageEstimator; import org.apache.solr.common.SolrException; import org.apache.solr.metrics.MetricsMap; -import org.apache.solr.metrics.SolrMetricManager; -import org.apache.solr.metrics.SolrMetrics; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.util.ConcurrentLRUCache; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -55,9 +52,6 @@ public class FastLRUCache extends SolrCacheBase implements SolrCache public static final String MIN_SIZE_PARAM = "minSize"; public static final String ACCEPTABLE_SIZE_PARAM = "acceptableSize"; - public static final String INITIAL_SIZE_PARAM = "initialSize"; - public static final String CLEANUP_THREAD_PARAM = "cleanupThread"; - public static final String SHOW_ITEMS_PARAM = "showItems"; // contains the statistics objects for all open caches of the same type private List statsList; @@ -74,10 +68,12 @@ public class FastLRUCache extends SolrCacheBase implements SolrCache private int initialSize; private int acceptableSize; private boolean cleanupThread; + private int maxIdleTimeSec; private long ramLowerWatermark; private MetricsMap cacheMap; private Set metricNames = ConcurrentHashMap.newKeySet(); + private SolrMetricsContext solrMetricsContext; @Override public Object init(Map args, Object persistence, CacheRegenerator regenerator) { @@ -104,22 +100,29 @@ public Object init(Map args, Object persistence, CacheRegenerator regenerator) { str = (String) args.get(INITIAL_SIZE_PARAM); initialSize = str == null ? maxSize : Integer.parseInt(str); str = (String) args.get(CLEANUP_THREAD_PARAM); - cleanupThread = str != null && Boolean.parseBoolean(str); + cleanupThread = str == null ? 
false : Boolean.parseBoolean(str); str = (String) args.get(SHOW_ITEMS_PARAM); showItems = str == null ? 0 : Integer.parseInt(str); + str = (String) args.get(MAX_IDLE_TIME_PARAM); + if (str == null) { + maxIdleTimeSec = -1; + } else { + maxIdleTimeSec = Integer.parseInt(str); + } + str = (String) args.get(MAX_RAM_MB_PARAM); long maxRamMB = str == null ? -1 : (long) Double.parseDouble(str); this.maxRamBytes = maxRamMB < 0 ? Long.MAX_VALUE : maxRamMB * 1024L * 1024L; if (maxRamBytes != Long.MAX_VALUE) { ramLowerWatermark = Math.round(maxRamBytes * 0.8); description = generateDescription(maxRamBytes, ramLowerWatermark, cleanupThread); - cache = new ConcurrentLRUCache<>(ramLowerWatermark, maxRamBytes, cleanupThread, null); + cache = new ConcurrentLRUCache<>(ramLowerWatermark, maxRamBytes, cleanupThread, null, maxIdleTimeSec); } else { ramLowerWatermark = -1L; description = generateDescription(maxSize, initialSize, minSizeLimit, acceptableSize, cleanupThread); - cache = new ConcurrentLRUCache<>(maxSize, minSizeLimit, acceptableSize, initialSize, cleanupThread, false, null); + cache = new ConcurrentLRUCache<>(maxSize, minSizeLimit, acceptableSize, initialSize, cleanupThread, false, null, maxIdleTimeSec); } cache.setAlive(false); @@ -135,6 +138,63 @@ public Object init(Map args, Object persistence, CacheRegenerator regenerator) { statsList.add(new ConcurrentLRUCache.Stats()); } statsList.add(cache.getStats()); + cacheMap = new MetricsMap((detailed, map) -> { + if (cache != null) { + ConcurrentLRUCache.Stats stats = cache.getStats(); + long lookups = stats.getCumulativeLookups(); + long hits = stats.getCumulativeHits(); + long inserts = stats.getCumulativePuts(); + long evictions = stats.getCumulativeEvictions(); + long idleEvictions = stats.getCumulativeIdleEvictions(); + long size = stats.getCurrentSize(); + long clookups = 0; + long chits = 0; + long cinserts = 0; + long cevictions = 0; + long cIdleEvictions = 0; + + // NOTE: It is safe to iterate on a 
CopyOnWriteArrayList + for (ConcurrentLRUCache.Stats statistiscs : statsList) { + clookups += statistiscs.getCumulativeLookups(); + chits += statistiscs.getCumulativeHits(); + cinserts += statistiscs.getCumulativePuts(); + cevictions += statistiscs.getCumulativeEvictions(); + cIdleEvictions += statistiscs.getCumulativeIdleEvictions(); + } + + map.put(LOOKUPS_PARAM, lookups); + map.put(HITS_PARAM, hits); + map.put(HIT_RATIO_PARAM, calcHitRatio(lookups, hits)); + map.put(INSERTS_PARAM, inserts); + map.put(EVICTIONS_PARAM, evictions); + map.put(SIZE_PARAM, size); + map.put("cleanupThread", cleanupThread); + map.put("idleEvictions", idleEvictions); + map.put(RAM_BYTES_USED_PARAM, ramBytesUsed()); + map.put(MAX_RAM_MB_PARAM, getMaxRamMB()); + + map.put("warmupTime", warmupTime); + map.put("cumulative_lookups", clookups); + map.put("cumulative_hits", chits); + map.put("cumulative_hitratio", calcHitRatio(clookups, chits)); + map.put("cumulative_inserts", cinserts); + map.put("cumulative_evictions", cevictions); + map.put("cumulative_idleEvictions", cIdleEvictions); + + if (detailed && showItems != 0) { + Map items = cache.getLatestAccessedItems(showItems == -1 ? 
Integer.MAX_VALUE : showItems); + for (Map.Entry e : (Set) items.entrySet()) { + Object k = e.getKey(); + Object v = e.getValue(); + + String ks = "item_" + k; + String vs = v.toString(); + map.put(ks, vs); + } + + } + } + }); return statsList; } @@ -224,8 +284,8 @@ public void warm(SolrIndexSearcher searcher, SolrCache old) { @Override - public void close() { - if (solrMetrics != null) solrMetrics.unregister(); + public void close() throws Exception { + SolrCache.super.close(); // add the stats to the cumulative stats object (the first in the statsList) statsList.get(0).add(cache.getStats()); statsList.remove(cache.getStats()); @@ -249,83 +309,22 @@ public Set getMetricNames() { } - SolrMetrics solrMetrics; - @Override - public SolrMetrics getMetrics() { - return solrMetrics; + public SolrMetricsContext getSolrMetricsContext() { + return solrMetricsContext; } @Override - public void initializeMetrics(SolrMetrics info) { - solrMetrics = info.getChildInfo(this); - cacheMap = new MetricsMap((detailed, map) -> { - if (cache != null) { - ConcurrentLRUCache.Stats stats = cache.getStats(); - long lookups = stats.getCumulativeLookups(); - long hits = stats.getCumulativeHits(); - long inserts = stats.getCumulativePuts(); - long evictions = stats.getCumulativeEvictions(); - long size = stats.getCurrentSize(); - long clookups = 0; - long chits = 0; - long cinserts = 0; - long cevictions = 0; - - // NOTE: It is safe to iterate on a CopyOnWriteArrayList - for (ConcurrentLRUCache.Stats statistiscs : statsList) { - clookups += statistiscs.getCumulativeLookups(); - chits += statistiscs.getCumulativeHits(); - cinserts += statistiscs.getCumulativePuts(); - cevictions += statistiscs.getCumulativeEvictions(); - } - - map.put(LOOKUPS_PARAM, lookups); - map.put(HITS_PARAM, hits); - map.put(HIT_RATIO_PARAM, calcHitRatio(lookups, hits)); - map.put(INSERTS_PARAM, inserts); - map.put(EVICTIONS_PARAM, evictions); - map.put(SIZE_PARAM, size); - map.put("cleanupThread", cleanupThread); - 
map.put(RAM_BYTES_USED_PARAM, ramBytesUsed()); - map.put(MAX_RAM_MB_PARAM, getMaxRamMB()); - - map.put("warmupTime", warmupTime); - map.put("cumulative_lookups", clookups); - map.put("cumulative_hits", chits); - map.put("cumulative_hitratio", calcHitRatio(clookups, chits)); - map.put("cumulative_inserts", cinserts); - map.put("cumulative_evictions", cevictions); - - if (detailed && showItems != 0) { - Map items = cache.getLatestAccessedItems(showItems == -1 ? Integer.MAX_VALUE : showItems); - for (Map.Entry e : (Set) items.entrySet()) { - Object k = e.getKey(); - Object v = e.getValue(); - - String ks = "item_" + k; - String vs = v.toString(); - map.put(ks, vs); - } - - } - } - }); - String metricName = SolrMetricManager.makeName(ImmutableList.of(getCategory().toString()), solrMetrics.scope); - solrMetrics.gauge(this, cacheMap,true, metricName); + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + this.solrMetricsContext = parentContext.getChildContext(this); + this.solrMetricsContext.gauge(this, cacheMap, true, scope, getCategory().toString()); } - // for unit tests only MetricsMap getMetricsMap() { return cacheMap; } - @Override - public MetricRegistry getMetricRegistry() { - return solrMetrics == null ? null : solrMetrics.getRegistry(); - } - @Override public String toString() { return name() + (cacheMap != null ? 
cacheMap.getValue().toString() : ""); diff --git a/solr/core/src/java/org/apache/solr/search/LFUCache.java b/solr/core/src/java/org/apache/solr/search/LFUCache.java index 78e7cb8c0be1..125f08a3bf85 100644 --- a/solr/core/src/java/org/apache/solr/search/LFUCache.java +++ b/solr/core/src/java/org/apache/solr/search/LFUCache.java @@ -24,14 +24,11 @@ import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.TimeUnit; -import com.codahale.metrics.MetricRegistry; -import com.google.common.collect.ImmutableList; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.RamUsageEstimator; import org.apache.solr.common.SolrException; import org.apache.solr.metrics.MetricsMap; -import org.apache.solr.metrics.SolrMetricManager; -import org.apache.solr.metrics.SolrMetrics; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.util.ConcurrentLFUCache; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -78,8 +75,11 @@ public class LFUCache implements SolrCache, Accountable { private ConcurrentLFUCache cache; private int showItems = 0; private Boolean timeDecay = true; + private int maxIdleTimeSec; private MetricsMap cacheMap; private Set metricNames = ConcurrentHashMap.newKeySet(); + private SolrMetricsContext solrMetricsContext; + private int maxSize; private int minSizeLimit; @@ -116,18 +116,25 @@ public Object init(Map args, Object persistence, CacheRegenerator regenerator) { str = (String) args.get(AUTOWARM_COUNT_PARAM); autowarmCount = str == null ? 0 : Integer.parseInt(str); str = (String) args.get(CLEANUP_THREAD_PARAM); - cleanupThread = str != null && Boolean.parseBoolean(str); + cleanupThread = str == null ? false : Boolean.parseBoolean(str); str = (String) args.get(SHOW_ITEMS_PARAM); showItems = str == null ? 0 : Integer.parseInt(str); // Don't make this "efficient" by removing the test, default is true and omitting the param will make it false. 
str = (String) args.get(TIME_DECAY_PARAM); - timeDecay = (str == null) || Boolean.parseBoolean(str); + timeDecay = (str == null) ? true : Boolean.parseBoolean(str); + str = (String) args.get(MAX_IDLE_TIME_PARAM); + if (str == null) { + maxIdleTimeSec = -1; + } else { + maxIdleTimeSec = Integer.parseInt(str); + } description = generateDescription(); - cache = new ConcurrentLFUCache<>(maxSize, minSizeLimit, acceptableSize, initialSize, cleanupThread, false, null, timeDecay); + cache = new ConcurrentLFUCache<>(maxSize, minSizeLimit, acceptableSize, initialSize, + cleanupThread, false, null, timeDecay, maxIdleTimeSec); cache.setAlive(false); statsList = (List) persistence; @@ -147,7 +154,8 @@ public Object init(Map args, Object persistence, CacheRegenerator regenerator) { private String generateDescription() { String descr = "Concurrent LFU Cache(maxSize=" + maxSize + ", initialSize=" + initialSize + ", minSize=" + minSizeLimit + ", acceptableSize=" + acceptableSize + ", cleanupThread=" + cleanupThread + - ", timeDecay=" + timeDecay; + ", timeDecay=" + timeDecay + + ", maxIdleTime=" + maxIdleTimeSec; if (autowarmCount > 0) { descr += ", autowarmCount=" + autowarmCount + ", regenerator=" + regenerator; } @@ -222,12 +230,12 @@ public void warm(SolrIndexSearcher searcher, SolrCache old) { @Override - public void close() { + public void close() throws Exception { + SolrCache.super.close(); // add the stats to the cumulative stats object (the first in the statsList) statsList.get(0).add(cache.getStats()); statsList.remove(cache.getStats()); cache.destroy(); - if (solrMetrics != null) solrMetrics.unregister(); } //////////////////////// SolrInfoMBeans methods ////////////////////// @@ -255,17 +263,14 @@ private static String calcHitRatio(long lookups, long hits) { return "0." 
+ hundredths; } - - private SolrMetrics solrMetrics; - @Override - public SolrMetrics getMetrics() { - return solrMetrics; + public SolrMetricsContext getSolrMetricsContext() { + return solrMetricsContext; } @Override - public void initializeMetrics(SolrMetrics info) { - solrMetrics = info.getChildInfo(this); + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + solrMetricsContext = parentContext.getChildContext(this); cacheMap = new MetricsMap((detailed, map) -> { if (cache != null) { ConcurrentLFUCache.Stats stats = cache.getStats(); @@ -273,6 +278,7 @@ public void initializeMetrics(SolrMetrics info) { long hits = stats.getCumulativeHits(); long inserts = stats.getCumulativePuts(); long evictions = stats.getCumulativeEvictions(); + long idleEvictions = stats.getCumulativeIdleEvictions(); long size = stats.getCurrentSize(); map.put(LOOKUPS_PARAM, lookups); @@ -288,7 +294,9 @@ public void initializeMetrics(SolrMetrics info) { map.put(CLEANUP_THREAD_PARAM, cleanupThread); map.put(SHOW_ITEMS_PARAM, showItems); map.put(TIME_DECAY_PARAM, timeDecay); - + map.put(RAM_BYTES_USED_PARAM, ramBytesUsed()); + map.put(MAX_IDLE_TIME_PARAM, maxIdleTimeSec); + map.put("idleEvictions", idleEvictions); map.put("warmupTime", warmupTime); @@ -296,6 +304,7 @@ public void initializeMetrics(SolrMetrics info) { long chits = 0; long cinserts = 0; long cevictions = 0; + long cidleEvictions = 0; // NOTE: It is safe to iterate on a CopyOnWriteArrayList for (ConcurrentLFUCache.Stats statistics : statsList) { @@ -303,13 +312,14 @@ public void initializeMetrics(SolrMetrics info) { chits += statistics.getCumulativeHits(); cinserts += statistics.getCumulativePuts(); cevictions += statistics.getCumulativeEvictions(); + cidleEvictions += statistics.getCumulativeIdleEvictions(); } map.put("cumulative_lookups", clookups); map.put("cumulative_hits", chits); map.put("cumulative_hitratio", calcHitRatio(clookups, chits)); map.put("cumulative_inserts", cinserts); 
map.put("cumulative_evictions", cevictions); - map.put(RAM_BYTES_USED_PARAM, ramBytesUsed()); + map.put("cumulative_idleEvictions", cidleEvictions); if (detailed && showItems != 0) { Map items = cache.getMostUsedItems(showItems == -1 ? Integer.MAX_VALUE : showItems); @@ -326,8 +336,7 @@ public void initializeMetrics(SolrMetrics info) { } }); - String metricName = SolrMetricManager.makeName(ImmutableList.of(getCategory().toString()), solrMetrics.scope); - solrMetrics.gauge(this, cacheMap, true, metricName); + solrMetricsContext.gauge(this, cacheMap, true, scope, getCategory().toString()); } // for unit tests only @@ -340,12 +349,6 @@ public Set getMetricNames() { return metricNames; } - @Override - public MetricRegistry getMetricRegistry() { - return solrMetrics == null ? null : solrMetrics.getRegistry(); - - } - @Override public String toString() { return name + (cacheMap != null ? cacheMap.getValue().toString() : ""); @@ -400,5 +403,4 @@ private void checkAndAdjustLimits() { } } } - } diff --git a/solr/core/src/java/org/apache/solr/search/LRUCache.java b/solr/core/src/java/org/apache/solr/search/LRUCache.java index e9e378987e77..7a1b37cfb963 100644 --- a/solr/core/src/java/org/apache/solr/search/LRUCache.java +++ b/solr/core/src/java/org/apache/solr/search/LRUCache.java @@ -26,15 +26,13 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.LongAdder; -import com.codahale.metrics.MetricRegistry; -import com.google.common.collect.ImmutableList; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; import org.apache.lucene.util.RamUsageEstimator; import org.apache.solr.common.SolrException; +import org.apache.solr.common.util.TimeSource; import org.apache.solr.metrics.MetricsMap; -import org.apache.solr.metrics.SolrMetricManager; -import org.apache.solr.metrics.SolrMetrics; +import org.apache.solr.metrics.SolrMetricsContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -58,6 +56,7 @@ private static 
class CumulativeStats { LongAdder inserts = new LongAdder(); LongAdder evictions = new LongAdder(); LongAdder evictionsRamUsage = new LongAdder(); + LongAdder evictionsIdleTime = new LongAdder(); } private CumulativeStats stats; @@ -69,20 +68,47 @@ private static class CumulativeStats { private long inserts; private long evictions; private long evictionsRamUsage; + private long evictionsIdleTime; private long warmupTime = 0; - private Map map; + private Map> map; private String description="LRU Cache"; private MetricsMap cacheMap; private Set metricNames = ConcurrentHashMap.newKeySet(); + private SolrMetricsContext solrMetricsContext; private int maxSize; private int initialSize; private long maxRamBytes = Long.MAX_VALUE; + private long maxIdleTimeNs; + private final TimeSource timeSource = TimeSource.NANO_TIME; + private long oldestEntry = 0L; + // for unit testing + private boolean syntheticEntries = false; + // The synchronization used for the map will be used to update this, // hence not an AtomicLong - private long ramBytesUsed = 0; + private long ramBytesUsed = 0L; + + public static final class CacheValue implements Accountable { + public static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CacheValue.class); + final long ramBytesUsed; + public final long createTime; + public final V value; + + public CacheValue(V value, long createTime) { + this.value = value; + this.createTime = createTime; + ramBytesUsed = BASE_RAM_BYTES_USED + + RamUsageEstimator.sizeOfObject(value, QUERY_DEFAULT_RAM_BYTES_USED); + } + + @Override + public long ramBytesUsed() { + return ramBytesUsed; + } + } @Override public Object init(Map args, Object persistence, CacheRegenerator regenerator) { @@ -93,15 +119,63 @@ public Object init(Map args, Object persistence, CacheRegenerator regenerator) { initialSize = Math.min(str==null ? 1024 : Integer.parseInt(str), maxSize); str = (String) args.get(MAX_RAM_MB_PARAM); this.maxRamBytes = str == null ? 
Long.MAX_VALUE : (long) (Double.parseDouble(str) * 1024L * 1024L); + str = (String) args.get(MAX_IDLE_TIME_PARAM); + if (str == null) { + maxIdleTimeNs = Long.MAX_VALUE; + } else { + int maxIdleTime = Integer.parseInt(str); + if (maxIdleTime > 0) { + maxIdleTimeNs = TimeUnit.NANOSECONDS.convert(Integer.parseInt(str), TimeUnit.SECONDS); + } else { + maxIdleTimeNs = Long.MAX_VALUE; + } + } description = generateDescription(); - map = new LinkedHashMap(initialSize, 0.75f, true) { + map = new LinkedHashMap>(initialSize, 0.75f, true) { @Override protected boolean removeEldestEntry(Map.Entry eldest) { + // remove items older than maxIdleTimeNs + if (maxIdleTimeNs != Long.MAX_VALUE) { + long idleCutoff = timeSource.getEpochTimeNs() - maxIdleTimeNs; + if (oldestEntry < idleCutoff) { + long currentOldestEntry = Long.MAX_VALUE; + Iterator>> iterator = entrySet().iterator(); + while (iterator.hasNext()) { + Map.Entry> entry = iterator.next(); + if (entry.getValue().createTime < idleCutoff) { + long bytesToDecrement = RamUsageEstimator.sizeOfObject(entry.getKey(), QUERY_DEFAULT_RAM_BYTES_USED); + bytesToDecrement += RamUsageEstimator.sizeOfObject(entry.getValue(), QUERY_DEFAULT_RAM_BYTES_USED); + bytesToDecrement += LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY; + ramBytesUsed -= bytesToDecrement; + iterator.remove(); + evictions++; + evictionsIdleTime++; + stats.evictionsIdleTime.increment(); + stats.evictions.increment(); + } else { + if (syntheticEntries) { + // no guarantee on the actual create time - make a full sweep + if (currentOldestEntry > entry.getValue().createTime) { + currentOldestEntry = entry.getValue().createTime; + } + } else { + // iterator is sorted by insertion order (and time) + // so we can quickly terminate the sweep + currentOldestEntry = entry.getValue().createTime; + break; + } + } + } + if (currentOldestEntry != Long.MAX_VALUE) { + oldestEntry = currentOldestEntry; + } + } + } if (ramBytesUsed > getMaxRamBytes()) { - Iterator> iterator = 
entrySet().iterator(); + Iterator>> iterator = entrySet().iterator(); do { - Map.Entry entry = iterator.next(); + Map.Entry> entry = iterator.next(); long bytesToDecrement = RamUsageEstimator.sizeOfObject(entry.getKey(), QUERY_DEFAULT_RAM_BYTES_USED); bytesToDecrement += RamUsageEstimator.sizeOfObject(entry.getValue(), QUERY_DEFAULT_RAM_BYTES_USED); bytesToDecrement += LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY; @@ -112,13 +186,10 @@ protected boolean removeEldestEntry(Map.Entry eldest) { stats.evictions.increment(); stats.evictionsRamUsage.increment(); } while (iterator.hasNext() && ramBytesUsed > getMaxRamBytes()); - // must return false according to javadocs of removeEldestEntry if we're modifying - // the map ourselves - return false; } else if (size() > getMaxSize()) { - Iterator> iterator = entrySet().iterator(); + Iterator>> iterator = entrySet().iterator(); do { - Map.Entry entry = iterator.next(); + Map.Entry> entry = iterator.next(); long bytesToDecrement = RamUsageEstimator.sizeOfObject(entry.getKey(), QUERY_DEFAULT_RAM_BYTES_USED); bytesToDecrement += RamUsageEstimator.sizeOfObject(entry.getValue(), QUERY_DEFAULT_RAM_BYTES_USED); bytesToDecrement += LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY; @@ -130,11 +201,9 @@ protected boolean removeEldestEntry(Map.Entry eldest) { evictions++; stats.evictions.increment(); } while (iterator.hasNext() && size() > getMaxSize()); - // must return false according to javadocs of removeEldestEntry if we're modifying - // the map ourselves - return false; } - // neither size nor RAM exceeded - ok to keep the entry + // must return false according to javadocs of removeEldestEntry if we're modifying + // the map ourselves return false; } }; @@ -149,6 +218,16 @@ protected boolean removeEldestEntry(Map.Entry eldest) { return persistence; } + /** + * Visible for testing. 
This flag tells the eviction code that (unlike with real entries) + * there's no guarantee on the order of entries being inserted with monotonically ascending creation + * time. Setting this to true causes a full sweep when looking for entries to evict. + * @lucene.internal + */ + public void setSyntheticEntries(boolean syntheticEntries) { + this.syntheticEntries = syntheticEntries; + } + public long getMaxRamBytes() { return maxRamBytes; } @@ -165,6 +244,9 @@ private String generateDescription() { if (getMaxRamBytes() != Long.MAX_VALUE) { description += ", maxRamMB=" + (getMaxRamBytes() / 1024L / 1024L); } + if (maxIdleTimeNs != Long.MAX_VALUE) { + description += ", " + MAX_IDLE_TIME_PARAM + "=" + TimeUnit.SECONDS.convert(maxIdleTimeNs, TimeUnit.NANOSECONDS); + } description += ')'; return description; } @@ -181,20 +263,35 @@ public V put(K key, V value) { if (maxSize == Integer.MAX_VALUE && maxRamBytes == Long.MAX_VALUE) { throw new IllegalStateException("Cache: " + getName() + " has neither size nor RAM limit!"); } + CacheValue cacheValue = new CacheValue<>(value, timeSource.getEpochTimeNs()); + return putCacheValue(key, cacheValue); + } + + /** + * Visible for testing to create synthetic cache entries. + * @lucene.internal + */ + public V putCacheValue(K key, CacheValue cacheValue) { synchronized (map) { if (getState() == State.LIVE) { stats.inserts.increment(); } + if (syntheticEntries) { + if (cacheValue.createTime < oldestEntry) { + oldestEntry = cacheValue.createTime; + } + } + // increment local inserts regardless of state??? // it does make it more consistent with the current size... 
inserts++; // important to calc and add new ram bytes first so that removeEldestEntry can compare correctly long keySize = RamUsageEstimator.sizeOfObject(key, QUERY_DEFAULT_RAM_BYTES_USED); - long valueSize = RamUsageEstimator.sizeOfObject(value, QUERY_DEFAULT_RAM_BYTES_USED); + long valueSize = RamUsageEstimator.sizeOfObject(cacheValue, QUERY_DEFAULT_RAM_BYTES_USED); ramBytesUsed += keySize + valueSize + LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY; - V old = map.put(key, value); + CacheValue old = map.put(key, cacheValue); if (old != null) { long bytesToDecrement = RamUsageEstimator.sizeOfObject(old, QUERY_DEFAULT_RAM_BYTES_USED); // the key existed in the map but we added its size before the put, so let's back out @@ -202,14 +299,14 @@ public V put(K key, V value) { bytesToDecrement += RamUsageEstimator.sizeOfObject(key, QUERY_DEFAULT_RAM_BYTES_USED); ramBytesUsed -= bytesToDecrement; } - return old; + return old == null ? null : old.value; } } @Override public V get(K key) { synchronized (map) { - V val = map.get(key); + CacheValue val = map.get(key); if (getState() == State.LIVE) { // only increment lookups and hits if we are live. lookups++; @@ -219,7 +316,7 @@ public V get(K key) { stats.hits.increment(); } } - return val; + return val == null ? null : val.value; } } @@ -249,7 +346,7 @@ public void warm(SolrIndexSearcher searcher, SolrCache old) { keys = new Object[sz]; vals = new Object[sz]; - Iterator> iter = other.map.entrySet().iterator(); + Iterator>> iter = other.map.entrySet().iterator(); // iteration goes from oldest (least recently used) to most recently used, // so we need to skip over the oldest entries. 
@@ -258,9 +355,9 @@ public void warm(SolrIndexSearcher searcher, SolrCache old) { for (int i=0; i entry = iter.next(); + Map.Entry> entry = iter.next(); keys[i]=entry.getKey(); - vals[i]=entry.getValue(); + vals[i]=entry.getValue().value; } } @@ -280,11 +377,6 @@ public void warm(SolrIndexSearcher searcher, SolrCache old) { warmupTime = TimeUnit.MILLISECONDS.convert(System.nanoTime() - warmingStartTime, TimeUnit.NANOSECONDS); } - @Override - public void close() { - if(solrMetrics != null) solrMetrics.unregister(); - } - //////////////////////// SolrInfoMBeans methods ////////////////////// @@ -303,16 +395,14 @@ public Set getMetricNames() { return metricNames; } - SolrMetrics solrMetrics; - @Override - public SolrMetrics getMetrics() { - return solrMetrics; + public SolrMetricsContext getSolrMetricsContext() { + return solrMetricsContext; } @Override - public void initializeMetrics(SolrMetrics m) { - solrMetrics = m.getChildInfo(this); + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + solrMetricsContext = parentContext.getChildContext(this); cacheMap = new MetricsMap((detailed, res) -> { synchronized (map) { res.put(LOOKUPS_PARAM, lookups); @@ -324,7 +414,10 @@ public void initializeMetrics(SolrMetrics m) { res.put(RAM_BYTES_USED_PARAM, ramBytesUsed()); res.put(MAX_RAM_MB_PARAM, getMaxRamMB()); res.put(MAX_SIZE_PARAM, maxSize); + res.put(MAX_IDLE_TIME_PARAM, maxIdleTimeNs != Long.MAX_VALUE ? 
+ TimeUnit.SECONDS.convert(maxIdleTimeNs, TimeUnit.NANOSECONDS) : -1); res.put("evictionsRamUsage", evictionsRamUsage); + res.put("evictionsIdleTime", evictionsIdleTime); } res.put("warmupTime", warmupTime); @@ -336,9 +429,9 @@ public void initializeMetrics(SolrMetrics m) { res.put("cumulative_inserts", stats.inserts.longValue()); res.put("cumulative_evictions", stats.evictions.longValue()); res.put("cumulative_evictionsRamUsage", stats.evictionsRamUsage.longValue()); + res.put("cumulative_evictionsIdleTime", stats.evictionsIdleTime.longValue()); }); - String metricName = SolrMetricManager.makeName(ImmutableList.of(getCategory().toString()), solrMetrics.scope); - solrMetrics.gauge(this, cacheMap, true, metricName); + solrMetricsContext.gauge(this, cacheMap, true, scope, getCategory().toString()); } // for unit tests only @@ -346,11 +439,6 @@ MetricsMap getMetricsMap() { return cacheMap; } - @Override - public MetricRegistry getMetricRegistry() { - return solrMetrics ==null ?null: solrMetrics.getRegistry(); - } - @Override public String toString() { return name() + (cacheMap != null ? cacheMap.getValue().toString() : ""); diff --git a/solr/core/src/java/org/apache/solr/search/SolrCache.java b/solr/core/src/java/org/apache/solr/search/SolrCache.java index b4817f5c36be..55f57ec72f45 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrCache.java +++ b/solr/core/src/java/org/apache/solr/search/SolrCache.java @@ -16,17 +16,16 @@ */ package org.apache.solr.search; -import java.util.Map; - import org.apache.solr.core.SolrInfoBean; import org.apache.solr.metrics.SolrMetricProducer; +import java.util.Map; + /** * Primary API for dealing with Solr's internal caches. 
*/ public interface SolrCache extends SolrInfoBean, SolrMetricProducer { - String TYPE = "cache"; String HIT_RATIO_PARAM = "hitratio"; String HITS_PARAM = "hits"; @@ -37,6 +36,10 @@ public interface SolrCache extends SolrInfoBean, SolrMetricProducer { String MAX_SIZE_PARAM = "maxSize"; String RAM_BYTES_USED_PARAM = "ramBytesUsed"; String MAX_RAM_MB_PARAM = "maxRamMB"; + String MAX_IDLE_TIME_PARAM = "maxIdleTime"; + String INITIAL_SIZE_PARAM = "initialSize"; + String CLEANUP_THREAD_PARAM = "cleanupThread"; + String SHOW_ITEMS_PARAM = "showItems"; /** * The initialization routine. Instance specific arguments are passed in @@ -60,7 +63,7 @@ public interface SolrCache extends SolrInfoBean, SolrMetricProducer { * regenerate an item in the new cache from an entry in the old cache. * */ - Object init(Map args, Object persistence, CacheRegenerator regenerator); + public Object init(Map args, Object persistence, CacheRegenerator regenerator); // I don't think we need a factory for faster creation given that these // will be associated with slow-to-create SolrIndexSearchers. // change to NamedList when other plugins do? @@ -76,29 +79,29 @@ public interface SolrCache extends SolrInfoBean, SolrMetricProducer { * * :TODO: verify this. */ - String name(); + public String name(); // Should SolrCache just extend the java.util.Map interface? // Following the conventions of the java.util.Map interface in any case. /** :TODO: copy from Map */ - int size(); + public int size(); /** :TODO: copy from Map */ - V put(K key, V value); + public V put(K key, V value); /** :TODO: copy from Map */ - V get(K key); + public V get(K key); /** :TODO: copy from Map */ - void clear(); + public void clear(); /** * Enumeration of possible States for cache instances. * :TODO: only state that seems to ever be set is LIVE ? 
*/ - enum State { + public enum State { /** :TODO */ CREATED, /** :TODO */ @@ -115,15 +118,14 @@ enum State { * The cache user (SolrIndexSearcher) will take care of switching * cache states. */ - void setState(State state); + public void setState(State state); /** * Returns the last State set on this instance * * @see #setState */ - State getState(); - + public State getState(); /** * Warm this cache associated with searcher using the old @@ -135,7 +137,9 @@ enum State { /** Frees any non-memory resources */ - void close(); + default void close() throws Exception { + SolrMetricProducer.super.close(); + } /** Returns maximum size limit (number of items) if set and supported, -1 otherwise. */ int getMaxSize(); diff --git a/solr/core/src/java/org/apache/solr/search/SolrCacheHolder.java b/solr/core/src/java/org/apache/solr/search/SolrCacheHolder.java index 9a83a984eb9d..7afe96dc7321 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrCacheHolder.java +++ b/solr/core/src/java/org/apache/solr/search/SolrCacheHolder.java @@ -22,11 +22,7 @@ import java.util.Set; import com.codahale.metrics.MetricRegistry; -import org.apache.solr.common.MapWriter; -import org.apache.solr.core.PluginInfo; -import org.apache.solr.core.RuntimeLib; -import org.apache.solr.core.SolrCore; -import org.apache.solr.metrics.SolrMetrics; +import org.apache.solr.metrics.SolrMetricsContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -34,50 +30,12 @@ public class SolrCacheHolder implements SolrCache { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private CacheConfig.CacheInfo info; + private final CacheConfig factory; protected volatile SolrCache delegate; - - - public SolrCacheHolder(CacheConfig.CacheInfo cacheInfo) { - this.info = cacheInfo; - this.delegate = cacheInfo.cache; - - if(info.pkg != null) { - info.core.addPackageListener(new SolrCore.PkgListener() { - @Override - public String packageName() { - return info.pkg; - } 
- - @Override - public PluginInfo pluginInfo() { - return info.cfg.args; - } - - @Override - public MapWriter lib() { - return info.runtimeLib; - } - - @Override - public void changed(RuntimeLib lib) { - reloadCache(lib); - } - }); - } - } - - private void reloadCache(RuntimeLib lib) { - int znodeVersion = info.runtimeLib == null ? -1 : info.runtimeLib.getZnodeVersion(); - if (lib.getZnodeVersion() > znodeVersion) { - log.info("Cache {} being reloaded, package: {} loaded from: {} ", delegate.getClass().getSimpleName(), info.pkg, lib.getUrl()); - info = new CacheConfig.CacheInfo(info.cfg, info.core); - delegate.close(); - delegate = info.cache; - delegate.initializeMetrics(metrics); - - } + public SolrCacheHolder(SolrCache delegate, CacheConfig factory) { + this.delegate = delegate; + this.factory = factory; } public int size() { @@ -118,7 +76,7 @@ public SolrCache get() { return delegate; } - public void close() { + public void close() throws Exception { delegate.close(); } @@ -182,11 +140,9 @@ public Category getCategory() { return delegate.getCategory(); } - private SolrMetrics metrics; @Override - public void initializeMetrics(SolrMetrics info) { - this.metrics = info; - delegate.initializeMetrics(info); + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + delegate.initializeMetrics(parentContext, scope); } } diff --git a/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java b/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java index 76b03ae09a40..b1c17becf2ea 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java +++ b/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java @@ -111,8 +111,7 @@ public class SolrDocumentFetcher { this.searcher = searcher; this.enableLazyFieldLoading = solrConfig.enableLazyFieldLoading; if (cachingEnabled) { - documentCache = solrConfig.documentCacheConfig == null ? 
null : - solrConfig.documentCacheConfig.newInstance(searcher.getCore()); + documentCache = solrConfig.documentCacheConfig == null ? null : solrConfig.documentCacheConfig.newInstance(); } else { documentCache = null; } diff --git a/solr/core/src/java/org/apache/solr/search/SolrFieldCacheBean.java b/solr/core/src/java/org/apache/solr/search/SolrFieldCacheBean.java index b2647cdcbbec..b6deb7c506bd 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrFieldCacheBean.java +++ b/solr/core/src/java/org/apache/solr/search/SolrFieldCacheBean.java @@ -19,11 +19,10 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; -import com.codahale.metrics.MetricRegistry; import org.apache.solr.core.SolrInfoBean; import org.apache.solr.metrics.MetricsMap; -import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.metrics.SolrMetricProducer; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.uninverting.UninvertingReader; /** @@ -35,7 +34,7 @@ public class SolrFieldCacheBean implements SolrInfoBean, SolrMetricProducer { private boolean disableEntryList = Boolean.getBoolean("disableSolrFieldCacheMBeanEntryList"); private boolean disableJmxEntryList = Boolean.getBoolean("disableSolrFieldCacheMBeanEntryListJmx"); - private MetricRegistry registry; + private SolrMetricsContext solrMetricsContext; private Set metricNames = ConcurrentHashMap.newKeySet(); @Override @@ -50,14 +49,15 @@ public String getDescription() { public Set getMetricNames() { return metricNames; } + @Override - public MetricRegistry getMetricRegistry() { - return registry; + public SolrMetricsContext getSolrMetricsContext() { + return solrMetricsContext; } @Override - public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) { - registry = manager.registry(registryName); + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + this.solrMetricsContext = parentContext; MetricsMap metricsMap = 
new MetricsMap((detailed, map) -> { if (detailed && !disableEntryList && !disableJmxEntryList) { UninvertingReader.FieldCacheStats fieldCacheStats = UninvertingReader.getUninvertedStats(); @@ -72,6 +72,6 @@ public void initializeMetrics(SolrMetricManager manager, String registryName, St map.put("entries_count", UninvertingReader.getUninvertedStatsSize()); } }); - manager.registerGauge(this, registryName, metricsMap, tag, true, "fieldCache", Category.CACHE.toString(), scope); + solrMetricsContext.gauge(this, metricsMap, true, "fieldCache", Category.CACHE.toString(), scope); } } diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java index deb6dc10ebad..a831b4501e8f 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java +++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java @@ -35,7 +35,6 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import com.codahale.metrics.MetricRegistry; import com.google.common.collect.Iterables; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; @@ -48,7 +47,6 @@ import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.Term; -import org.apache.lucene.index.TermStates; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.*; @@ -67,8 +65,10 @@ import org.apache.solr.core.SolrCore; import org.apache.solr.core.SolrInfoBean; import org.apache.solr.index.SlowCompositeReaderWrapper; +import org.apache.solr.metrics.MetricsMap; import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.metrics.SolrMetricProducer; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.request.LocalSolrQueryRequest; import org.apache.solr.request.SolrQueryRequest; import 
org.apache.solr.request.SolrRequestInfo; @@ -76,6 +76,7 @@ import org.apache.solr.schema.IndexSchema; import org.apache.solr.schema.SchemaField; import org.apache.solr.search.facet.UnInvertedField; +import org.apache.solr.search.stats.StatsCache; import org.apache.solr.search.stats.StatsSource; import org.apache.solr.uninverting.UninvertingReader; import org.apache.solr.update.IndexFingerprint; @@ -136,9 +137,10 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI private final String path; private boolean releaseDirectory; + private final StatsCache statsCache; + private Set metricNames = ConcurrentHashMap.newKeySet(); - private SolrMetricManager metricManager; - private String registryName; + private SolrMetricsContext solrMetricsContext; private static DirectoryReader getReader(SolrCore core, SolrIndexConfig config, DirectoryFactory directoryFactory, String path) throws IOException { @@ -237,6 +239,7 @@ public SolrIndexSearcher(SolrCore core, String path, IndexSchema schema, String this.rawReader = r; this.leafReader = SlowCompositeReaderWrapper.wrap(this.reader); this.core = core; + this.statsCache = core.createStatsCache(); this.schema = schema; this.name = "Searcher@" + Integer.toHexString(hashCode()) + "[" + core.getName() + "]" + (name != null ? " " + name : ""); @@ -268,12 +271,12 @@ public SolrIndexSearcher(SolrCore core, String path, IndexSchema schema, String if (cachingEnabled) { final ArrayList clist = new ArrayList<>(); fieldValueCache = solrConfig.fieldValueCacheConfig == null ? null - : solrConfig.fieldValueCacheConfig.newInstance(core); - if (fieldValueCache != null) clist.add( fieldValueCache); - filterCache = solrConfig.filterCacheConfig == null ? null : solrConfig.filterCacheConfig.newInstance(core); + : solrConfig.fieldValueCacheConfig.newInstance(); + if (fieldValueCache != null) clist.add(fieldValueCache); + filterCache = solrConfig.filterCacheConfig == null ? 
null : solrConfig.filterCacheConfig.newInstance(); if (filterCache != null) clist.add(filterCache); queryResultCache = solrConfig.queryResultCacheConfig == null ? null - : solrConfig.queryResultCacheConfig.newInstance(core); + : solrConfig.queryResultCacheConfig.newInstance(); if (queryResultCache != null) clist.add(queryResultCache); SolrCache documentCache = docFetcher.getDocumentCache(); if (documentCache != null) clist.add(documentCache); @@ -282,8 +285,8 @@ public SolrIndexSearcher(SolrCore core, String path, IndexSchema schema, String cacheMap = NO_GENERIC_CACHES; } else { cacheMap = new HashMap<>(solrConfig.userCacheConfigs.size()); - for (Map.Entry e : solrConfig.userCacheConfigs.entrySet()) { - SolrCache cache = e.getValue().newInstance(core); + for (Map.Entry e : solrConfig.userCacheConfigs.entrySet()) { + SolrCache cache = e.getValue().newInstance(); if (cache != null) { cacheMap.put(cache.name(), cache); clist.add(cache); @@ -316,6 +319,10 @@ List getLeafContexts() { return super.leafContexts; } + public StatsCache getStatsCache() { + return statsCache; + } + public FieldInfos getFieldInfos() { return leafReader.getFieldInfos(); } @@ -324,15 +331,15 @@ public FieldInfos getFieldInfos() { * Override these two methods to provide a way to use global collection stats. 
*/ @Override - public TermStatistics termStatistics(Term term, TermStates context) throws IOException { + public TermStatistics termStatistics(Term term, int docFreq, long totalTermFreq) throws IOException { final SolrRequestInfo reqInfo = SolrRequestInfo.getRequestInfo(); if (reqInfo != null) { final StatsSource statsSrc = (StatsSource) reqInfo.getReq().getContext().get(STATS_SOURCE); if (statsSrc != null) { - return statsSrc.termStatistics(this, term, context); + return statsSrc.termStatistics(this, term, docFreq, totalTermFreq); } } - return localTermStatistics(term, context); + return localTermStatistics(term, docFreq, totalTermFreq); } @Override @@ -347,8 +354,8 @@ public CollectionStatistics collectionStatistics(String field) throws IOExceptio return localCollectionStatistics(field); } - public TermStatistics localTermStatistics(Term term, TermStates context) throws IOException { - return super.termStatistics(term, context); + public TermStatistics localTermStatistics(Term term, int docFreq, long totalTermFreq) throws IOException { + return super.termStatistics(term, docFreq, totalTermFreq); } public CollectionStatistics localCollectionStatistics(String field) throws IOException { @@ -423,12 +430,13 @@ public void register() { cache.setState(SolrCache.State.LIVE); infoRegistry.put(cache.name(), cache); } - metricManager = core.getCoreContainer().getMetricManager(); - registryName = core.getCoreMetricManager().getRegistryName(); + this.solrMetricsContext = core.getSolrMetricsContext().getChildContext(this); for (SolrCache cache : cacheList) { - cache.initializeMetrics(metricManager, registryName, core.getMetricTag(), SolrMetricManager.mkName(cache.name(), STATISTICS_KEY)); + // XXX use the deprecated method for back-compat. 
remove in 9.0 + cache.initializeMetrics(solrMetricsContext.metricManager, + solrMetricsContext.registry, solrMetricsContext.tag, SolrMetricManager.mkName(cache.name(), STATISTICS_KEY)); } - initializeMetrics(metricManager, registryName, core.getMetricTag(), STATISTICS_KEY); + initializeMetrics(solrMetricsContext, STATISTICS_KEY); registerTime = new Date(); } @@ -471,7 +479,11 @@ public void close() throws IOException { } for (SolrCache cache : cacheList) { - cache.close(); + try { + cache.close(); + } catch (Exception e) { + SolrException.log(log, "Exception closing cache " + cache.name(), e); + } } if (releaseDirectory) { @@ -503,8 +515,8 @@ public SolrCache getFilterCache() { // Set default regenerators on filter and query caches if they don't have any // public static void initRegenerators(SolrConfig solrConfig) { - if (solrConfig.fieldValueCacheConfig != null) { - solrConfig.fieldValueCacheConfig.setDefaultRegenerator(new CacheRegenerator() { + if (solrConfig.fieldValueCacheConfig != null && solrConfig.fieldValueCacheConfig.getRegenerator() == null) { + solrConfig.fieldValueCacheConfig.setRegenerator(new CacheRegenerator() { @Override public boolean regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, SolrCache oldCache, Object oldKey, Object oldVal) throws IOException { @@ -516,8 +528,8 @@ public boolean regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, }); } - if (solrConfig.filterCacheConfig != null ) { - solrConfig.filterCacheConfig.setDefaultRegenerator(new CacheRegenerator() { + if (solrConfig.filterCacheConfig != null && solrConfig.filterCacheConfig.getRegenerator() == null) { + solrConfig.filterCacheConfig.setRegenerator(new CacheRegenerator() { @Override public boolean regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, SolrCache oldCache, Object oldKey, Object oldVal) throws IOException { @@ -527,9 +539,9 @@ public boolean regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, }); } - if 
(solrConfig.queryResultCacheConfig != null) { + if (solrConfig.queryResultCacheConfig != null && solrConfig.queryResultCacheConfig.getRegenerator() == null) { final int queryResultWindowSize = solrConfig.queryResultWindowSize; - solrConfig.queryResultCacheConfig.setDefaultRegenerator(new CacheRegenerator() { + solrConfig.queryResultCacheConfig.setRegenerator(new CacheRegenerator() { @Override public boolean regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, SolrCache oldCache, Object oldKey, Object oldVal) throws IOException { @@ -624,7 +636,7 @@ public final Document doc(int i, Set fields) throws IOException { /** expert: internal API, subject to change */ public SolrCache getFieldValueCache() { - return fieldValueCache ; + return fieldValueCache; } /** Returns a weighted sort according to this searcher */ @@ -2267,23 +2279,26 @@ public Set getMetricNames() { } @Override - public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) { - this.registryName = registry; - this.metricManager = manager; - manager.registerGauge(this, registry, () -> name, tag, true, "searcherName", Category.SEARCHER.toString(), scope); - manager.registerGauge(this, registry, () -> cachingEnabled, tag, true, "caching", Category.SEARCHER.toString(), scope); - manager.registerGauge(this, registry, () -> openTime, tag, true, "openedAt", Category.SEARCHER.toString(), scope); - manager.registerGauge(this, registry, () -> warmupTime, tag, true, "warmupTime", Category.SEARCHER.toString(), scope); - manager.registerGauge(this, registry, () -> registerTime, tag, true, "registeredAt", Category.SEARCHER.toString(), scope); + public SolrMetricsContext getSolrMetricsContext() { + return solrMetricsContext; + } + + @Override + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + parentContext.gauge(this, () -> name, true, "searcherName", Category.SEARCHER.toString(), scope); + parentContext.gauge(this, () -> 
cachingEnabled, true, "caching", Category.SEARCHER.toString(), scope); + parentContext.gauge(this, () -> openTime, true, "openedAt", Category.SEARCHER.toString(), scope); + parentContext.gauge(this, () -> warmupTime, true, "warmupTime", Category.SEARCHER.toString(), scope); + parentContext.gauge(this, () -> registerTime, true, "registeredAt", Category.SEARCHER.toString(), scope); // reader stats - manager.registerGauge(this, registry, () -> reader.numDocs(), tag, true, "numDocs", Category.SEARCHER.toString(), scope); - manager.registerGauge(this, registry, () -> reader.maxDoc(), tag, true, "maxDoc", Category.SEARCHER.toString(), scope); - manager.registerGauge(this, registry, () -> reader.maxDoc() - reader.numDocs(), tag, true, "deletedDocs", Category.SEARCHER.toString(), scope); - manager.registerGauge(this, registry, () -> reader.toString(), tag, true, "reader", Category.SEARCHER.toString(), scope); - manager.registerGauge(this, registry, () -> reader.directory().toString(), tag, true, "readerDir", Category.SEARCHER.toString(), scope); - manager.registerGauge(this, registry, () -> reader.getVersion(), tag, true, "indexVersion", Category.SEARCHER.toString(), scope); + parentContext.gauge(this, () -> reader.numDocs(), true, "numDocs", Category.SEARCHER.toString(), scope); + parentContext.gauge(this, () -> reader.maxDoc(), true, "maxDoc", Category.SEARCHER.toString(), scope); + parentContext.gauge(this, () -> reader.maxDoc() - reader.numDocs(), true, "deletedDocs", Category.SEARCHER.toString(), scope); + parentContext.gauge(this, () -> reader.toString(), true, "reader", Category.SEARCHER.toString(), scope); + parentContext.gauge(this, () -> reader.directory().toString(), true, "readerDir", Category.SEARCHER.toString(), scope); + parentContext.gauge(this, () -> reader.getVersion(), true, "indexVersion", Category.SEARCHER.toString(), scope); // size of the currently opened commit - manager.registerGauge(this, registry, () -> { + parentContext.gauge(this, () -> { try { 
Collection files = reader.getIndexCommit().getFileNames(); long total = 0; @@ -2294,13 +2309,13 @@ public void initializeMetrics(SolrMetricManager manager, String registry, String } catch (Exception e) { return -1; } - }, tag, true, "indexCommitSize", Category.SEARCHER.toString(), scope); - - } - - @Override - public MetricRegistry getMetricRegistry() { - return core.getMetricRegistry(); + }, true, "indexCommitSize", Category.SEARCHER.toString(), scope); + // statsCache metrics + parentContext.gauge(this, + new MetricsMap((detailed, map) -> { + statsCache.getCacheMetrics().getSnapshot(map::put); + map.put("statsCacheImpl", statsCache.getClass().getSimpleName()); + }), true, "statsCache", Category.CACHE.toString(), scope); } private static class FilterImpl extends Filter { @@ -2469,7 +2484,7 @@ private boolean equalsTo(FilterImpl other) { @Override public int hashCode() { - return classHash() + return classHash() + 31 * Objects.hashCode(topFilter) + 31 * Objects.hashCode(weights); } diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java index 40eb7854bfa8..5fb69d8e5aa4 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java @@ -34,6 +34,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Query; import org.apache.lucene.util.PriorityQueue; +import org.apache.solr.common.SolrException; import org.apache.solr.common.util.SimpleOrderedMap; import org.apache.solr.schema.FieldType; import org.apache.solr.schema.SchemaField; @@ -148,7 +149,7 @@ protected void createAccs(int docCount, int slotCount) throws IOException { } /** - * Simple helper for checking if a {@FacetRequest.FacetSort} is on "count" or "index" and picking + * Simple helper for checking if a {@link FacetRequest.FacetSort} is on "count" or "index" and picking * 
the existing SlotAcc * @return an existing SlotAcc for sorting, else null if it should be built from the Aggs */ @@ -224,6 +225,12 @@ void createCollectAcc(int numDocs, int numSlots) throws IOException { boolean needOtherAccs = freq.allBuckets; // TODO: use for missing too... + if (sortAcc == null) { + // as sort is already validated, in what case sortAcc would be null? + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Invalid sort '" + sort + "' for field '" + sf.getName() + "'"); + } + if (!needOtherAccs) { // we may need them later, but we don't want to create them now // otherwise we won't know if we need to call setNextReader on them. @@ -287,6 +294,7 @@ void collectFirstPhase(int segDoc, int slot, IntFunction slotContex SimpleOrderedMap findTopSlots(final int numSlots, final int slotCardinality, IntFunction bucketValFromSlotNumFunc, Function fieldQueryValFunc) throws IOException { + assert this.sortAcc != null; int numBuckets = 0; final int off = fcontext.isShard() ? 
0 : (int) freq.offset; diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java index d7925193491f..b5f152188a02 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java @@ -38,6 +38,7 @@ import org.apache.solr.schema.TrieDateField; import org.apache.solr.schema.TrieField; import org.apache.solr.search.DocSet; +import org.apache.solr.search.SyntaxError; import org.apache.solr.search.facet.SlotAcc.SlotContext; import org.apache.solr.util.DateMathParser; @@ -50,6 +51,7 @@ public class FacetRange extends FacetRequestSorted { Object start; Object end; Object gap; + Object ranges; boolean hardend = false; EnumSet include; EnumSet others; @@ -72,11 +74,15 @@ public FacetMerger createFacetMerger(Object prototype) { @Override public Map getFacetDescription() { - Map descr = new HashMap(); + Map descr = new HashMap<>(); descr.put("field", field); - descr.put("start", start); - descr.put("end", end); - descr.put("gap", gap); + if (ranges != null) { + descr.put("ranges", ranges); + } else { + descr.put("start", start); + descr.put("end", end); + descr.put("gap", gap); + } return descr; } @@ -95,7 +101,8 @@ class FacetRangeProcessor extends FacetProcessor { final Comparable start; final Comparable end; final String gap; - + final Object ranges; + /** Build by {@link #createRangeList} if and only if needed for basic faceting */ List rangeList; /** Build by {@link #createRangeList} if and only if needed for basic faceting */ @@ -120,11 +127,22 @@ class FacetRangeProcessor extends FacetProcessor { include = freq.include; sf = fcontext.searcher.getSchema().getField(freq.field); calc = getCalcForField(sf); - start = calc.getValue(freq.start.toString()); - end = calc.getValue(freq.end.toString()); - gap = freq.gap.toString(); + if (freq.ranges != null && (freq.start != null || freq.end != null || freq.gap != 
null)) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Cannot set gap/start/end and ranges params together"); + } + if (freq.ranges != null) { + ranges = freq.ranges; + start = null; + end = null; + gap = null; + } else { + start = calc.getValue(freq.start.toString()); + end = calc.getValue(freq.end.toString()); + gap = freq.gap.toString(); + ranges = null; + } - // Under the normal mincount=0, each shard will need to return 0 counts since we don't calculate buckets at the top level. // If mincount>0 then we could *potentially* set our sub mincount to 1... // ...but that would require sorting the buckets (by their val) at the top level @@ -245,7 +263,12 @@ private void createRangeList() throws IOException { Comparable low = start; Comparable loop_end = this.end; - + + if (ranges != null) { + rangeList.addAll(parseRanges(ranges)); + return; + } + while (low.compareTo(end) < 0) { Comparable high = calc.addGap(low, gap); if (end.compareTo(high) < 0) { @@ -263,14 +286,14 @@ private void createRangeList() throws IOException { if (high.compareTo(low) == 0) { throw new SolrException (SolrException.ErrorCode.BAD_REQUEST, - "range facet infinite loop: gap is either zero, or too small relative start/end and caused underflow: " + low + " + " + gap + " = " + high ); + "range facet infinite loop: gap is either zero, or too small relative start/end and caused underflow: " + low + " + " + gap + " = " + high); } - boolean incLower =(include.contains(FacetRangeInclude.LOWER) || - (include.contains(FacetRangeInclude.EDGE) && 0 == low.compareTo(start))); + boolean incLower = (include.contains(FacetRangeInclude.LOWER) || + (include.contains(FacetRangeInclude.EDGE) && 0 == low.compareTo(start))); boolean incUpper = (include.contains(FacetRangeInclude.UPPER) || - (include.contains(FacetRangeInclude.EDGE) && 0 == high.compareTo(end))); - + (include.contains(FacetRangeInclude.EDGE) && 0 == high.compareTo(end))); + Range range = new Range(calc.buildRangeLabel(low), low, 
high, incLower, incUpper); rangeList.add( range ); @@ -299,8 +322,203 @@ private void createRangeList() throws IOException { actual_end = null; } } - - + + /** + * Parses the given list of maps and returns list of Ranges + * + * @param input - list of map containing the ranges + * @return list of {@link Range} + */ + private List parseRanges(Object input) { + if (!(input instanceof List)) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Expected List for ranges but got " + input.getClass().getSimpleName() + " = " + input + ); + } + List intervals = (List) input; + List ranges = new ArrayList<>(); + for (Object obj : intervals) { + if (!(obj instanceof Map)) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Expected Map for range but got " + obj.getClass().getSimpleName() + " = " + obj); + } + Range range; + Map interval = (Map) obj; + if (interval.containsKey("range")) { + range = getRangeByOldFormat(interval); + } else { + range = getRangeByNewFormat(interval); + } + ranges.add(range); + } + return ranges; + } + + private boolean getBoolean(Map args, String paramName, boolean defVal) { + Object o = args.get(paramName); + if (o == null) { + return defVal; + } + // TODO: should we be more flexible and accept things like "true" (strings)? + // Perhaps wait until the use case comes up. 
+ if (!(o instanceof Boolean)) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Expected boolean type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o); + } + + return (Boolean)o; + } + + private String getString(Map args, String paramName, boolean required) { + Object o = args.get(paramName); + if (o == null) { + if (required) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Missing required parameter '" + paramName + "' for " + args); + } + return null; + } + if (!(o instanceof String)) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Expected string type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o); + } + + return (String)o; + } + + /** + * Parses the range given in format {from:val1, to:val2, inclusive_to:true} + * and returns the {@link Range} + * + * @param rangeMap Map containing the range info + * @return {@link Range} + */ + private Range getRangeByNewFormat(Map rangeMap) { + Object fromObj = rangeMap.get("from"); + Object toObj = rangeMap.get("to"); + + String fromStr = fromObj == null? "*" : fromObj.toString(); + String toStr = toObj == null? "*": toObj.toString(); + boolean includeUpper = getBoolean(rangeMap, "inclusive_to", false); + boolean includeLower = getBoolean(rangeMap, "inclusive_from", true); + + Object key = rangeMap.get("key"); + // if (key == null) { + // key = (includeLower? "[": "(") + fromStr + "," + toStr + (includeUpper? "]": ")"); + // } + // using the default key as custom key won't work with refine + // refine would need both low and high values + key = (includeLower? "[": "(") + fromStr + "," + toStr + (includeUpper? 
"]": ")"); + + Comparable from = getComparableFromString(fromStr); + Comparable to = getComparableFromString(toStr); + if (from != null && to != null && from.compareTo(to) > 0) { + // allowing from and to be same + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "'from' is higher than 'to' in range for key: " + key); + } + + return new Range(key, from, to, includeLower, includeUpper); + } + + /** + * Parses the range string from the map and Returns {@link Range} + * + * @param range map containing the interval + * @return {@link Range} + */ + private Range getRangeByOldFormat(Map range) { + String key = getString(range, "key", false); + String rangeStr = getString(range, "range", true); + try { + return parseRangeFromString(key, rangeStr); + } catch (SyntaxError e) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e); + } + } + + /** + * Parses the given string and returns Range. + * This is adopted from {@link org.apache.solr.request.IntervalFacets} + * + * @param key The name of range which would be used as {@link Range}'s label + * @param rangeStr The string containing the Range + * @return {@link Range} + */ + private Range parseRangeFromString(String key, String rangeStr) throws SyntaxError { + rangeStr = rangeStr.trim(); + if (rangeStr.isEmpty()) { + throw new SyntaxError("empty facet range"); + } + + boolean includeLower = true, includeUpper = true; + Comparable start = null, end = null; + if (rangeStr.charAt(0) == '(') { + includeLower = false; + } else if (rangeStr.charAt(0) != '[') { + throw new SyntaxError( "Invalid start character " + rangeStr.charAt(0) + " in facet range " + rangeStr); + } + + final int lastNdx = rangeStr.length() - 1; + if (rangeStr.charAt(lastNdx) == ')') { + includeUpper = false; + } else if (rangeStr.charAt(lastNdx) != ']') { + throw new SyntaxError("Invalid end character " + rangeStr.charAt(lastNdx) + " in facet range " + rangeStr); + } + + StringBuilder startStr = new StringBuilder(lastNdx); + int i = 
unescape(rangeStr, 1, lastNdx, startStr); + if (i == lastNdx) { + if (rangeStr.charAt(lastNdx - 1) == ',') { + throw new SyntaxError("Empty range limit"); + } + throw new SyntaxError("Missing unescaped comma separating range ends in " + rangeStr); + } + start = getComparableFromString(startStr.toString()); + + StringBuilder endStr = new StringBuilder(lastNdx); + i = unescape(rangeStr, i, lastNdx, endStr); + if (i != lastNdx) { + throw new SyntaxError("Extra unescaped comma at index " + i + " in range " + rangeStr); + } + end = getComparableFromString(endStr.toString()); + + if (start != null && end != null && start.compareTo(end) > 0) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "'start' is higher than 'end' in range for key: " + rangeStr); + } + + // not using custom key as it won't work with refine + // refine would need both low and high values + return new Range(rangeStr, start, end, includeLower, includeUpper); + } + + /* Fill in sb with a string from i to the first unescaped comma, or n. 
+ Return the index past the unescaped comma, or n if no unescaped comma exists */ + private int unescape(String s, int i, int n, StringBuilder sb) throws SyntaxError { + for (; i < n; ++i) { + char c = s.charAt(i); + if (c == '\\') { + ++i; + if (i < n) { + c = s.charAt(i); + } else { + throw new SyntaxError("Unfinished escape at index " + i + " in facet range " + s); + } + } else if (c == ',') { + return i + 1; + } + sb.append(c); + } + return n; + } + + private Comparable getComparableFromString(String value) { + value = value.trim(); + if ("*".equals(value)) { + return null; + } + return calc.getValue(value); + } + private SimpleOrderedMap getRangeCountsIndexed() throws IOException { int slotCount = rangeList.size() + otherList.size(); @@ -341,7 +559,7 @@ private SimpleOrderedMap getRangeCountsIndexed() throws IOException { addStats(bucket, rangeList.size() + idx); doSubs(bucket, rangeList.size() + idx); } - + if (null != actual_end) { res.add(FacetRange.ACTUAL_END_JSON_KEY, calc.formatValue(actual_end)); } @@ -404,7 +622,7 @@ public long bitsToSortableBits(long bits) { } /** - * Given the low value for a bucket, generates the appropraite "label" object to use. + * Given the low value for a bucket, generates the appropriate "label" object to use. * By default return the low object unmodified. */ public Object buildRangeLabel(Comparable low) { @@ -471,7 +689,7 @@ protected Object parseGap(final String rawval) throws java.text.ParseException { /** * Adds the String gap param to a low Range endpoint value to determine - * the corrisponding high Range endpoint value, throwing + * the corresponding high Range endpoint value, throwing * a useful exception if not possible. */ public final Comparable addGap(Comparable value, String gap) { @@ -485,7 +703,7 @@ public final Comparable addGap(Comparable value, String gap) { } /** * Adds the String gap param to a low Range endpoint value to determine - * the corrisponding high Range endpoint value. 
+ * the corresponding high Range endpoint value. * Can throw a low level format exception as needed. */ protected abstract Comparable parseAndAddGap(Comparable value, String gap) @@ -695,7 +913,7 @@ protected SimpleOrderedMap refineFacets() throws IOException { // But range faceting does *NOT* use the "leaves" and "partial" syntax // // If/When range facet becomes more like field facet in it's ability to sort and limit the "range buckets" - // FacetRangeProcessor and FacetFieldProcessor should prbably be refactored to share more code. + // FacetRangeProcessor and FacetFieldProcessor should probably be refactored to share more code. boolean skipThisFacet = (fcontext.flags & SKIP_FACET) != 0; @@ -722,7 +940,7 @@ protected SimpleOrderedMap refineFacets() throws IOException { { // refine the special "other" buckets - // NOTE: we're re-useing this variable for each special we look for... + // NOTE: we're re-using this variable for each special we look for... Map specialFacetInfo; specialFacetInfo = (Map) fcontext.facetInfo.get(FacetRangeOther.BEFORE.toString()); @@ -784,7 +1002,20 @@ private Comparable getOrComputeActualEndForRefinement() { private SimpleOrderedMap refineBucket(Object bucketVal, boolean skip, Map facetInfo) throws IOException { - Comparable low = calc.getValue(bucketVal.toString()); + String val = bucketVal.toString(); + if (ranges != null) { + try { + Range range = parseRangeFromString(val, val); + final SimpleOrderedMap bucket = refineRange(range, skip, facetInfo); + bucket.add("val", range.label); + return bucket; + } catch (SyntaxError e) { + // execution won't reach here as ranges are already validated + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e); + } + } + + Comparable low = calc.getValue(val); Comparable high = calc.addGap(low, gap); Comparable max_end = end; if (end.compareTo(high) < 0) { diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java 
b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java index fd8ce79273e7..6860a943841f 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java @@ -21,8 +21,9 @@ import java.util.EnumSet; import java.util.LinkedHashMap; import java.util.List; -import java.util.Objects; import java.util.Map; +import java.util.Objects; +import java.util.Optional; import org.apache.lucene.search.Query; import org.apache.solr.common.SolrException; @@ -96,6 +97,20 @@ private SortDirection(int multiplier) { this.multiplier = multiplier; } + public static SortDirection fromObj(Object direction) { + if (direction == null) { + // should we just default either to desc/asc?? + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Missing Sort direction"); + } + + switch (direction.toString()) { + case "asc": return asc; + case "desc": return desc; + default: + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown Sort direction '" + direction + "'"); + } + } + // asc==-1, desc==1 public int getMultiplier() { return multiplier; @@ -367,12 +382,12 @@ public void addSubFacet(String key, FacetRequest facetRequest) { @Override public String toString() { Map descr = getFacetDescription(); - String s = "facet request: { "; - for (String key : descr.keySet()) { - s += key + ":" + descr.get(key) + ","; + StringBuilder s = new StringBuilder("facet request: { "); + for (Map.Entry entry : descr.entrySet()) { + s.append(entry.getKey()).append(':').append(entry.getValue()).append(','); } - s += "}"; - return s; + s.append('}'); + return s.toString(); } /** @@ -986,11 +1001,10 @@ public FacetField parse(Object arg) throws SyntaxError { Object o = m.get("facet"); parseSubs(o); - // TODO: SOLR-13022 ... validate the sortVariabls against the subs. 
- facet.sort = parseSort( m.get(SORT) ); - facet.prelim_sort = parseSort( m.get("prelim_sort") ); + facet.sort = parseAndValidateSort(facet, m, SORT); + facet.prelim_sort = parseAndValidateSort(facet, m, "prelim_sort"); } else if (arg != null) { - // something lke json.facet.facet.field=2 + // something like json.facet.facet.field=2 throw err("Expected string/map for facet field, received " + arg.getClass().getSimpleName() + "=" + arg); } @@ -1001,42 +1015,69 @@ public FacetField parse(Object arg) throws SyntaxError { return facet; } - - // Sort specification is currently - // sort : 'mystat desc' - // OR - // sort : { mystat : 'desc' } - private static FacetRequest.FacetSort parseSort(Object sort) { + /** + * Parses, validates and returns the {@link FacetRequest.FacetSort} for given sortParam + * and facet field + *

+ * Currently, supported sort specifications are 'mystat desc' OR {mystat: 'desc'} + * index - This is equivalent to 'index asc' + * count - This is equivalent to 'count desc' + *

+ * + * @param facet {@link FacetField} for which sort needs to be parsed and validated + * @param args map containing the sortVal for given sortParam + * @param sortParam parameter for which sort needs to parsed and validated + * @return parsed facet sort + */ + private static FacetRequest.FacetSort parseAndValidateSort(FacetField facet, Map args, String sortParam) { + Object sort = args.get(sortParam); if (sort == null) { return null; - } else if (sort instanceof String) { + } + + FacetRequest.FacetSort facetSort = null; + + if (sort instanceof String) { String sortStr = (String)sort; if (sortStr.endsWith(" asc")) { - return new FacetRequest.FacetSort(sortStr.substring(0, sortStr.length()-" asc".length()), - FacetRequest.SortDirection.asc); + facetSort = new FacetRequest.FacetSort(sortStr.substring(0, sortStr.length()-" asc".length()), + FacetRequest.SortDirection.asc); } else if (sortStr.endsWith(" desc")) { - return new FacetRequest.FacetSort(sortStr.substring(0, sortStr.length()-" desc".length()), - FacetRequest.SortDirection.desc); + facetSort = new FacetRequest.FacetSort(sortStr.substring(0, sortStr.length()-" desc".length()), + FacetRequest.SortDirection.desc); } else { - return new FacetRequest.FacetSort(sortStr, - // default direction for "index" is ascending - ("index".equals(sortStr) - ? FacetRequest.SortDirection.asc - : FacetRequest.SortDirection.desc)); + facetSort = new FacetRequest.FacetSort(sortStr, + // default direction for "index" is ascending + ("index".equals(sortStr) + ? 
FacetRequest.SortDirection.asc + : FacetRequest.SortDirection.desc)); } } else if (sort instanceof Map) { - // sort : { myvar : 'desc' } - Map map = (Map)sort; - // TODO: validate - Map.Entry entry = map.entrySet().iterator().next(); - String k = entry.getKey(); - Object v = entry.getValue(); - return new FacetRequest.FacetSort(k, FacetRequest.SortDirection.valueOf(v.toString())); + // { myvar : 'desc' } + Optional> optional = ((Map)sort).entrySet().stream().findFirst(); + if (optional.isPresent()) { + Map.Entry entry = optional.get(); + facetSort = new FacetRequest.FacetSort(entry.getKey(), FacetRequest.SortDirection.fromObj(entry.getValue())); + } } else { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "Expected string/map for 'sort', received "+ sort.getClass().getSimpleName() + "=" + sort); + "Expected string/map for '" + sortParam +"', received "+ sort.getClass().getSimpleName() + "=" + sort); } + + Map facetStats = facet.facetStats; + // validate facet sort + boolean isValidSort = facetSort == null || + "index".equals(facetSort.sortVariable) || + "count".equals(facetSort.sortVariable) || + (facetStats != null && facetStats.containsKey(facetSort.sortVariable)); + + if (!isValidSort) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Invalid " + sortParam + " option '" + sort + "' for field '" + facet.field + "'"); + } + return facetSort; } + } @@ -1057,10 +1098,12 @@ public FacetRange parse(Object arg) throws SyntaxError { Map m = (Map) arg; facet.field = getString(m, "field", null); + facet.ranges = getVal(m, "ranges", false); - facet.start = getVal(m, "start", true); - facet.end = getVal(m, "end", true); - facet.gap = getVal(m, "gap", true); + boolean required = facet.ranges == null; + facet.start = getVal(m, "start", required); + facet.end = getVal(m, "end", required); + facet.gap = getVal(m, "gap", required); facet.hardend = getBoolean(m, "hardend", facet.hardend); facet.mincount = getLong(m, "mincount", 0); @@ -1069,7 
+1112,7 @@ public FacetRange parse(Object arg) throws SyntaxError { List list = getStringList(m, "include", false); String[] includeList = null; if (list != null) { - includeList = (String[])list.toArray(new String[list.size()]); + includeList = list.toArray(new String[list.size()]); } facet.include = FacetParams.FacetRangeInclude.parseParam( includeList ); facet.others = EnumSet.noneOf(FacetParams.FacetRangeOther.class); diff --git a/solr/core/src/java/org/apache/solr/search/mlt/SimpleMLTQParser.java b/solr/core/src/java/org/apache/solr/search/mlt/SimpleMLTQParser.java index 451c62102846..c57fb0000624 100644 --- a/solr/core/src/java/org/apache/solr/search/mlt/SimpleMLTQParser.java +++ b/solr/core/src/java/org/apache/solr/search/mlt/SimpleMLTQParser.java @@ -100,10 +100,10 @@ public Query parse() { } else { Map fieldDefinitions = req.getSearcher().getSchema().getFields(); ArrayList fields = new ArrayList(); - for (String fieldName : fieldDefinitions.keySet()) { - if (fieldDefinitions.get(fieldName).indexed() && fieldDefinitions.get(fieldName).stored()) - if (fieldDefinitions.get(fieldName).getType().getNumberType() == null) - fields.add(fieldName); + for (Map.Entry entry : fieldDefinitions.entrySet()) { + if (entry.getValue().indexed() && entry.getValue().stored()) + if (entry.getValue().getType().getNumberType() == null) + fields.add(entry.getKey()); } fieldNames = fields.toArray(new String[0]); } diff --git a/solr/core/src/java/org/apache/solr/search/stats/ExactSharedStatsCache.java b/solr/core/src/java/org/apache/solr/search/stats/ExactSharedStatsCache.java index c7758ff8ce67..93fb6e4ae8a6 100644 --- a/solr/core/src/java/org/apache/solr/search/stats/ExactSharedStatsCache.java +++ b/solr/core/src/java/org/apache/solr/search/stats/ExactSharedStatsCache.java @@ -21,13 +21,19 @@ import java.util.Map.Entry; import java.util.concurrent.ConcurrentHashMap; -import org.apache.solr.core.PluginInfo; import org.apache.solr.handler.component.ResponseBuilder; import 
org.apache.solr.request.SolrQueryRequest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - +/** + * This class implements exact caching of statistics. It requires an additional + * round-trip to parse query at shard servers, and return term statistics for + * query terms (and collection statistics for term fields). + *

Global statistics are accumulated in the instance of this component (with the same life-cycle as + * SolrSearcher), in unbounded maps. NOTE: This may lead to excessive memory usage, in which case + * a {@link LRUStatsCache} should be considered.

+ */ public class ExactSharedStatsCache extends ExactStatsCache { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -39,13 +45,19 @@ public class ExactSharedStatsCache extends ExactStatsCache { private final Map currentGlobalColStats = new ConcurrentHashMap<>(); @Override - public StatsSource get(SolrQueryRequest req) { + protected StatsSource doGet(SolrQueryRequest req) { log.debug("total={}, cache {}", currentGlobalColStats, currentGlobalTermStats.size()); - return new ExactStatsSource(currentGlobalTermStats, currentGlobalColStats); + return new ExactStatsSource(statsCacheMetrics, currentGlobalTermStats, currentGlobalColStats); } - + @Override - public void init(PluginInfo info) {} + public void clear() { + super.clear(); + perShardTermStats.clear(); + perShardColStats.clear(); + currentGlobalTermStats.clear(); + currentGlobalColStats.clear(); + } @Override protected void addToPerShardColStats(SolrQueryRequest req, String shard, diff --git a/solr/core/src/java/org/apache/solr/search/stats/ExactStatsCache.java b/solr/core/src/java/org/apache/solr/search/stats/ExactStatsCache.java index fe315d2114ad..fc60f1ca87cd 100644 --- a/solr/core/src/java/org/apache/solr/search/stats/ExactStatsCache.java +++ b/solr/core/src/java/org/apache/solr/search/stats/ExactStatsCache.java @@ -18,6 +18,7 @@ import java.io.IOException; import java.lang.invoke.MethodHandles; +import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -25,22 +26,23 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Set; +import java.util.stream.Collectors; -import com.google.common.collect.Lists; import org.apache.lucene.index.Term; -import org.apache.lucene.index.TermStates; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; +import 
org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermStatistics; import org.apache.solr.client.solrj.SolrResponse; +import org.apache.solr.cloud.CloudDescriptor; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.ShardParams; import org.apache.solr.common.util.NamedList; -import org.apache.solr.core.PluginInfo; +import org.apache.solr.common.util.Utils; import org.apache.solr.handler.component.ResponseBuilder; import org.apache.solr.handler.component.ShardRequest; import org.apache.solr.handler.component.ShardResponse; @@ -53,36 +55,30 @@ * This class implements exact caching of statistics. It requires an additional * round-trip to parse query at shard servers, and return term statistics for * query terms (and collection statistics for term fields). + *

Global statistics are cached in the current request's context and discarded + * once the processing of the current request is complete. There's no support for + * longer-term caching, and each request needs to build the global statistics from scratch, + * even for repeating queries.

*/ public class ExactStatsCache extends StatsCache { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - // experimenting with strategy that takes more RAM, but also doesn't share memory - // across threads - private static final String CURRENT_GLOBAL_COL_STATS = "org.apache.solr.stats.currentGlobalColStats"; - private static final String CURRENT_GLOBAL_TERM_STATS = "org.apache.solr.stats.currentGlobalTermStats"; - private static final String PER_SHARD_TERM_STATS = "org.apache.solr.stats.perShardTermStats"; - private static final String PER_SHARD_COL_STATS = "org.apache.solr.stats.perShardColStats"; + private static final String CURRENT_GLOBAL_COL_STATS = "solr.stats.globalCol"; + private static final String CURRENT_GLOBAL_TERM_STATS = "solr.stats.globalTerm"; + private static final String PER_SHARD_TERM_STATS = "solr.stats.shardTerm"; + private static final String PER_SHARD_COL_STATS = "solr.stats.shardCol"; @Override - public StatsSource get(SolrQueryRequest req) { - Map currentGlobalColStats = (Map) req.getContext().get(CURRENT_GLOBAL_COL_STATS); - Map currentGlobalTermStats = (Map) req.getContext().get(CURRENT_GLOBAL_TERM_STATS); - if (currentGlobalColStats == null) { - currentGlobalColStats = Collections.emptyMap(); - } - if (currentGlobalTermStats == null) { - currentGlobalTermStats = Collections.emptyMap(); - } + protected StatsSource doGet(SolrQueryRequest req) { + Map currentGlobalColStats = (Map) req.getContext().getOrDefault(CURRENT_GLOBAL_COL_STATS, Collections.emptyMap()); + Map currentGlobalTermStats = (Map) req.getContext().getOrDefault(CURRENT_GLOBAL_TERM_STATS, Collections.emptyMap()); log.debug("Returning StatsSource. 
Collection stats={}, Term stats size= {}", currentGlobalColStats, currentGlobalTermStats.size()); - return new ExactStatsSource(currentGlobalTermStats, currentGlobalColStats); + return new ExactStatsSource(statsCacheMetrics, currentGlobalTermStats, currentGlobalColStats); } @Override - public void init(PluginInfo info) {} - - @Override - public ShardRequest retrieveStatsRequest(ResponseBuilder rb) { + protected ShardRequest doRetrieveStatsRequest(ResponseBuilder rb) { + // always request shard statistics ShardRequest sreq = new ShardRequest(); sreq.purpose = ShardRequest.PURPOSE_GET_TERM_STATS; sreq.params = new ModifiableSolrParams(rb.req.getParams()); @@ -92,20 +88,27 @@ public ShardRequest retrieveStatsRequest(ResponseBuilder rb) { } @Override - public void mergeToGlobalStats(SolrQueryRequest req, List responses) { - Set allTerms = new HashSet<>(); + protected void doMergeToGlobalStats(SolrQueryRequest req, List responses) { + Set allTerms = new HashSet<>(); for (ShardResponse r : responses) { log.debug("Merging to global stats, shard={}, response={}", r.getShard(), r.getSolrResponse().getResponse()); + // response's "shard" is really a shardURL, or even a list of URLs String shard = r.getShard(); SolrResponse res = r.getSolrResponse(); + if (res.getException() != null) { + log.debug("Exception response={}", res); + continue; + } + if (res.getResponse().get(ShardParams.SHARD_NAME) != null) { + shard = (String) res.getResponse().get(ShardParams.SHARD_NAME); + } NamedList nl = res.getResponse(); - // TODO: nl == null if not all shards respond (no server hosting shard) String termStatsString = (String) nl.get(TERM_STATS_KEY); if (termStatsString != null) { addToPerShardTermStats(req, shard, termStatsString); } - List terms = nl.getAll(TERMS_KEY); + Set terms = StatsUtil.termsFromEncodedString((String) nl.get(TERMS_KEY)); allTerms.addAll(terms); String colStatsString = (String) nl.get(COL_STATS_KEY); Map colStats = StatsUtil.colStatsMapFromString(colStatsString); @@ 
-114,48 +117,36 @@ public void mergeToGlobalStats(SolrQueryRequest req, List respons } } if (allTerms.size() > 0) { - req.getContext().put(TERMS_KEY, Lists.newArrayList(allTerms)); + req.getContext().put(TERMS_KEY, StatsUtil.termsToEncodedString(allTerms)); } if (log.isDebugEnabled()) printStats(req); } protected void addToPerShardColStats(SolrQueryRequest req, String shard, Map colStats) { - Map> perShardColStats = (Map>) req.getContext().get(PER_SHARD_COL_STATS); - if (perShardColStats == null) { - perShardColStats = new HashMap<>(); - req.getContext().put(PER_SHARD_COL_STATS, perShardColStats); - } + Map> perShardColStats = (Map>) req.getContext().computeIfAbsent(PER_SHARD_COL_STATS, Utils.NEW_HASHMAP_FUN); perShardColStats.put(shard, colStats); } protected void printStats(SolrQueryRequest req) { - Map> perShardTermStats = (Map>) req.getContext().get(PER_SHARD_TERM_STATS); - if (perShardTermStats == null) { - perShardTermStats = Collections.emptyMap(); - } - Map> perShardColStats = (Map>) req.getContext().get(PER_SHARD_COL_STATS); - if (perShardColStats == null) { - perShardColStats = Collections.emptyMap(); - } + Map> perShardTermStats = (Map>) req.getContext().getOrDefault(PER_SHARD_TERM_STATS, Collections.emptyMap()); + Map> perShardColStats = (Map>) req.getContext().getOrDefault(PER_SHARD_COL_STATS, Collections.emptyMap()); log.debug("perShardColStats={}, perShardTermStats={}", perShardColStats, perShardTermStats); } protected void addToPerShardTermStats(SolrQueryRequest req, String shard, String termStatsString) { Map termStats = StatsUtil.termStatsMapFromString(termStatsString); if (termStats != null) { - Map> perShardTermStats = (Map>) req.getContext().get(PER_SHARD_TERM_STATS); - if (perShardTermStats == null) { - perShardTermStats = new HashMap<>(); - req.getContext().put(PER_SHARD_TERM_STATS, perShardTermStats); - } + Map> perShardTermStats = (Map>) req.getContext().computeIfAbsent(PER_SHARD_TERM_STATS, Utils.NEW_HASHMAP_FUN); 
perShardTermStats.put(shard, termStats); } } @Override - public void returnLocalStats(ResponseBuilder rb, SolrIndexSearcher searcher) { + protected void doReturnLocalStats(ResponseBuilder rb, SolrIndexSearcher searcher) { Query q = rb.getQuery(); try { + Set additionalTerms = StatsUtil.termsFromEncodedString(rb.req.getParams().get(TERMS_KEY)); + Set additionalFields = StatsUtil.fieldsFromString(rb.req.getParams().get(FIELDS_KEY)); HashSet terms = new HashSet<>(); HashMap statsMap = new HashMap<>(); HashMap colMap = new HashMap<>(); @@ -170,29 +161,39 @@ public CollectionStatistics collectionStatistics(String field) throws IOExceptio } @Override - public TermStatistics termStatistics(Term term, TermStates context) throws IOException { - TermStatistics ts = super.termStatistics(term, context); - if (ts == null) { - return null; - } + public TermStatistics termStatistics(Term term, int docFreq, long totalTermFreq) throws IOException { + TermStatistics ts = super.termStatistics(term, docFreq, totalTermFreq); terms.add(term); statsMap.put(term.toString(), new TermStats(term.field(), ts)); return ts; } }; statsCollectingSearcher.createWeight(searcher.rewrite(q), ScoreMode.COMPLETE, 1); + for (String field : additionalFields) { + if (colMap.containsKey(field)) { + continue; + } + statsCollectingSearcher.collectionStatistics(field); + } + for (Term term : additionalTerms) { + statsCollectingSearcher.createWeight(searcher.rewrite(new TermQuery(term)), ScoreMode.COMPLETE, 1); + } - for (Term t : terms) { - rb.rsp.add(TERMS_KEY, t.toString()); + CloudDescriptor cloudDescriptor = searcher.getCore().getCoreDescriptor().getCloudDescriptor(); + if (cloudDescriptor != null) { + rb.rsp.add(ShardParams.SHARD_NAME, cloudDescriptor.getShardId()); } - if (statsMap.size() != 0) { //Don't add empty keys + if (!terms.isEmpty()) { + rb.rsp.add(TERMS_KEY, StatsUtil.termsToEncodedString(terms)); + } + if (!statsMap.isEmpty()) { //Don't add empty keys String termStatsString = 
StatsUtil.termStatsMapToString(statsMap); rb.rsp.add(TERM_STATS_KEY, termStatsString); if (log.isDebugEnabled()) { log.debug("termStats={}, terms={}, numDocs={}", termStatsString, terms, searcher.maxDoc()); } } - if (colMap.size() != 0){ + if (!colMap.isEmpty()) { String colStatsString = StatsUtil.colStatsMapToString(colMap); rb.rsp.add(COL_STATS_KEY, colStatsString); if (log.isDebugEnabled()) { @@ -206,21 +207,29 @@ public TermStatistics termStatistics(Term term, TermStates context) throws IOExc } @Override - public void sendGlobalStats(ResponseBuilder rb, ShardRequest outgoing) { - outgoing.purpose |= ShardRequest.PURPOSE_SET_TERM_STATS; + protected void doSendGlobalStats(ResponseBuilder rb, ShardRequest outgoing) { ModifiableSolrParams params = outgoing.params; - List terms = (List) rb.req.getContext().get(TERMS_KEY); - if (terms != null) { - Set fields = new HashSet<>(); - for (String t : terms) { - String[] fv = t.split(":"); - fields.add(fv[0]); - } + Set terms = StatsUtil.termsFromEncodedString((String) rb.req.getContext().get(TERMS_KEY)); + if (!terms.isEmpty()) { + Set fields = terms.stream().map(t -> t.field()).collect(Collectors.toSet()); Map globalTermStats = new HashMap<>(); Map globalColStats = new HashMap<>(); // aggregate collection stats, only for the field in terms - - for (String shard : rb.shards) { + String collectionName = rb.req.getCore().getCoreDescriptor().getCollectionName(); + if (collectionName == null) { + collectionName = rb.req.getCore().getCoreDescriptor().getName(); + } + List shards = new ArrayList<>(); + for (String shardUrl : rb.shards) { + String shard = StatsUtil.shardUrlToShard(collectionName, shardUrl); + if (shard == null) { + log.warn("Can't determine shard from collectionName=" + collectionName + " and shardUrl=" + shardUrl + ", skipping..."); + continue; + } else { + shards.add(shard); + } + } + for (String shard : shards) { Map s = getPerShardColStats(rb, shard); if (s == null) { continue; @@ -239,17 +248,18 @@ public 
void sendGlobalStats(ResponseBuilder rb, ShardRequest outgoing) { } params.add(COL_STATS_KEY, StatsUtil.colStatsMapToString(globalColStats)); // sum up only from relevant shards - for (String t : terms) { - params.add(TERMS_KEY, t); - for (String shard : rb.shards) { - TermStats termStats = getPerShardTermStats(rb.req, t, shard); + params.add(TERMS_KEY, StatsUtil.termsToEncodedString(terms)); + for (Term t : terms) { + String term = t.toString(); + for (String shard : shards) { + TermStats termStats = getPerShardTermStats(rb.req, term, shard); if (termStats == null || termStats.docFreq == 0) { continue; } - TermStats g = globalTermStats.get(t); + TermStats g = globalTermStats.get(term); if (g == null) { - g = new TermStats(t); - globalTermStats.put(t, g); + g = new TermStats(term); + globalTermStats.put(term, g); } g.add(termStats); } @@ -261,24 +271,18 @@ public void sendGlobalStats(ResponseBuilder rb, ShardRequest outgoing) { } protected Map getPerShardColStats(ResponseBuilder rb, String shard) { - Map> perShardColStats = (Map>) rb.req.getContext().get(PER_SHARD_COL_STATS); - if (perShardColStats == null) { - perShardColStats = Collections.emptyMap(); - } + Map> perShardColStats = (Map>) rb.req.getContext().getOrDefault(PER_SHARD_COL_STATS, Collections.emptyMap()); return perShardColStats.get(shard); } protected TermStats getPerShardTermStats(SolrQueryRequest req, String t, String shard) { - Map> perShardTermStats = (Map>) req.getContext().get(PER_SHARD_TERM_STATS); - if (perShardTermStats == null) { - perShardTermStats = Collections.emptyMap(); - } + Map> perShardTermStats = (Map>) req.getContext().getOrDefault(PER_SHARD_TERM_STATS, Collections.emptyMap()); Map cache = perShardTermStats.get(shard); return (cache != null) ? 
cache.get(t) : null; //Term doesn't exist in shard } @Override - public void receiveGlobalStats(SolrQueryRequest req) { + protected void doReceiveGlobalStats(SolrQueryRequest req) { String globalTermStats = req.getParams().get(TERM_STATS_KEY); String globalColStats = req.getParams().get(COL_STATS_KEY); if (globalColStats != null) { @@ -301,34 +305,28 @@ public void receiveGlobalStats(SolrQueryRequest req) { protected void addToGlobalColStats(SolrQueryRequest req, Entry e) { - Map currentGlobalColStats = (Map) req.getContext().get(CURRENT_GLOBAL_COL_STATS); - if (currentGlobalColStats == null) { - currentGlobalColStats = new HashMap<>(); - req.getContext().put(CURRENT_GLOBAL_COL_STATS, currentGlobalColStats); - } + Map currentGlobalColStats = (Map) req.getContext().computeIfAbsent(CURRENT_GLOBAL_COL_STATS, Utils.NEW_HASHMAP_FUN); currentGlobalColStats.put(e.getKey(), e.getValue()); } protected void addToGlobalTermStats(SolrQueryRequest req, Entry e) { - Map currentGlobalTermStats = (Map) req.getContext().get(CURRENT_GLOBAL_TERM_STATS); - if (currentGlobalTermStats == null) { - currentGlobalTermStats = new HashMap<>(); - req.getContext().put(CURRENT_GLOBAL_TERM_STATS, currentGlobalTermStats); - } + Map currentGlobalTermStats = (Map) req.getContext().computeIfAbsent(CURRENT_GLOBAL_TERM_STATS, Utils.NEW_HASHMAP_FUN); currentGlobalTermStats.put(e.getKey(), e.getValue()); } protected static class ExactStatsSource extends StatsSource { private final Map termStatsCache; private final Map colStatsCache; + private final StatsCacheMetrics metrics; - public ExactStatsSource(Map termStatsCache, + public ExactStatsSource(StatsCacheMetrics metrics, Map termStatsCache, Map colStatsCache) { + this.metrics = metrics; this.termStatsCache = termStatsCache; this.colStatsCache = colStatsCache; } - public TermStatistics termStatistics(SolrIndexSearcher localSearcher, Term term, TermStates context) + public TermStatistics termStatistics(SolrIndexSearcher localSearcher, Term term, int 
docFreq, long totalTermFreq) throws IOException { TermStats termStats = termStatsCache.get(term.toString()); // TermStats == null is also true if term has no docFreq anyway, @@ -336,7 +334,8 @@ public TermStatistics termStatistics(SolrIndexSearcher localSearcher, Term term, // Not sure we need a warning here if (termStats == null) { log.debug("Missing global termStats info for term={}, using local stats", term); - return localSearcher.localTermStatistics(term, context); + metrics.missingGlobalTermStats.increment(); + return localSearcher != null ? localSearcher.localTermStatistics(term, docFreq, totalTermFreq) : null; } else { return termStats.toTermStatistics(); } @@ -348,7 +347,8 @@ public CollectionStatistics collectionStatistics(SolrIndexSearcher localSearcher CollectionStats colStats = colStatsCache.get(field); if (colStats == null) { log.debug("Missing global colStats info for field={}, using local", field); - return localSearcher.localCollectionStatistics(field); + metrics.missingGlobalFieldStats.increment(); + return localSearcher != null ? 
localSearcher.localCollectionStatistics(field) : null; } else { return colStats.toCollectionStatistics(); } diff --git a/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java b/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java index c94695acf7f6..c0b425fcc52b 100644 --- a/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java +++ b/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java @@ -21,14 +21,17 @@ import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.LongAdder; import org.apache.lucene.index.Term; -import org.apache.lucene.index.TermStates; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.TermStatistics; +import org.apache.solr.common.params.CommonParams; import org.apache.solr.core.PluginInfo; import org.apache.solr.handler.component.ResponseBuilder; +import org.apache.solr.handler.component.ShardRequest; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.search.FastLRUCache; import org.apache.solr.search.SolrCache; @@ -38,44 +41,129 @@ /** * Unlike {@link ExactStatsCache} this implementation preserves term stats - * across queries in a set of LRU caches, and based on surface features of a - * query it determines the need to send additional RPC-s. As a result the - * additional RPC-s are needed much less frequently. - * + * across queries in a set of LRU caches (with the same life-cycle as SolrIndexSearcher), + * and based on surface features of a + * query it determines the need to send additional requests to retrieve local term + * and collection statistics from shards. As a result the + * additional requests may be needed much less frequently. *

- * Query terms and their stats are maintained in a set of maps. At the query - * front-end there will be as many maps as there are shards, each maintaining - * the respective shard statistics. At each shard server there is a single map - * that is updated with the global statistics on every request. + * Query terms, their stats and field stats are maintained in LRU caches, with the size by default + * {@link #DEFAULT_MAX_SIZE}, one cache per shard. These caches + * are updated as needed (when term or field statistics are missing). Each instance of the component + * keeps also a global stats cache, which is aggregated from per-shard caches. + *

Cache entries expire after a max idle time, by default {@link #DEFAULT_MAX_IDLE_TIME}. */ public class LRUStatsCache extends ExactStatsCache { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - + + public static final int DEFAULT_MAX_SIZE = 200; + public static final int DEFAULT_MAX_IDLE_TIME = 60; + // local stats obtained from shard servers + // map of > private final Map> perShardTermStats = new ConcurrentHashMap<>(); + // map of > private final Map> perShardColStats = new ConcurrentHashMap<>(); // global stats synchronized from the master + + // cache of private final FastLRUCache currentGlobalTermStats = new FastLRUCache<>(); - private final Map currentGlobalColStats = new ConcurrentHashMap<>(); - - // local term context (caching term lookups) + // cache of + private final FastLRUCache currentGlobalColStats = new FastLRUCache<>(); - private final Map lruCacheInitArgs = new HashMap(); + // missing stats to be fetched with the next request + private Set missingColStats = ConcurrentHashMap.newKeySet(); + private Set missingTermStats = ConcurrentHashMap.newKeySet(); + private final Map lruCacheInitArgs = new HashMap<>(); + + private final StatsCacheMetrics ignorableMetrics = new StatsCacheMetrics(); + @Override - public StatsSource get(SolrQueryRequest req) { + protected StatsSource doGet(SolrQueryRequest req) { log.debug("## GET total={}, cache {}", currentGlobalColStats , currentGlobalTermStats.size()); - return new LRUStatsSource(currentGlobalTermStats, currentGlobalColStats); + return new LRUStatsSource(statsCacheMetrics); } - + + @Override + public void clear() { + super.clear(); + perShardTermStats.clear(); + perShardColStats.clear(); + currentGlobalTermStats.clear(); + currentGlobalColStats.clear(); + ignorableMetrics.clear(); + } + @Override public void init(PluginInfo info) { - // TODO: make this configurable via PluginInfo - lruCacheInitArgs.put("size", "100"); + super.init(info); + if (info != null && 
info.attributes != null) { + lruCacheInitArgs.putAll(info.attributes); + } + lruCacheInitArgs.computeIfAbsent(SolrCache.SIZE_PARAM, s -> String.valueOf(DEFAULT_MAX_SIZE)); + lruCacheInitArgs.computeIfAbsent(SolrCache.MAX_IDLE_TIME_PARAM, t -> String.valueOf(DEFAULT_MAX_IDLE_TIME)); + Map map = new HashMap<>(lruCacheInitArgs); + map.put(CommonParams.NAME, "globalTermStats"); currentGlobalTermStats.init(lruCacheInitArgs, null, null); + currentGlobalTermStats.setState(SolrCache.State.LIVE); + map = new HashMap<>(lruCacheInitArgs); + map.put(CommonParams.NAME, "globalColStats"); + currentGlobalColStats.init(lruCacheInitArgs, null, null); + currentGlobalColStats.setState(SolrCache.State.LIVE); } + + @Override + protected ShardRequest doRetrieveStatsRequest(ResponseBuilder rb) { + // check approximately what terms are needed. + + // NOTE: query rewrite only expands to terms that are present in the local index + // so it's possible that the result will contain less terms than present in all shards. + + // HOWEVER: the absence of these terms is recorded by LRUStatsSource, and they will be + // force-fetched on next request and cached. 
+ + // check for missing stats from previous requests + if (!missingColStats.isEmpty() || !missingColStats.isEmpty()) { + // needs to fetch anyway, so get the full query stats + the missing stats for caching + ShardRequest sreq = super.doRetrieveStatsRequest(rb); + if (!missingColStats.isEmpty()) { + Set requestColStats = missingColStats; + // there's a small window when new items may be added before + // creating the request and clearing, so don't clear - instead replace the instance + missingColStats = ConcurrentHashMap.newKeySet(); + sreq.params.add(FIELDS_KEY, StatsUtil.fieldsToString(requestColStats)); + } + if (!missingTermStats.isEmpty()) { + Set requestTermStats = missingTermStats; + missingTermStats = ConcurrentHashMap.newKeySet(); + sreq.params.add(TERMS_KEY, StatsUtil.termsToEncodedString(requestTermStats)); + } + return sreq; + } + + // rewrite locally to see if there are any missing terms. See the note above for caveats. + LongAdder missing = new LongAdder(); + try { + // use ignorableMetrics to avoid counting this checking as real misses + approxCheckMissingStats(rb, new LRUStatsSource(ignorableMetrics), t -> missing.increment(), f -> missing.increment()); + if (missing.sum() == 0) { + // it should be (approximately) ok to skip the fetching + + // since we already incremented the stats decrement it here + statsCacheMetrics.retrieveStats.decrement(); + statsCacheMetrics.useCachedGlobalStats.increment(); + return null; + } else { + return super.doRetrieveStatsRequest(rb); + } + } catch (IOException e) { + log.warn("Exception checking missing stats for query " + rb.getQuery() + ", forcing retrieving stats", e); + // retrieve anyway + return super.doRetrieveStatsRequest(rb); + } } - + @Override protected void addToGlobalTermStats(SolrQueryRequest req, Entry e) { currentGlobalTermStats.put(e.getKey(), e.getValue()); @@ -95,12 +183,14 @@ protected Map getPerShardColStats(ResponseBuilder rb, St protected void addToPerShardTermStats(SolrQueryRequest req, 
String shard, String termStatsString) { Map termStats = StatsUtil.termStatsMapFromString(termStatsString); if (termStats != null) { - SolrCache cache = perShardTermStats.get(shard); - if (cache == null) { // initialize - cache = new FastLRUCache<>(); - cache.init(lruCacheInitArgs, null, null); - perShardTermStats.put(shard, cache); - } + SolrCache cache = perShardTermStats.computeIfAbsent(shard, s -> { + FastLRUCache c = new FastLRUCache<>(); + Map map = new HashMap<>(lruCacheInitArgs); + map.put(CommonParams.NAME, s); + c.init(map, null, null); + c.setState(SolrCache.State.LIVE); + return c; + }); for (Entry e : termStats.entrySet()) { cache.put(e.getKey(), e.getValue()); } @@ -123,21 +213,22 @@ protected void printStats(SolrQueryRequest req) { log.debug("## MERGED: perShardColStats={}, perShardTermStats={}", perShardColStats, perShardTermStats); } - static class LRUStatsSource extends StatsSource { - private final SolrCache termStatsCache; - private final Map colStatsCache; - - public LRUStatsSource(SolrCache termStatsCache, Map colStatsCache) { - this.termStatsCache = termStatsCache; - this.colStatsCache = colStatsCache; + class LRUStatsSource extends StatsSource { + private final StatsCacheMetrics metrics; + + LRUStatsSource(StatsCacheMetrics metrics) { + this.metrics = metrics; } + @Override - public TermStatistics termStatistics(SolrIndexSearcher localSearcher, Term term, TermStates context) + public TermStatistics termStatistics(SolrIndexSearcher localSearcher, Term term, int docFreq, long totalTermFreq) throws IOException { - TermStats termStats = termStatsCache.get(term.toString()); + TermStats termStats = currentGlobalTermStats.get(term.toString()); if (termStats == null) { log.debug("## Missing global termStats info: {}, using local", term); - return localSearcher.localTermStatistics(term, context); + missingTermStats.add(term); + metrics.missingGlobalTermStats.increment(); + return localSearcher != null ? 
localSearcher.localTermStatistics(term, docFreq, totalTermFreq) : null; } else { return termStats.toTermStatistics(); } @@ -146,10 +237,12 @@ public TermStatistics termStatistics(SolrIndexSearcher localSearcher, Term term, @Override public CollectionStatistics collectionStatistics(SolrIndexSearcher localSearcher, String field) throws IOException { - CollectionStats colStats = colStatsCache.get(field); + CollectionStats colStats = currentGlobalColStats.get(field); if (colStats == null) { log.debug("## Missing global colStats info: {}, using local", field); - return localSearcher.localCollectionStatistics(field); + missingColStats.add(field); + metrics.missingGlobalFieldStats.increment(); + return localSearcher != null ? localSearcher.localCollectionStatistics(field) : null; } else { return colStats.toCollectionStatistics(); } diff --git a/solr/core/src/java/org/apache/solr/search/stats/LocalStatsCache.java b/solr/core/src/java/org/apache/solr/search/stats/LocalStatsCache.java index a0fb5b6d8c05..3a3ebd1af7b7 100644 --- a/solr/core/src/java/org/apache/solr/search/stats/LocalStatsCache.java +++ b/solr/core/src/java/org/apache/solr/search/stats/LocalStatsCache.java @@ -20,7 +20,6 @@ import java.util.List; -import org.apache.solr.core.PluginInfo; import org.apache.solr.handler.component.ResponseBuilder; import org.apache.solr.handler.component.ShardRequest; import org.apache.solr.handler.component.ShardResponse; @@ -37,27 +36,25 @@ public class LocalStatsCache extends StatsCache { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @Override - public StatsSource get(SolrQueryRequest req) { + protected StatsSource doGet(SolrQueryRequest req) { log.debug("## GET {}", req); - return new LocalStatsSource(); - } - - @Override - public void init(PluginInfo info) { + return new LocalStatsSource(statsCacheMetrics); } // by returning null we don't create additional round-trip request. 
@Override - public ShardRequest retrieveStatsRequest(ResponseBuilder rb) { - log.debug("## RDR {}", rb.req); + protected ShardRequest doRetrieveStatsRequest(ResponseBuilder rb) { + log.debug("## RSR {}", rb.req); + // already incremented the stats - decrement it now + statsCacheMetrics.retrieveStats.decrement(); return null; } @Override - public void mergeToGlobalStats(SolrQueryRequest req, + protected void doMergeToGlobalStats(SolrQueryRequest req, List responses) { if (log.isDebugEnabled()) { - log.debug("## MTGD {}", req); + log.debug("## MTGS {}", req); for (ShardResponse r : responses) { log.debug(" - {}", r); } @@ -65,17 +62,17 @@ public void mergeToGlobalStats(SolrQueryRequest req, } @Override - public void returnLocalStats(ResponseBuilder rb, SolrIndexSearcher searcher) { - log.debug("## RLD {}", rb.req); + protected void doReturnLocalStats(ResponseBuilder rb, SolrIndexSearcher searcher) { + log.debug("## RLS {}", rb.req); } @Override - public void receiveGlobalStats(SolrQueryRequest req) { - log.debug("## RGD {}", req); + protected void doReceiveGlobalStats(SolrQueryRequest req) { + log.debug("## RGS {}", req); } @Override - public void sendGlobalStats(ResponseBuilder rb, ShardRequest outgoing) { - log.debug("## SGD {}", outgoing); + protected void doSendGlobalStats(ResponseBuilder rb, ShardRequest outgoing) { + log.debug("## SGS {}", outgoing); } } diff --git a/solr/core/src/java/org/apache/solr/search/stats/LocalStatsSource.java b/solr/core/src/java/org/apache/solr/search/stats/LocalStatsSource.java index 3a08a6101515..542e35b54f61 100644 --- a/solr/core/src/java/org/apache/solr/search/stats/LocalStatsSource.java +++ b/solr/core/src/java/org/apache/solr/search/stats/LocalStatsSource.java @@ -19,7 +19,6 @@ import java.io.IOException; import org.apache.lucene.index.Term; -import org.apache.lucene.index.TermStates; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.TermStatistics; import 
org.apache.solr.search.SolrIndexSearcher; @@ -29,19 +28,23 @@ * local statistics. */ public final class LocalStatsSource extends StatsSource { + private final StatsCache.StatsCacheMetrics metrics; - public LocalStatsSource() { + public LocalStatsSource(StatsCache.StatsCacheMetrics metrics) { + this.metrics = metrics; } @Override - public TermStatistics termStatistics(SolrIndexSearcher localSearcher, Term term, TermStates context) + public TermStatistics termStatistics(SolrIndexSearcher localSearcher, Term term, int docFreq, long totalTermFreq) throws IOException { - return localSearcher.localTermStatistics(term, context); + metrics.missingGlobalTermStats.increment(); + return localSearcher.localTermStatistics(term, docFreq, totalTermFreq); } @Override public CollectionStatistics collectionStatistics(SolrIndexSearcher localSearcher, String field) throws IOException { + metrics.missingGlobalFieldStats.increment(); return localSearcher.localCollectionStatistics(field); } } diff --git a/solr/core/src/java/org/apache/solr/search/stats/StatsCache.java b/solr/core/src/java/org/apache/solr/search/stats/StatsCache.java index ab5790e15ed7..238bb1257bbd 100644 --- a/solr/core/src/java/org/apache/solr/search/stats/StatsCache.java +++ b/solr/core/src/java/org/apache/solr/search/stats/StatsCache.java @@ -16,14 +16,29 @@ */ package org.apache.solr.search.stats; +import java.io.IOException; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.LongAdder; +import java.util.function.BiConsumer; +import java.util.function.Consumer; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.CollectionStatistics; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.Weight; +import org.apache.solr.core.PluginInfo; 
import org.apache.solr.handler.component.ResponseBuilder; import org.apache.solr.handler.component.ShardRequest; import org.apache.solr.handler.component.ShardResponse; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.search.QueryCommand; +import org.apache.solr.search.SolrCache; import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.util.plugin.PluginInfoInitialized; @@ -36,7 +51,7 @@ *

* There are instances of this class at the aggregator node (where the partial * data from shards is aggregated), and on each core involved in a shard request - * (where this data is maintained and updated from the central cache). + * (where this data is maintained and updated from the aggregator's cache). *

*/ public abstract class StatsCache implements PluginInfoInitialized { @@ -44,75 +59,228 @@ public abstract class StatsCache implements PluginInfoInitialized { /** * Map of terms and {@link TermStats}. */ - public static final String TERM_STATS_KEY = "org.apache.solr.stats.termStats"; + public static final String TERM_STATS_KEY = "solr.stats.term"; /** * Value of {@link CollectionStats}. */ - public static final String COL_STATS_KEY = "org.apache.solr.stats.colStats"; + public static final String COL_STATS_KEY = "solr.stats.col"; /** * List of terms in the query. */ - public static final String TERMS_KEY = "org.apache.solr.stats.terms"; + public static final String TERMS_KEY = "solr.stats.terms"; + /** + * List of fields in the query. + */ + public static final String FIELDS_KEY = "solr.stats.fields"; + + public static final class StatsCacheMetrics { + public final LongAdder lookups = new LongAdder(); + public final LongAdder retrieveStats = new LongAdder(); + public final LongAdder receiveGlobalStats = new LongAdder(); + public final LongAdder returnLocalStats = new LongAdder(); + public final LongAdder mergeToGlobalStats = new LongAdder(); + public final LongAdder sendGlobalStats = new LongAdder(); + public final LongAdder useCachedGlobalStats = new LongAdder(); + public final LongAdder missingGlobalTermStats = new LongAdder(); + public final LongAdder missingGlobalFieldStats = new LongAdder(); + + public void clear() { + lookups.reset(); + retrieveStats.reset(); + receiveGlobalStats.reset(); + returnLocalStats.reset(); + mergeToGlobalStats.reset(); + sendGlobalStats.reset(); + useCachedGlobalStats.reset(); + missingGlobalTermStats.reset(); + missingGlobalFieldStats.reset(); + } + + public void getSnapshot(BiConsumer consumer) { + consumer.accept(SolrCache.LOOKUPS_PARAM, lookups.longValue()); + consumer.accept("retrieveStats", retrieveStats.longValue()); + consumer.accept("receiveGlobalStats", receiveGlobalStats.longValue()); + consumer.accept("returnLocalStats", 
returnLocalStats.longValue()); + consumer.accept("mergeToGlobalStats", mergeToGlobalStats.longValue()); + consumer.accept("sendGlobalStats", sendGlobalStats.longValue()); + consumer.accept("useCachedGlobalStats", useCachedGlobalStats.longValue()); + consumer.accept("missingGlobalTermStats", missingGlobalTermStats.longValue()); + consumer.accept("missingGlobalFieldStats", missingGlobalFieldStats.longValue()); + } + + public String toString() { + Map map = new HashMap<>(); + getSnapshot(map::put); + return map.toString(); + } + } + + protected StatsCacheMetrics statsCacheMetrics = new StatsCacheMetrics(); + protected PluginInfo pluginInfo; + + public StatsCacheMetrics getCacheMetrics() { + return statsCacheMetrics; + } + + @Override + public void init(PluginInfo info) { + this.pluginInfo = info; + } /** * Creates a {@link ShardRequest} to retrieve per-shard stats related to the * current query and the current state of the requester's {@link StatsCache}. + *

This method updates the cache metrics and calls {@link #doRetrieveStatsRequest(ResponseBuilder)}.

* * @param rb contains current request * @return shard request to retrieve stats for terms in the current request, * or null if no additional request is needed (e.g. if the information * in global cache is already sufficient to satisfy this request). */ - public abstract ShardRequest retrieveStatsRequest(ResponseBuilder rb); + public ShardRequest retrieveStatsRequest(ResponseBuilder rb) { + statsCacheMetrics.retrieveStats.increment(); + return doRetrieveStatsRequest(rb); + } + + protected abstract ShardRequest doRetrieveStatsRequest(ResponseBuilder rb); /** * Prepare a local (from the local shard) response to a "retrieve stats" shard * request. + *

This method updates the cache metrics and calls {@link #doReturnLocalStats(ResponseBuilder, SolrIndexSearcher)}.

* * @param rb response builder * @param searcher current local searcher */ - public abstract void returnLocalStats(ResponseBuilder rb, - SolrIndexSearcher searcher); + public void returnLocalStats(ResponseBuilder rb, SolrIndexSearcher searcher) { + statsCacheMetrics.returnLocalStats.increment(); + doReturnLocalStats(rb, searcher); + } + + protected abstract void doReturnLocalStats(ResponseBuilder rb, SolrIndexSearcher searcher); /** * Process shard responses that contain partial local stats. Usually this * entails combining per-shard stats for each term. + *

This method updates the cache metrics and calls {@link #doMergeToGlobalStats(SolrQueryRequest, List)}.

* * @param req query request * @param responses responses from shards containing local stats for each shard */ - public abstract void mergeToGlobalStats(SolrQueryRequest req, - List responses); + public void mergeToGlobalStats(SolrQueryRequest req, + List responses) { + statsCacheMetrics.mergeToGlobalStats.increment(); + doMergeToGlobalStats(req, responses); + } + + protected abstract void doMergeToGlobalStats(SolrQueryRequest req, List responses); /** - * Receive global stats data from the master and update a local cache of stats + * Receive global stats data from the master and update a local cache of global stats * with this global data. This event occurs either as a separate request, or * together with the regular query request, in which case this method is * called first, before preparing a {@link QueryCommand} to be submitted to * the local {@link SolrIndexSearcher}. + *

This method updates the cache metrics and calls {@link #doReceiveGlobalStats(SolrQueryRequest)}.

* * @param req query request with global stats data */ - public abstract void receiveGlobalStats(SolrQueryRequest req); + public void receiveGlobalStats(SolrQueryRequest req) { + statsCacheMetrics.receiveGlobalStats.increment(); + doReceiveGlobalStats(req); + } + + protected abstract void doReceiveGlobalStats(SolrQueryRequest req); /** * Prepare global stats data to be sent out to shards in this request. + *

This method updates the cache metrics and calls {@link #doSendGlobalStats(ResponseBuilder, ShardRequest)}.

* * @param rb response builder * @param outgoing shard request to be sent */ - public abstract void sendGlobalStats(ResponseBuilder rb, ShardRequest outgoing); + public void sendGlobalStats(ResponseBuilder rb, ShardRequest outgoing) { + statsCacheMetrics.sendGlobalStats.increment(); + doSendGlobalStats(rb, outgoing); + } + + protected abstract void doSendGlobalStats(ResponseBuilder rb, ShardRequest outgoing); /** - * Prepare local {@link StatsSource} to provide stats information to perform + * Prepare a {@link StatsSource} that provides stats information to perform * local scoring (to be precise, to build a local {@link Weight} from the * query). + *

This method updates the cache metrics and calls {@link #doGet(SolrQueryRequest)}.

* * @param req query request * @return an instance of {@link StatsSource} to use in creating a query * {@link Weight} */ - public abstract StatsSource get(SolrQueryRequest req); + public StatsSource get(SolrQueryRequest req) { + statsCacheMetrics.lookups.increment(); + return doGet(req); + } + + protected abstract StatsSource doGet(SolrQueryRequest req); + + /** + * Clear cached statistics. + */ + public void clear() { + statsCacheMetrics.clear(); + }; + + /** + * Check if the statsSource is missing some term or field statistics info, + * which then needs to be retrieved. + *

NOTE: this uses the local IndexReader for query rewriting, which may expand to less (or different) + * terms as rewriting the same query on other shards' readers. This in turn may falsely fail to inform the consumers + * about possibly missing stats, which may lead consumers to skip the fetching of full stats. Consequently + * this would lead to incorrect global IDF data for the missing terms (because for these terms only local stats + * would be used).

+ * @param rb request to evaluate against the statsSource + * @param statsSource stats source to check + * @param missingTermStats consumer of missing term stats + * @param missingFieldStats consumer of missing field stats + * @return approximate number of missing term stats and field stats combined + */ + public int approxCheckMissingStats(ResponseBuilder rb, StatsSource statsSource, Consumer missingTermStats, Consumer missingFieldStats) throws IOException { + CheckingIndexSearcher checkingSearcher = new CheckingIndexSearcher(statsSource, rb.req.getSearcher().getIndexReader(), missingTermStats, missingFieldStats); + Query q = rb.getQuery(); + q = checkingSearcher.rewrite(q); + checkingSearcher.createWeight(q, ScoreMode.COMPLETE, 1); + return checkingSearcher.missingFieldsCount + checkingSearcher.missingTermsCount; + } + + static final class CheckingIndexSearcher extends IndexSearcher { + final StatsSource statsSource; + final Consumer missingTermStats; + final Consumer missingFieldStats; + int missingTermsCount, missingFieldsCount; + + CheckingIndexSearcher(StatsSource statsSource, IndexReader reader, Consumer missingTermStats, Consumer missingFieldStats) { + super(reader); + this.statsSource = statsSource; + this.missingTermStats = missingTermStats; + this.missingFieldStats = missingFieldStats; + } + + @Override + public TermStatistics termStatistics(Term term, int docFreq, long totalTermFreq) throws IOException { + if (statsSource.termStatistics(null, term, docFreq, totalTermFreq) == null) { + missingTermStats.accept(term); + missingTermsCount++; + } + return super.termStatistics(term, docFreq, totalTermFreq); + } + @Override + public CollectionStatistics collectionStatistics(String field) throws IOException { + if (statsSource.collectionStatistics(null, field) == null) { + missingFieldStats.accept(field); + missingFieldsCount++; + } + return super.collectionStatistics(field); + } + } } diff --git 
a/solr/core/src/java/org/apache/solr/search/stats/StatsSource.java b/solr/core/src/java/org/apache/solr/search/stats/StatsSource.java index c187fef16fc2..735e22d05286 100644 --- a/solr/core/src/java/org/apache/solr/search/stats/StatsSource.java +++ b/solr/core/src/java/org/apache/solr/search/stats/StatsSource.java @@ -19,7 +19,6 @@ import java.io.IOException; import org.apache.lucene.index.Term; -import org.apache.lucene.index.TermStates; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermStatistics; @@ -34,7 +33,7 @@ */ public abstract class StatsSource { - public abstract TermStatistics termStatistics(SolrIndexSearcher localSearcher, Term term, TermStates context) + public abstract TermStatistics termStatistics(SolrIndexSearcher localSearcher, Term term, int docFreq, long totalTermFreq) throws IOException; public abstract CollectionStatistics collectionStatistics(SolrIndexSearcher localSearcher, String field) diff --git a/solr/core/src/java/org/apache/solr/search/stats/StatsUtil.java b/solr/core/src/java/org/apache/solr/search/stats/StatsUtil.java index 21377d0aeff5..40d41558ac32 100644 --- a/solr/core/src/java/org/apache/solr/search/stats/StatsUtil.java +++ b/solr/core/src/java/org/apache/solr/search/stats/StatsUtil.java @@ -16,25 +16,126 @@ */ package org.apache.solr.search.stats; +import java.io.IOException; +import java.io.UnsupportedEncodingException; import java.lang.invoke.MethodHandles; +import java.net.URLDecoder; +import java.net.URLEncoder; +import java.util.Collection; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.Map.Entry; +import java.util.Set; import org.apache.lucene.index.Term; -import org.apache.lucene.util.BytesRef; -import org.apache.solr.common.util.Base64; +import org.apache.solr.common.util.Utils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Various utilities for de/serialization of term stats and 
collection stats. + *

TODO: serialization format is very simple and does nothing to compress the data.

*/ public class StatsUtil { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - + + public static final String ENTRY_SEPARATOR = "!"; + public static final char ENTRY_SEPARATOR_CHAR = '!'; + + /** + * Parse a list of urls separated by "|" in order to retrieve a shard name. + * @param collectionName collection name + * @param shardUrls list of urls + * @return shard name, or shardUrl if no shard info is present, + * or null if impossible to determine (eg. empty string) + */ + public static String shardUrlToShard(String collectionName, String shardUrls) { + // we may get multiple replica urls + String[] urls = shardUrls.split("\\|"); + if (urls.length == 0) { + return null; + } + String[] urlParts = urls[0].split("/"); + String coreName = urlParts[urlParts.length - 1]; + String replicaName = Utils.parseMetricsReplicaName(collectionName, coreName); + String shard; + if (replicaName != null) { + shard = coreName.substring(collectionName.length() + 1); + shard = shard.substring(0, shard.length() - replicaName.length() - 1); + } else { + if (coreName.length() > collectionName.length() && coreName.startsWith(collectionName)) { + shard = coreName.substring(collectionName.length() + 1); + if (shard.isEmpty()) { + shard = urls[0]; + } + } else { + shard = urls[0]; + } + } + return shard; + } + + public static String termsToEncodedString(Collection terms) { + StringBuilder sb = new StringBuilder(); + for (Object o : terms) { + if (sb.length() > 0) { + sb.append(ENTRY_SEPARATOR); + } + if (o instanceof Term) { + sb.append(termToEncodedString((Term) o)); + } else { + sb.append(termToEncodedString(String.valueOf(o))); + } + } + return sb.toString(); + } + + public static Set termsFromEncodedString(String data) { + Set terms = new HashSet<>(); + if (data == null || data.trim().isEmpty()) { + return terms; + } + String[] items = data.split(ENTRY_SEPARATOR); + for (String item : items) { + Term t = termFromEncodedString(item); + if (t 
!= null) { + terms.add(t); + } + } + return terms; + } + + public static Set fieldsFromString(String data) { + Set fields = new HashSet<>(); + if (data == null || data.trim().isEmpty()) { + return fields; + } + String[] items = data.split(ENTRY_SEPARATOR); + for (String item : items) { + if (!item.trim().isEmpty()) { + fields.add(item); + } + } + return fields; + } + + public static String fieldsToString(Collection fields) { + StringBuilder sb = new StringBuilder(); + for (String field : fields) { + if (field.trim().isEmpty()) { + continue; + } + if (sb.length() > 0) { + sb.append(ENTRY_SEPARATOR); + } + sb.append(field); + } + return sb.toString(); + } + /** * Make a String representation of {@link CollectionStats} */ @@ -42,13 +143,13 @@ public static String colStatsToString(CollectionStats colStats) { StringBuilder sb = new StringBuilder(); sb.append(colStats.field); sb.append(','); - sb.append(String.valueOf(colStats.maxDoc)); + sb.append(colStats.maxDoc); sb.append(','); - sb.append(String.valueOf(colStats.docCount)); + sb.append(colStats.docCount); sb.append(','); - sb.append(String.valueOf(colStats.sumTotalTermFreq)); + sb.append(colStats.sumTotalTermFreq); sb.append(','); - sb.append(String.valueOf(colStats.sumDocFreq)); + sb.append(colStats.sumDocFreq); return sb.toString(); } @@ -78,15 +179,73 @@ private static CollectionStats colStatsFromString(String data) { } } - public static String termToString(Term t) { + public static String termToEncodedString(Term t) { StringBuilder sb = new StringBuilder(); sb.append(t.field()).append(':'); - BytesRef bytes = t.bytes(); - sb.append(Base64.byteArrayToBase64(bytes.bytes, bytes.offset, bytes.offset)); + sb.append(encode(t.text())); return sb.toString(); } + + public static final char ESCAPE = '_'; + public static final char ESCAPE_ENTRY_SEPARATOR = '0'; + + public static String encode(String value) { + StringBuilder output = new StringBuilder(value.length() + 2); + for (int i = 0; i < value.length(); i++) { + char 
c = value.charAt(i); + switch (c) { + case ESCAPE : + output.append(ESCAPE).append(ESCAPE); + break; + case ENTRY_SEPARATOR_CHAR : + output.append(ESCAPE).append(ESCAPE_ENTRY_SEPARATOR); + break; + default : + output.append(c); + } + } + try { + return URLEncoder.encode(output.toString(), "UTF-8"); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException("Apparently your JVM doesn't support UTF-8 encoding?", e); + } + } + + public static String decode(String value) throws IOException { + value = URLDecoder.decode(value, "UTF-8"); + StringBuilder output = new StringBuilder(value.length()); + for (int i = 0; i < value.length(); i++) { + char c = value.charAt(i); + // escaped char follows + if (c == ESCAPE && i < value.length() - 1) { + i++; + char next = value.charAt(i); + if (next == ESCAPE) { + output.append(ESCAPE); + } else if (next == ESCAPE_ENTRY_SEPARATOR) { + output.append(ENTRY_SEPARATOR_CHAR); + } else { + throw new IOException("invalid escape sequence in " + value); + } + } else { + output.append(c); + } + } + return output.toString(); + } + + public static String termToEncodedString(String term) { + int idx = term.indexOf(':'); + if (idx == -1) { + log.warn("Invalid term data without ':': '" + term + "'"); + return null; + } + String prefix = term.substring(0, idx + 1); + String value = term.substring(idx + 1); + return prefix + encode(value); + } - private static Term termFromString(String data) { + public static Term termFromEncodedString(String data) { if (data == null || data.trim().length() == 0) { log.warn("Invalid empty term value"); return null; @@ -99,76 +258,50 @@ private static Term termFromString(String data) { String field = data.substring(0, idx); String value = data.substring(idx + 1); try { - return new Term(field, value); - // XXX this would be more correct - // byte[] bytes = Base64.base64ToByteArray(value); - // return new Term(field, new BytesRef(bytes)); + return new Term(field, decode(value)); } catch (Exception e) { 
log.warn("Invalid term value '" + value + "'"); return null; } } - public static String termStatsToString(TermStats termStats, - boolean includeTerm) { + public static String termStatsToString(TermStats termStats, boolean encode) { StringBuilder sb = new StringBuilder(); - if (includeTerm) { - sb.append(termStats.term).append(','); - } - sb.append(String.valueOf(termStats.docFreq)); + sb.append(encode ? termToEncodedString(termStats.term) : termStats.term).append(','); + sb.append(termStats.docFreq); sb.append(','); - sb.append(String.valueOf(termStats.totalTermFreq)); + sb.append(termStats.totalTermFreq); return sb.toString(); } - private static TermStats termStatsFromString(String data, Term t) { + private static TermStats termStatsFromString(String data) { if (data == null || data.trim().length() == 0) { log.warn("Invalid empty term stats string"); return null; } String[] vals = data.split(","); - if (vals.length < 2) { + if (vals.length < 3) { log.warn("Invalid term stats string, num fields " + vals.length - + " < 2, '" + data + "'"); - return null; - } - Term termToUse; - int idx = 0; - if (vals.length == 3) { - idx++; - // with term - Term term = termFromString(vals[0]); - if (term != null) { - termToUse = term; - if (t != null) { - assert term.equals(t); - } - } else { // failed term decoding - termToUse = t; - } - } else { - termToUse = t; - } - if (termToUse == null) { - log.warn("Missing term in termStats '" + data + "'"); + + " < 3, '" + data + "'"); return null; } + Term term = termFromEncodedString(vals[0]); try { - long docFreq = Long.parseLong(vals[idx++]); - long totalTermFreq = Long.parseLong(vals[idx]); - return new TermStats(termToUse.toString(), docFreq, totalTermFreq); + long docFreq = Long.parseLong(vals[1]); + long totalTermFreq = Long.parseLong(vals[2]); + return new TermStats(term.toString(), docFreq, totalTermFreq); } catch (Exception e) { log.warn("Invalid termStats string '" + data + "'"); return null; } } - + public static Map 
colStatsMapFromString(String data) { if (data == null || data.trim().length() == 0) { return null; } Map map = new HashMap(); - String[] entries = data.split("!"); + String[] entries = data.split(ENTRY_SEPARATOR); for (String es : entries) { CollectionStats stats = colStatsFromString(es); if (stats != null) { @@ -185,7 +318,7 @@ public static String colStatsMapToString(Map stats) { StringBuilder sb = new StringBuilder(); for (Entry e : stats.entrySet()) { if (sb.length() > 0) { - sb.append('!'); + sb.append(ENTRY_SEPARATOR); } sb.append(colStatsToString(e.getValue())); } @@ -197,9 +330,9 @@ public static Map termStatsMapFromString(String data) { return null; } Map map = new HashMap<>(); - String[] entries = data.split("!"); + String[] entries = data.split(ENTRY_SEPARATOR); for (String es : entries) { - TermStats termStats = termStatsFromString(es, null); + TermStats termStats = termStatsFromString(es); if (termStats != null) { map.put(termStats.term, termStats); } @@ -214,7 +347,7 @@ public static String termStatsMapToString(Map stats) { StringBuilder sb = new StringBuilder(); for (Entry e : stats.entrySet()) { if (sb.length() > 0) { - sb.append('!'); + sb.append(ENTRY_SEPARATOR); } sb.append(termStatsToString(e.getValue(), true)); } diff --git a/solr/core/src/java/org/apache/solr/search/stats/TermStats.java b/solr/core/src/java/org/apache/solr/search/stats/TermStats.java index 9977b285d563..ef059e9e8dc2 100644 --- a/solr/core/src/java/org/apache/solr/search/stats/TermStats.java +++ b/solr/core/src/java/org/apache/solr/search/stats/TermStats.java @@ -33,7 +33,7 @@ public TermStats(String term) { this.term = term; t = makeTerm(term); } - + private Term makeTerm(String s) { int idx = s.indexOf(':'); if (idx == -1) { @@ -68,6 +68,6 @@ public TermStatistics toTermStatistics() { } public String toString() { - return StatsUtil.termStatsToString(this, true); + return StatsUtil.termStatsToString(this, false); } } diff --git 
a/solr/core/src/java/org/apache/solr/security/AuditEvent.java b/solr/core/src/java/org/apache/solr/security/AuditEvent.java index f9c45be1a256..492384ee69ad 100644 --- a/solr/core/src/java/org/apache/solr/security/AuditEvent.java +++ b/solr/core/src/java/org/apache/solr/security/AuditEvent.java @@ -31,6 +31,7 @@ import org.apache.solr.common.SolrException; import org.apache.solr.common.cloud.ZkStateReader; +import org.apache.solr.servlet.SolrRequestParsers; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.MDC; @@ -129,12 +130,15 @@ public AuditEvent(EventType eventType, Throwable exception, HttpServletRequest h this.solrPort = httpRequest.getLocalPort(); this.solrIp = httpRequest.getLocalAddr(); this.clientIp = httpRequest.getRemoteAddr(); - this.resource = httpRequest.getContextPath(); + this.resource = httpRequest.getPathInfo(); this.httpMethod = httpRequest.getMethod(); this.httpQueryString = httpRequest.getQueryString(); this.headers = getHeadersFromRequest(httpRequest); this.requestUrl = httpRequest.getRequestURL(); this.nodeName = MDC.get(ZkStateReader.NODE_NAME_PROP); + SolrRequestParsers.parseQueryString(httpQueryString).forEach(sp -> { + this.solrParams.put(sp.getKey(), Arrays.asList(sp.getValue())); + }); setRequestType(findRequestType()); @@ -459,14 +463,14 @@ private RequestType findRequestType() { } private static final List ADMIN_PATH_REGEXES = Arrays.asList( - "^/solr/admin/.*", - "^/api/(c|collections)/$", - "^/api/(c|collections)/[^/]+/config$", - "^/api/(c|collections)/[^/]+/schema$", - "^/api/(c|collections)/[^/]+/shards.*", - "^/api/cores.*$", - "^/api/node$", - "^/api/cluster$"); + "^/admin/.*", + "^/(____v2|api)/(c|collections)$", + "^/(____v2|api)/(c|collections)/[^/]+/config$", + "^/(____v2|api)/(c|collections)/[^/]+/schema$", + "^/(____v2|api)/(c|collections)/[^/]+/shards.*", + "^/(____v2|api)/cores.*$", + "^/(____v2|api)/node$", + "^/(____v2|api)/cluster$"); private static final List STREAMING_PATH_REGEXES = 
Collections.singletonList(".*/stream.*"); diff --git a/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java b/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java index a6c364acf38d..d5ff666d3804 100644 --- a/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java +++ b/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java @@ -36,7 +36,6 @@ import com.codahale.metrics.Counter; import com.codahale.metrics.Meter; -import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.databind.ObjectMapper; @@ -45,8 +44,8 @@ import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.SolrjNamedThreadFactory; import org.apache.solr.core.SolrInfoBean; -import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.metrics.SolrMetricProducer; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.security.AuditEvent.EventType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -75,14 +74,12 @@ public abstract class AuditLoggerPlugin implements Closeable, Runnable, SolrInfo int blockingQueueSize; protected AuditEventFormatter formatter; - private MetricRegistry registry; private Set metricNames = ConcurrentHashMap.newKeySet(); private ExecutorService executorService; private boolean closed; private MuteRules muteRules; - - protected String registryName; - protected SolrMetricManager metricManager; + + protected SolrMetricsContext solrMetricsContext; protected Meter numErrors = new Meter(); protected Meter numLost = new Meter(); protected Meter numLogged = new Meter(); @@ -239,24 +236,21 @@ public void setFormatter(AuditEventFormatter formatter) { } @Override - public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, final String scope) { + public void initializeMetrics(SolrMetricsContext parentContext, final String scope) { + 
solrMetricsContext = parentContext.getChildContext(this); String className = this.getClass().getSimpleName(); log.debug("Initializing metrics for {}", className); - this.metricManager = manager; - this.registryName = registryName; - // Metrics - registry = manager.registry(registryName); - numErrors = manager.meter(this, registryName, "errors", getCategory().toString(), scope, className); - numLost = manager.meter(this, registryName, "lost", getCategory().toString(), scope, className); - numLogged = manager.meter(this, registryName, "count", getCategory().toString(), scope, className); - requestTimes = manager.timer(this, registryName, "requestTimes", getCategory().toString(), scope, className); - totalTime = manager.counter(this, registryName, "totalTime", getCategory().toString(), scope, className); + numErrors = solrMetricsContext.meter(this, "errors", getCategory().toString(), scope, className); + numLost = solrMetricsContext.meter(this, "lost", getCategory().toString(), scope, className); + numLogged = solrMetricsContext.meter(this, "count", getCategory().toString(), scope, className); + requestTimes = solrMetricsContext.timer(this, "requestTimes", getCategory().toString(), scope, className); + totalTime = solrMetricsContext.counter(this, "totalTime", getCategory().toString(), scope, className); if (async) { - manager.registerGauge(this, registryName, () -> blockingQueueSize, "queueCapacity", true, "queueCapacity", getCategory().toString(), scope, className); - manager.registerGauge(this, registryName, () -> blockingQueueSize - queue.remainingCapacity(), "queueSize", true, "queueSize", getCategory().toString(), scope, className); - queuedTime = manager.timer(this, registryName, "queuedTime", getCategory().toString(), scope, className); + solrMetricsContext.gauge(this, () -> blockingQueueSize, true, "queueCapacity", getCategory().toString(), scope, className); + solrMetricsContext.gauge(this, () -> blockingQueueSize - queue.remainingCapacity(), true, 
"queueSize", getCategory().toString(), scope, className); + queuedTime = solrMetricsContext.timer(this, "queuedTime", getCategory().toString(), scope, className); } - manager.registerGauge(this, registryName, () -> async, "async", true, "async", getCategory().toString(), scope, className); + solrMetricsContext.gauge(this, () -> async, true, "async", getCategory().toString(), scope, className); } @Override @@ -280,10 +274,10 @@ public Set getMetricNames() { } @Override - public MetricRegistry getMetricRegistry() { - return registry; + public SolrMetricsContext getSolrMetricsContext() { + return solrMetricsContext; } - + /** * Interface for formatting the event */ @@ -325,6 +319,11 @@ public void close() throws IOException { closed = true; log.info("Shutting down async Auditlogger background thread(s)"); executorService.shutdownNow(); + try { + SolrMetricProducer.super.close(); + } catch (Exception e) { + throw new IOException("Exception closing", e); + } } } diff --git a/solr/core/src/java/org/apache/solr/security/AuthenticationPlugin.java b/solr/core/src/java/org/apache/solr/security/AuthenticationPlugin.java index a0ec89ab5542..320f661c5fc6 100644 --- a/solr/core/src/java/org/apache/solr/security/AuthenticationPlugin.java +++ b/solr/core/src/java/org/apache/solr/security/AuthenticationPlugin.java @@ -25,13 +25,12 @@ import com.codahale.metrics.Counter; import com.codahale.metrics.Meter; -import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; import org.apache.http.HttpRequest; import org.apache.http.protocol.HttpContext; import org.apache.solr.core.SolrInfoBean; import org.apache.solr.metrics.SolrMetricProducer; -import org.apache.solr.metrics.SolrMetrics; +import org.apache.solr.metrics.SolrMetricsContext; import org.eclipse.jetty.client.api.Request; /** @@ -45,6 +44,7 @@ public abstract class AuthenticationPlugin implements SolrInfoBean, SolrMetricPr // Metrics private Set metricNames = ConcurrentHashMap.newKeySet(); + protected 
SolrMetricsContext solrMetricsContext; protected Meter numErrors = new Meter(); protected Counter requests = new Counter(); @@ -137,25 +137,24 @@ protected boolean interceptInternodeRequest(Request request) { */ public void closeRequest() { } - protected SolrMetrics metrics; @Override - public SolrMetrics getMetrics() { - return metrics; + public SolrMetricsContext getSolrMetricsContext() { + return solrMetricsContext; } @Override - public void initializeMetrics(SolrMetrics metrics) { - this.metrics = metrics.getChildInfo(this); + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + this.solrMetricsContext = parentContext.getChildContext(this); // Metrics - numErrors = this.metrics.meter(this, "errors", getCategory().toString()); - requests = this.metrics.counter(this, "requests", getCategory().toString()); - numAuthenticated = this.metrics.counter(this, "authenticated",getCategory().toString()); - numPassThrough = this.metrics.counter(this, "passThrough", getCategory().toString()); - numWrongCredentials = this.metrics.counter(this, "failWrongCredentials",getCategory().toString()); - numMissingCredentials = this.metrics.counter(this, "failMissingCredentials",getCategory().toString()); - requestTimes = this.metrics.timer(this,"requestTimes", getCategory().toString()); - totalTime = this.metrics.counter(this,"totalTime", getCategory().toString()); + numErrors = this.solrMetricsContext.meter(this, "errors", getCategory().toString(), scope); + requests = this.solrMetricsContext.counter(this, "requests", getCategory().toString(), scope); + numAuthenticated = this.solrMetricsContext.counter(this, "authenticated",getCategory().toString(), scope); + numPassThrough = this.solrMetricsContext.counter(this, "passThrough", getCategory().toString(), scope); + numWrongCredentials = this.solrMetricsContext.counter(this, "failWrongCredentials",getCategory().toString(), scope); + numMissingCredentials = this.solrMetricsContext.counter(this, 
"failMissingCredentials",getCategory().toString(), scope); + requestTimes = this.solrMetricsContext.timer(this,"requestTimes", getCategory().toString(), scope); + totalTime = this.solrMetricsContext.counter(this,"totalTime", getCategory().toString(), scope); } @Override @@ -177,10 +176,4 @@ public Category getCategory() { public Set getMetricNames() { return metricNames; } - - @Override - public MetricRegistry getMetricRegistry() { - return metrics == null ? null : metrics.getRegistry(); - } - } diff --git a/solr/core/src/java/org/apache/solr/security/AuthorizationContext.java b/solr/core/src/java/org/apache/solr/security/AuthorizationContext.java index 65363a7f0948..41236fffd45f 100644 --- a/solr/core/src/java/org/apache/solr/security/AuthorizationContext.java +++ b/solr/core/src/java/org/apache/solr/security/AuthorizationContext.java @@ -33,6 +33,11 @@ public static class CollectionRequest { public CollectionRequest(String collectionName) { this.collectionName = collectionName; } + + @Override + public String toString() { + return getClass().getSimpleName()+ "(" + collectionName + ")"; + } } public abstract SolrParams getParams() ; diff --git a/solr/core/src/java/org/apache/solr/security/JWTAuthPlugin.java b/solr/core/src/java/org/apache/solr/security/JWTAuthPlugin.java index c5ba67c6bc29..e642751e2e5d 100644 --- a/solr/core/src/java/org/apache/solr/security/JWTAuthPlugin.java +++ b/solr/core/src/java/org/apache/solr/security/JWTAuthPlugin.java @@ -22,13 +22,8 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequestWrapper; import javax.servlet.http.HttpServletResponse; -import java.io.ByteArrayInputStream; import java.io.IOException; -import java.io.InputStream; import java.lang.invoke.MethodHandles; -import java.net.MalformedURLException; -import java.net.URL; -import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.security.Principal; import java.time.Instant; @@ -39,6 +34,8 @@ import 
java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.Optional; import java.util.Set; import java.util.StringTokenizer; import java.util.regex.Pattern; @@ -61,17 +58,12 @@ import org.eclipse.jetty.client.api.Request; import org.jose4j.jwa.AlgorithmConstraints; import org.jose4j.jwk.HttpsJwks; -import org.jose4j.jwk.JsonWebKey; -import org.jose4j.jwk.JsonWebKeySet; import org.jose4j.jwt.JwtClaims; import org.jose4j.jwt.MalformedClaimException; import org.jose4j.jwt.consumer.InvalidJwtException; import org.jose4j.jwt.consumer.InvalidJwtSignatureException; import org.jose4j.jwt.consumer.JwtConsumer; import org.jose4j.jwt.consumer.JwtConsumerBuilder; -import org.jose4j.keys.resolvers.HttpsJwksVerificationKeyResolver; -import org.jose4j.keys.resolvers.JwksVerificationKeyResolver; -import org.jose4j.keys.resolvers.VerificationKeyResolver; import org.jose4j.lang.JoseException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -82,11 +74,8 @@ public class JWTAuthPlugin extends AuthenticationPlugin implements SpecProvider, ConfigEditablePlugin { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final String PARAM_BLOCK_UNKNOWN = "blockUnknown"; - private static final String PARAM_JWK_URL = "jwkUrl"; - private static final String PARAM_JWK = "jwk"; - private static final String PARAM_ISSUER = "iss"; - private static final String PARAM_AUDIENCE = "aud"; private static final String PARAM_REQUIRE_SUBJECT = "requireSub"; + private static final String PARAM_REQUIRE_ISSUER = "requireIss"; private static final String PARAM_PRINCIPAL_CLAIM = "principalClaim"; private static final String PARAM_REQUIRE_EXPIRATIONTIME = "requireExp"; private static final String PARAM_ALG_WHITELIST = "algWhitelist"; @@ -95,41 +84,39 @@ public class JWTAuthPlugin extends AuthenticationPlugin implements SpecProvider, private static final String PARAM_SCOPE = "scope"; private 
static final String PARAM_ADMINUI_SCOPE = "adminUiScope"; private static final String PARAM_REDIRECT_URIS = "redirectUris"; - private static final String PARAM_CLIENT_ID = "clientId"; - private static final String PARAM_WELL_KNOWN_URL = "wellKnownUrl"; - private static final String PARAM_AUTHORIZATION_ENDPOINT = "authorizationEndpoint"; + private static final String PARAM_ISSUERS = "issuers"; + private static final String PARAM_REALM = "realm"; - private static final String AUTH_REALM = "solr-jwt"; + private static final String DEFAULT_AUTH_REALM = "solr-jwt"; private static final String CLAIM_SCOPE = "scope"; private static final long RETRY_INIT_DELAY_SECONDS = 30; - - private static final Set PROPS = ImmutableSet.of(PARAM_BLOCK_UNKNOWN, PARAM_JWK_URL, PARAM_JWK, PARAM_ISSUER, - PARAM_AUDIENCE, PARAM_REQUIRE_SUBJECT, PARAM_PRINCIPAL_CLAIM, PARAM_REQUIRE_EXPIRATIONTIME, PARAM_ALG_WHITELIST, - PARAM_JWK_CACHE_DURATION, PARAM_CLAIMS_MATCH, PARAM_SCOPE, PARAM_CLIENT_ID, PARAM_WELL_KNOWN_URL, - PARAM_AUTHORIZATION_ENDPOINT, PARAM_ADMINUI_SCOPE, PARAM_REDIRECT_URIS); + private static final long DEFAULT_REFRESH_REPRIEVE_THRESHOLD = 5000; + static final String PRIMARY_ISSUER = "PRIMARY"; + + private static final Set PROPS = ImmutableSet.of(PARAM_BLOCK_UNKNOWN, + PARAM_REQUIRE_SUBJECT, PARAM_PRINCIPAL_CLAIM, PARAM_REQUIRE_EXPIRATIONTIME, PARAM_ALG_WHITELIST, + PARAM_JWK_CACHE_DURATION, PARAM_CLAIMS_MATCH, PARAM_SCOPE, PARAM_REALM, + PARAM_ADMINUI_SCOPE, PARAM_REDIRECT_URIS, PARAM_REQUIRE_ISSUER, PARAM_ISSUERS, + // These keys are supported for now to enable PRIMARY issuer config through top-level keys + JWTIssuerConfig.PARAM_JWK_URL, JWTIssuerConfig.PARAM_JWKS_URL, JWTIssuerConfig.PARAM_JWK, JWTIssuerConfig.PARAM_ISSUER, + JWTIssuerConfig.PARAM_CLIENT_ID, JWTIssuerConfig.PARAM_WELL_KNOWN_URL, JWTIssuerConfig.PARAM_AUDIENCE, + JWTIssuerConfig.PARAM_AUTHORIZATION_ENDPOINT); private JwtConsumer jwtConsumer; - private String iss; - private String aud; - private boolean 
requireSubject; private boolean requireExpirationTime; private List algWhitelist; - VerificationKeyResolver verificationKeyResolver; private String principalClaim; private HashMap claimsMatchCompiled; private boolean blockUnknown; private List requiredScopes = new ArrayList<>(); - private String clientId; - private long jwkCacheDuration; - private WellKnownDiscoveryConfig oidcDiscoveryConfig; - private String confIdpConfigUrl; private Map pluginConfig; private Instant lastInitTime = Instant.now(); - private String authorizationEndpoint; private String adminUiScope; private List redirectUris; - private HttpsJwks httpsJkws; - + private List issuerConfigs; + private boolean requireIssuer; + private JWTVerificationkeyResolver verificationKeyResolver; + String realm; /** * Initialize plugin @@ -139,6 +126,8 @@ public JWTAuthPlugin() {} @SuppressWarnings("unchecked") @Override public void init(Map pluginConfig) { + this.pluginConfig = pluginConfig; + this.issuerConfigs = null; List unknownKeys = pluginConfig.keySet().stream().filter(k -> !PROPS.contains(k)).collect(Collectors.toList()); unknownKeys.remove("class"); unknownKeys.remove(""); @@ -147,69 +136,16 @@ public void init(Map pluginConfig) { } blockUnknown = Boolean.parseBoolean(String.valueOf(pluginConfig.getOrDefault(PARAM_BLOCK_UNKNOWN, false))); - clientId = (String) pluginConfig.get(PARAM_CLIENT_ID); - requireSubject = Boolean.parseBoolean(String.valueOf(pluginConfig.getOrDefault(PARAM_REQUIRE_SUBJECT, "true"))); + requireIssuer = Boolean.parseBoolean(String.valueOf(pluginConfig.getOrDefault(PARAM_REQUIRE_ISSUER, "true"))); requireExpirationTime = Boolean.parseBoolean(String.valueOf(pluginConfig.getOrDefault(PARAM_REQUIRE_EXPIRATIONTIME, "true"))); - principalClaim = (String) pluginConfig.getOrDefault(PARAM_PRINCIPAL_CLAIM, "sub"); - confIdpConfigUrl = (String) pluginConfig.get(PARAM_WELL_KNOWN_URL); - Object redirectUrisObj = pluginConfig.get(PARAM_REDIRECT_URIS); - redirectUris = Collections.emptyList(); - if 
(redirectUrisObj != null) { - if (redirectUrisObj instanceof String) { - redirectUris = Collections.singletonList((String) redirectUrisObj); - } else if (redirectUrisObj instanceof List) { - redirectUris = (List) redirectUrisObj; - } - } - - if (confIdpConfigUrl != null) { - log.debug("Initializing well-known oidc config from {}", confIdpConfigUrl); - oidcDiscoveryConfig = WellKnownDiscoveryConfig.parse(confIdpConfigUrl); - iss = oidcDiscoveryConfig.getIssuer(); - authorizationEndpoint = oidcDiscoveryConfig.getAuthorizationEndpoint(); - } - - if (pluginConfig.containsKey(PARAM_ISSUER)) { - if (iss != null) { - log.debug("Explicitly setting required issuer instead of using issuer from well-known config"); - } - iss = (String) pluginConfig.get(PARAM_ISSUER); - } - - if (pluginConfig.containsKey(PARAM_AUTHORIZATION_ENDPOINT)) { - if (authorizationEndpoint != null) { - log.debug("Explicitly setting authorizationEndpoint instead of using issuer from well-known config"); - } - authorizationEndpoint = (String) pluginConfig.get(PARAM_AUTHORIZATION_ENDPOINT); - } - - if (pluginConfig.containsKey(PARAM_AUDIENCE)) { - if (clientId != null) { - log.debug("Explicitly setting required audience instead of using configured clientId"); - } - aud = (String) pluginConfig.get(PARAM_AUDIENCE); - } else { - aud = clientId; + if (pluginConfig.get(PARAM_REQUIRE_SUBJECT) != null) { + log.warn("Parameter {} is no longer used and may generate error in a later version. 
A subject claim is now always required", + PARAM_REQUIRE_SUBJECT); } - + principalClaim = (String) pluginConfig.getOrDefault(PARAM_PRINCIPAL_CLAIM, "sub"); algWhitelist = (List) pluginConfig.get(PARAM_ALG_WHITELIST); + realm = (String) pluginConfig.getOrDefault(PARAM_REALM, DEFAULT_AUTH_REALM); - String requiredScopesStr = (String) pluginConfig.get(PARAM_SCOPE); - if (!StringUtils.isEmpty(requiredScopesStr)) { - requiredScopes = Arrays.asList(requiredScopesStr.split("\\s+")); - } - - adminUiScope = (String) pluginConfig.get(PARAM_ADMINUI_SCOPE); - if (adminUiScope == null && requiredScopes.size() > 0) { - adminUiScope = requiredScopes.get(0); - log.warn("No adminUiScope given, using first scope in 'scope' list as required scope for accessing Admin UI"); - } - - if (adminUiScope == null) { - adminUiScope = "solr"; - log.warn("Warning: No adminUiScope provided, fallback to 'solr' as required scope. If this is not correct, the Admin UI login may not work"); - } - Map claimsMatch = (Map) pluginConfig.get(PARAM_CLAIMS_MATCH); claimsMatchCompiled = new HashMap<>(); if (claimsMatch != null) { @@ -218,75 +154,118 @@ public void init(Map pluginConfig) { } } - initJwk(pluginConfig); + String requiredScopesStr = (String) pluginConfig.get(PARAM_SCOPE); + if (!StringUtils.isEmpty(requiredScopesStr)) { + requiredScopes = Arrays.asList(requiredScopesStr.split("\\s+")); + } - lastInitTime = Instant.now(); - } + long jwkCacheDuration = Long.parseLong((String) pluginConfig.getOrDefault(PARAM_JWK_CACHE_DURATION, "3600")); + JWTIssuerConfig.setHttpsJwksFactory(new JWTIssuerConfig.HttpsJwksFactory(jwkCacheDuration, DEFAULT_REFRESH_REPRIEVE_THRESHOLD)); - @SuppressWarnings("unchecked") - private void initJwk(Map pluginConfig) { - this.pluginConfig = pluginConfig; - String confJwkUrl = (String) pluginConfig.get(PARAM_JWK_URL); - Map confJwk = (Map) pluginConfig.get(PARAM_JWK); - jwkCacheDuration = Long.parseLong((String) pluginConfig.getOrDefault(PARAM_JWK_CACHE_DURATION, "3600")); + 
issuerConfigs = new ArrayList<>(); - jwtConsumer = null; - int jwkConfigured = confIdpConfigUrl != null ? 1 : 0; - jwkConfigured += confJwkUrl != null ? 1 : 0; - jwkConfigured += confJwk != null ? 1 : 0; - if (jwkConfigured > 1) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "JWTAuthPlugin needs to configure exactly one of " + - PARAM_WELL_KNOWN_URL + ", " + PARAM_JWK_URL + " and " + PARAM_JWK); - } - if (jwkConfigured == 0) { - log.warn("Initialized JWTAuthPlugin without any JWK config. Requests with jwk header will fail."); - } - if (oidcDiscoveryConfig != null) { - String jwkUrl = oidcDiscoveryConfig.getJwksUrl(); - setupJwkUrl(jwkUrl); - } else if (confJwkUrl != null) { - setupJwkUrl(confJwkUrl); - } else if (confJwk != null) { - try { - JsonWebKeySet jwks = parseJwkSet(confJwk); - verificationKeyResolver = new JwksVerificationKeyResolver(jwks.getJsonWebKeys()); - httpsJkws = null; - } catch (JoseException e) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Invalid JWTAuthPlugin configuration, " + PARAM_JWK + " parse error", e); + // Try to parse an issuer from top level config, and add first (primary issuer) + Optional topLevelIssuer = parseIssuerFromTopLevelConfig(pluginConfig); + topLevelIssuer.ifPresent(ic -> { + issuerConfigs.add(ic); + log.warn("JWTAuthPlugin issuer is configured using top-level configuration keys. 
Please consider using the 'issuers' array instead."); + }); + + // Add issuers from 'issuers' key + issuerConfigs.addAll(parseIssuers(pluginConfig)); + verificationKeyResolver = new JWTVerificationkeyResolver(issuerConfigs, requireIssuer); + + if (issuerConfigs.size() > 0 && getPrimaryIssuer().getAuthorizationEndpoint() != null) { + adminUiScope = (String) pluginConfig.get(PARAM_ADMINUI_SCOPE); + if (adminUiScope == null && requiredScopes.size() > 0) { + adminUiScope = requiredScopes.get(0); + log.warn("No adminUiScope given, using first scope in 'scope' list as required scope for accessing Admin UI"); + } + + if (adminUiScope == null) { + adminUiScope = "solr"; + log.info("No adminUiScope provided, fallback to 'solr' as required scope for Admin UI login may not work"); + } + + Object redirectUrisObj = pluginConfig.get(PARAM_REDIRECT_URIS); + redirectUris = Collections.emptyList(); + if (redirectUrisObj != null) { + if (redirectUrisObj instanceof String) { + redirectUris = Collections.singletonList((String) redirectUrisObj); + } else if (redirectUrisObj instanceof List) { + redirectUris = (List) redirectUrisObj; + } } } + initConsumer(); - log.debug("JWK configured"); + + lastInitTime = Instant.now(); } - void setupJwkUrl(String url) { - // The HttpsJwks retrieves and caches keys from a the given HTTPS JWKS endpoint. + @SuppressWarnings("unchecked") + private Optional parseIssuerFromTopLevelConfig(Map conf) { try { - URL jwkUrl = new URL(url); - if (!"https".equalsIgnoreCase(jwkUrl.getProtocol())) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, PARAM_JWK_URL + " must be an HTTPS url"); + if (conf.get(JWTIssuerConfig.PARAM_JWK_URL) != null) { + log.warn("Configuration uses deprecated key {}. 
Please use {} instead", JWTIssuerConfig.PARAM_JWK_URL, JWTIssuerConfig.PARAM_JWKS_URL); + } + JWTIssuerConfig primary = new JWTIssuerConfig(PRIMARY_ISSUER) + .setIss((String) conf.get(JWTIssuerConfig.PARAM_ISSUER)) + .setAud((String) conf.get(JWTIssuerConfig.PARAM_AUDIENCE)) + .setJwksUrl(conf.get(JWTIssuerConfig.PARAM_JWKS_URL) != null ? conf.get(JWTIssuerConfig.PARAM_JWKS_URL) : conf.get(JWTIssuerConfig.PARAM_JWK_URL)) + .setAuthorizationEndpoint((String) conf.get(JWTIssuerConfig.PARAM_AUTHORIZATION_ENDPOINT)) + .setClientId((String) conf.get(JWTIssuerConfig.PARAM_CLIENT_ID)) + .setWellKnownUrl((String) conf.get(JWTIssuerConfig.PARAM_WELL_KNOWN_URL)); + if (conf.get(JWTIssuerConfig.PARAM_JWK) != null) { + primary.setJsonWebKeySet(JWTIssuerConfig.parseJwkSet((Map) conf.get(JWTIssuerConfig.PARAM_JWK))); + } + if (primary.isValid()) { + log.debug("Found issuer in top level config"); + primary.init(); + return Optional.of(primary); + } else { + log.debug("No issuer configured in top level config"); + return Optional.empty(); } - } catch (MalformedURLException e) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, PARAM_JWK_URL + " must be a valid URL"); + } catch (JoseException je) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Failed parsing issuer from top level config", je); } - httpsJkws = new HttpsJwks(url); - httpsJkws.setDefaultCacheDuration(jwkCacheDuration); - httpsJkws.setRefreshReprieveThreshold(5000); - verificationKeyResolver = new HttpsJwksVerificationKeyResolver(httpsJkws); } + /** + * Fetch the primary issuer to be used for Admin UI authentication. Callers of this method must ensure that at least + * one issuer is configured. The primary issuer is defined as the first issuer configured in the list. 
+ * @return JWTIssuerConfig object for the primary issuer + */ + JWTIssuerConfig getPrimaryIssuer() { + if (issuerConfigs.size() == 0) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No issuers configured"); + } + return issuerConfigs.get(0); + } + + /** + * Initialize optional additional issuers configured in 'issuers' config map + * @param pluginConfig the main config object + * @return a list of parsed {@link JWTIssuerConfig} objects + */ @SuppressWarnings("unchecked") - JsonWebKeySet parseJwkSet(Map jwkObj) throws JoseException { - JsonWebKeySet webKeySet = new JsonWebKeySet(); - if (jwkObj.containsKey("keys")) { - List jwkList = (List) jwkObj.get("keys"); - for (Object jwkO : jwkList) { - webKeySet.addJsonWebKey(JsonWebKey.Factory.newJwk((Map) jwkO)); + List parseIssuers(Map pluginConfig) { + List configs = new ArrayList<>(); + try { + List> issuers = (List>) pluginConfig.get(PARAM_ISSUERS); + if (issuers != null) { + issuers.forEach(issuerConf -> { + JWTIssuerConfig ic = new JWTIssuerConfig(issuerConf); + ic.init(); + configs.add(ic); + log.debug("Found issuer with name {} and issuerId {}", ic.getName(), ic.getIss()); + }); } - } else { - webKeySet = new JsonWebKeySet(JsonWebKey.Factory.newJwk(jwkObj)); + return configs; + } catch(ClassCastException cce) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parameter " + PARAM_ISSUERS + " has wrong format.", cce); } - return webKeySet; } /** @@ -319,13 +298,22 @@ public boolean doAuthenticate(ServletRequest servletRequest, ServletResponse ser } JWTAuthenticationResponse authResponse = authenticate(header); - if (AuthCode.SIGNATURE_INVALID.equals(authResponse.getAuthCode()) && httpsJkws != null) { - log.warn("Signature validation failed. Refreshing JWKs from IdP before trying again: {}", - authResponse.getJwtException() == null ? 
"" : authResponse.getJwtException().getMessage()); - httpsJkws.refresh(); - authResponse = authenticate(header); - } String exceptionMessage = authResponse.getJwtException() != null ? authResponse.getJwtException().getMessage() : ""; + if (AuthCode.SIGNATURE_INVALID.equals(authResponse.getAuthCode())) { + String issuer = jwtConsumer.processToClaims(header).getIssuer(); + if (issuer != null) { + Optional issuerConfig = issuerConfigs.stream().filter(ic -> issuer.equals(ic.getIss())).findFirst(); + if (issuerConfig.isPresent() && issuerConfig.get().usesHttpsJwk()) { + log.info("Signature validation failed for issuer {}. Refreshing JWKs from IdP before trying again: {}", + issuer, exceptionMessage); + for (HttpsJwks httpsJwks : issuerConfig.get().getHttpsJwks()) { + httpsJwks.refresh(); + } + authResponse = authenticate(header); // Retry + exceptionMessage = authResponse.getJwtException() != null ? authResponse.getJwtException().getMessage() : ""; + } + } + } switch (authResponse.getAuthCode()) { case AUTHENTICATED: @@ -396,74 +384,68 @@ public Principal getUserPrincipal() { */ protected JWTAuthenticationResponse authenticate(String authorizationHeader) { if (authorizationHeader != null) { - StringTokenizer st = new StringTokenizer(authorizationHeader); - if (st.hasMoreTokens()) { - String bearer = st.nextToken(); - if (bearer.equalsIgnoreCase("Bearer") && st.hasMoreTokens()) { + String jwtCompact = parseAuthorizationHeader(authorizationHeader); + if (jwtCompact != null) { + try { try { - String jwtCompact = st.nextToken(); - try { - JwtClaims jwtClaims = jwtConsumer.processToClaims(jwtCompact); - String principal = jwtClaims.getStringClaimValue(principalClaim); - if (principal == null || principal.isEmpty()) { - return new JWTAuthenticationResponse(AuthCode.PRINCIPAL_MISSING, "Cannot identify principal from JWT. Required claim " + principalClaim + " missing. 
Cannot authenticate"); - } - if (claimsMatchCompiled != null) { - for (Map.Entry entry : claimsMatchCompiled.entrySet()) { - String claim = entry.getKey(); - if (jwtClaims.hasClaim(claim)) { - if (!entry.getValue().matcher(jwtClaims.getStringClaimValue(claim)).matches()) { - return new JWTAuthenticationResponse(AuthCode.CLAIM_MISMATCH, - "Claim " + claim + "=" + jwtClaims.getStringClaimValue(claim) - + " does not match required regular expression " + entry.getValue().pattern()); - } - } else { - return new JWTAuthenticationResponse(AuthCode.CLAIM_MISMATCH, "Claim " + claim + " is required but does not exist in JWT"); + JwtClaims jwtClaims = jwtConsumer.processToClaims(jwtCompact); + String principal = jwtClaims.getStringClaimValue(principalClaim); + if (principal == null || principal.isEmpty()) { + return new JWTAuthenticationResponse(AuthCode.PRINCIPAL_MISSING, "Cannot identify principal from JWT. Required claim " + principalClaim + " missing. Cannot authenticate"); + } + if (claimsMatchCompiled != null) { + for (Map.Entry entry : claimsMatchCompiled.entrySet()) { + String claim = entry.getKey(); + if (jwtClaims.hasClaim(claim)) { + if (!entry.getValue().matcher(jwtClaims.getStringClaimValue(claim)).matches()) { + return new JWTAuthenticationResponse(AuthCode.CLAIM_MISMATCH, + "Claim " + claim + "=" + jwtClaims.getStringClaimValue(claim) + + " does not match required regular expression " + entry.getValue().pattern()); } + } else { + return new JWTAuthenticationResponse(AuthCode.CLAIM_MISMATCH, "Claim " + claim + " is required but does not exist in JWT"); } } - if (!requiredScopes.isEmpty() && !jwtClaims.hasClaim(CLAIM_SCOPE)) { - // Fail if we require scopes but they don't exist - return new JWTAuthenticationResponse(AuthCode.CLAIM_MISMATCH, "Claim " + CLAIM_SCOPE + " is required but does not exist in JWT"); + } + if (!requiredScopes.isEmpty() && !jwtClaims.hasClaim(CLAIM_SCOPE)) { + // Fail if we require scopes but they don't exist + return new 
JWTAuthenticationResponse(AuthCode.CLAIM_MISMATCH, "Claim " + CLAIM_SCOPE + " is required but does not exist in JWT"); + } + Set scopes = Collections.emptySet(); + Object scopesObj = jwtClaims.getClaimValue(CLAIM_SCOPE); + if (scopesObj != null) { + if (scopesObj instanceof String) { + scopes = new HashSet<>(Arrays.asList(((String) scopesObj).split("\\s+"))); + } else if (scopesObj instanceof List) { + scopes = new HashSet<>(jwtClaims.getStringListClaimValue(CLAIM_SCOPE)); } - Set scopes = Collections.emptySet(); - Object scopesObj = jwtClaims.getClaimValue(CLAIM_SCOPE); - if (scopesObj != null) { - if (scopesObj instanceof String) { - scopes = new HashSet<>(Arrays.asList(((String) scopesObj).split("\\s+"))); - } else if (scopesObj instanceof List) { - scopes = new HashSet<>(jwtClaims.getStringListClaimValue(CLAIM_SCOPE)); + // Validate that at least one of the required scopes are present in the scope claim + if (!requiredScopes.isEmpty()) { + if (scopes.stream().noneMatch(requiredScopes::contains)) { + return new JWTAuthenticationResponse(AuthCode.SCOPE_MISSING, "Claim " + CLAIM_SCOPE + " does not contain any of the required scopes: " + requiredScopes); } - // Validate that at least one of the required scopes are present in the scope claim - if (!requiredScopes.isEmpty()) { - if (scopes.stream().noneMatch(requiredScopes::contains)) { - return new JWTAuthenticationResponse(AuthCode.SCOPE_MISSING, "Claim " + CLAIM_SCOPE + " does not contain any of the required scopes: " + requiredScopes); - } - } - final Set finalScopes = new HashSet<>(scopes); - finalScopes.remove("openid"); // Remove standard scope - // Pass scopes with principal to signal to any Authorization plugins that user has some verified role claims - return new JWTAuthenticationResponse(AuthCode.AUTHENTICATED, new JWTPrincipalWithUserRoles(principal, jwtCompact, jwtClaims.getClaimsMap(), finalScopes)); - } else { - return new JWTAuthenticationResponse(AuthCode.AUTHENTICATED, new JWTPrincipal(principal, 
jwtCompact, jwtClaims.getClaimsMap())); - } - } catch (InvalidJwtSignatureException ise) { - return new JWTAuthenticationResponse(AuthCode.SIGNATURE_INVALID, ise); - } catch (InvalidJwtException e) { - // Whether or not the JWT has expired being one common reason for invalidity - if (e.hasExpired()) { - return new JWTAuthenticationResponse(AuthCode.JWT_EXPIRED, "Authentication failed due to expired JWT token. Expired at " + e.getJwtContext().getJwtClaims().getExpirationTime()); - } - if (e.getCause() != null && e.getCause() instanceof JoseException && e.getCause().getMessage().contains("Invalid JOSE Compact Serialization")) { - return new JWTAuthenticationResponse(AuthCode.JWT_PARSE_ERROR, e.getCause().getMessage()); } - return new JWTAuthenticationResponse(AuthCode.JWT_VALIDATION_EXCEPTION, e); + final Set finalScopes = new HashSet<>(scopes); + finalScopes.remove("openid"); // Remove standard scope + // Pass scopes with principal to signal to any Authorization plugins that user has some verified role claims + return new JWTAuthenticationResponse(AuthCode.AUTHENTICATED, new JWTPrincipalWithUserRoles(principal, jwtCompact, jwtClaims.getClaimsMap(), finalScopes)); + } else { + return new JWTAuthenticationResponse(AuthCode.AUTHENTICATED, new JWTPrincipal(principal, jwtCompact, jwtClaims.getClaimsMap())); } - } catch (MalformedClaimException e) { - return new JWTAuthenticationResponse(AuthCode.JWT_PARSE_ERROR, "Malformed claim, error was: " + e.getMessage()); + } catch (InvalidJwtSignatureException ise) { + return new JWTAuthenticationResponse(AuthCode.SIGNATURE_INVALID, ise); + } catch (InvalidJwtException e) { + // Whether or not the JWT has expired being one common reason for invalidity + if (e.hasExpired()) { + return new JWTAuthenticationResponse(AuthCode.JWT_EXPIRED, "Authentication failed due to expired JWT token. 
Expired at " + e.getJwtContext().getJwtClaims().getExpirationTime()); + } + if (e.getCause() != null && e.getCause() instanceof JoseException && e.getCause().getMessage().contains("Invalid JOSE Compact Serialization")) { + return new JWTAuthenticationResponse(AuthCode.JWT_PARSE_ERROR, e.getCause().getMessage()); + } + return new JWTAuthenticationResponse(AuthCode.JWT_VALIDATION_EXCEPTION, e); } - } else { - return new JWTAuthenticationResponse(AuthCode.AUTZ_HEADER_PROBLEM, "Authorization header is not in correct format"); + } catch (MalformedClaimException e) { + return new JWTAuthenticationResponse(AuthCode.JWT_PARSE_ERROR, "Malformed claim, error was: " + e.getMessage()); } } else { return new JWTAuthenticationResponse(AuthCode.AUTZ_HEADER_PROBLEM, "Authorization header is not in correct format"); @@ -479,18 +461,31 @@ protected JWTAuthenticationResponse authenticate(String authorizationHeader) { } } + private String parseAuthorizationHeader(String authorizationHeader) { + StringTokenizer st = new StringTokenizer(authorizationHeader); + if (st.hasMoreTokens()) { + String bearer = st.nextToken(); + if (bearer.equalsIgnoreCase("Bearer") && st.hasMoreTokens()) { + return st.nextToken(); + } + } + return null; + } + private void initConsumer() { JwtConsumerBuilder jwtConsumerBuilder = new JwtConsumerBuilder() .setAllowedClockSkewInSeconds(30); // allow some leeway in validating time based claims to account for clock skew - if (iss != null) - jwtConsumerBuilder.setExpectedIssuer(iss); // whom the JWT needs to have been issued by - if (aud != null) { - jwtConsumerBuilder.setExpectedAudience(aud); // to whom the JWT is intended for + String[] issuers = issuerConfigs.stream().map(JWTIssuerConfig::getIss).filter(Objects::nonNull).toArray(String[]::new); + if (issuers.length > 0) { + jwtConsumerBuilder.setExpectedIssuers(requireIssuer, issuers); // whom the JWT needs to have been issued by + } + String[] audiences = 
issuerConfigs.stream().map(JWTIssuerConfig::getAud).filter(Objects::nonNull).toArray(String[]::new); + if (audiences.length > 0) { + jwtConsumerBuilder.setExpectedAudience(audiences); // to whom the JWT is intended for } else { jwtConsumerBuilder.setSkipDefaultAudienceValidation(); } - if (requireSubject) - jwtConsumerBuilder.setRequireSubject(); + jwtConsumerBuilder.setRequireSubject(); if (requireExpirationTime) jwtConsumerBuilder.setRequireExpirationTime(); if (algWhitelist != null) @@ -542,7 +537,7 @@ private enum BearerWwwAuthErrorCode { invalid_request, invalid_token, insufficie private void authenticationFailure(HttpServletResponse response, String message, int httpCode, BearerWwwAuthErrorCode responseError) throws IOException { List wwwAuthParams = new ArrayList<>(); - wwwAuthParams.add("Bearer realm=\"" + AUTH_REALM + "\""); + wwwAuthParams.add("Bearer realm=\"" + realm + "\""); if (responseError != null) { wwwAuthParams.add("error=\"" + responseError + "\""); wwwAuthParams.add("error_description=\"" + message + "\""); @@ -554,16 +549,16 @@ private void authenticationFailure(HttpServletResponse response, String message, } protected String generateAuthDataHeader() { + JWTIssuerConfig primaryIssuer = getPrimaryIssuer(); Map data = new HashMap<>(); - data.put(PARAM_AUTHORIZATION_ENDPOINT, authorizationEndpoint); - data.put("client_id", clientId); + data.put(JWTIssuerConfig.PARAM_AUTHORIZATION_ENDPOINT, primaryIssuer.getAuthorizationEndpoint()); + data.put("client_id", primaryIssuer.getClientId()); data.put("scope", adminUiScope); data.put("redirect_uris", redirectUris); String headerJson = Utils.toJSONString(data); return Base64.byteArrayToBase64(headerJson.getBytes(StandardCharsets.UTF_8)); } - /** * Response for authentication attempt */ @@ -641,73 +636,6 @@ AuthCode getAuthCode() { } } - /** - * Config object for a OpenId Connect well-known config - * Typically exposed through /.well-known/openid-configuration endpoint - */ - public static class 
WellKnownDiscoveryConfig { - private static Map securityConf; - - WellKnownDiscoveryConfig(Map securityConf) { - WellKnownDiscoveryConfig.securityConf = securityConf; - } - - public static WellKnownDiscoveryConfig parse(String urlString) { - try { - URL url = new URL(urlString); - if (!Arrays.asList("https", "file").contains(url.getProtocol())) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Well-known config URL must be HTTPS or file"); - } - return parse(url.openStream()); - } catch (MalformedURLException e) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Well-known config URL " + urlString + " is malformed", e); - } catch (IOException e) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Well-known config could not be read from url " + urlString, e); - } - } - - public static WellKnownDiscoveryConfig parse(String json, Charset charset) { - return parse(new ByteArrayInputStream(json.getBytes(charset))); - } - - @SuppressWarnings("unchecked") - public static WellKnownDiscoveryConfig parse(InputStream configStream) { - securityConf = (Map) Utils.fromJSON(configStream); - return new WellKnownDiscoveryConfig(securityConf); - } - - - public String getJwksUrl() { - return (String) securityConf.get("jwks_uri"); - } - - public String getIssuer() { - return (String) securityConf.get("issuer"); - } - - public String getAuthorizationEndpoint() { - return (String) securityConf.get("authorization_endpoint"); - } - - public String getUserInfoEndpoint() { - return (String) securityConf.get("userinfo_endpoint"); - } - - public String getTokenEndpoint() { - return (String) securityConf.get("token_endpoint"); - } - - @SuppressWarnings("unchecked") - public List getScopesSupported() { - return (List) securityConf.get("scopes_supported"); - } - - @SuppressWarnings("unchecked") - public List getResponseTypesSupported() { - return (List) securityConf.get("response_types_supported"); - } - } - @Override protected boolean 
interceptInternodeRequest(HttpRequest httpRequest, HttpContext httpContext) { if (httpContext instanceof HttpClientContext) { @@ -731,4 +659,17 @@ protected boolean interceptInternodeRequest(Request request) { } return false; } + + public List getIssuerConfigs() { + return issuerConfigs; + } + + /** + * Lookup issuer config by its name + * @param name name property of config + * @return issuer config object or null if not found + */ + public JWTIssuerConfig getIssuerConfigByName(String name) { + return issuerConfigs.stream().filter(ic -> name.equals(ic.getName())).findAny().orElse(null); + } } diff --git a/solr/core/src/java/org/apache/solr/security/JWTIssuerConfig.java b/solr/core/src/java/org/apache/solr/security/JWTIssuerConfig.java new file mode 100644 index 000000000000..65ff808e9446 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/security/JWTIssuerConfig.java @@ -0,0 +1,438 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.solr.security; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.lang.invoke.MethodHandles; +import java.net.MalformedURLException; +import java.net.URL; +import java.nio.charset.Charset; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import org.apache.solr.common.SolrException; +import org.apache.solr.common.util.Utils; +import org.jose4j.jwk.HttpsJwks; +import org.jose4j.jwk.JsonWebKey; +import org.jose4j.jwk.JsonWebKeySet; +import org.jose4j.lang.JoseException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Holds information about an IdP (issuer), such as issuer ID, JWK url(s), keys etc + */ +public class JWTIssuerConfig { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + static final String PARAM_ISS_NAME = "name"; + @Deprecated // Remove this option at some point + static final String PARAM_JWK_URL = "jwkUrl"; + static final String PARAM_JWKS_URL = "jwksUrl"; + static final String PARAM_JWK = "jwk"; + static final String PARAM_ISSUER = "iss"; + static final String PARAM_AUDIENCE = "aud"; + static final String PARAM_WELL_KNOWN_URL = "wellKnownUrl"; + static final String PARAM_AUTHORIZATION_ENDPOINT = "authorizationEndpoint"; + static final String PARAM_CLIENT_ID = "clientId"; + + private static HttpsJwksFactory httpsJwksFactory = + new HttpsJwksFactory(3600, 5000); + private String iss; + private String aud; + private JsonWebKeySet jsonWebKeySet; + private String name; + private List jwksUrl; + private List httpsJwks; + private String wellKnownUrl; + private WellKnownDiscoveryConfig wellKnownDiscoveryConfig; + private String clientId; + private String authorizationEndpoint; + + /** + * Create config for further configuration with setters, builder style. 
+ * Once all values are set, call {@link #init()} before further use + * + * @param name a unique name for this issuer + */ + public JWTIssuerConfig(String name) { + this.name = name; + } + + /** + * Initialize issuer config from a generic configuration map + * + * @param configMap map of configuration keys anv values + */ + public JWTIssuerConfig(Map configMap) { + parseConfigMap(configMap); + } + + /** + * Call this to validate and initialize an object which is populated with setters. + * Init will fetch wellKnownUrl if relevant + * @throws SolrException if issuer is missing + */ + public void init() { + if (!isValid()) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Configuration is not valid"); + } + if (wellKnownUrl != null) { + wellKnownDiscoveryConfig = fetchWellKnown(wellKnownUrl); + if (iss == null) { + iss = wellKnownDiscoveryConfig.getIssuer(); + } + if (jwksUrl == null) { + jwksUrl = Collections.singletonList(wellKnownDiscoveryConfig.getJwksUrl()); + } + if (authorizationEndpoint == null) { + authorizationEndpoint = wellKnownDiscoveryConfig.getAuthorizationEndpoint(); + } + } + if (iss == null && usesHttpsJwk() && !JWTAuthPlugin.PRIMARY_ISSUER.equals(name)) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Missing required config 'iss' for issuer " + getName()); + } + } + + /** + * Parses configuration for one IssuerConfig and sets all variables found + * @throws SolrException if unknown parameter names found in config + */ + protected void parseConfigMap(Map configMap) { + HashMap conf = new HashMap<>(configMap); // Clone + setName((String) conf.get(PARAM_ISS_NAME)); + setWellKnownUrl((String) conf.get(PARAM_WELL_KNOWN_URL)); + setIss((String) conf.get(PARAM_ISSUER)); + setClientId((String) conf.get(PARAM_CLIENT_ID)); + setAud((String) conf.get(PARAM_AUDIENCE)); + if (conf.get(PARAM_JWK_URL) != null) { + log.warn("Configuration uses deprecated key {}. 
Please use {} instead", PARAM_JWK_URL, PARAM_JWKS_URL); + } + Object confJwksUrl = conf.get(PARAM_JWKS_URL) != null ? conf.get(PARAM_JWKS_URL) : conf.get(PARAM_JWK_URL); + setJwksUrl(confJwksUrl); + setJsonWebKeySet(conf.get(PARAM_JWK)); + setAuthorizationEndpoint((String) conf.get(PARAM_AUTHORIZATION_ENDPOINT)); + + conf.remove(PARAM_WELL_KNOWN_URL); + conf.remove(PARAM_ISSUER); + conf.remove(PARAM_ISS_NAME); + conf.remove(PARAM_CLIENT_ID); + conf.remove(PARAM_AUDIENCE); + conf.remove(PARAM_JWKS_URL); + conf.remove(PARAM_JWK_URL); + conf.remove(PARAM_JWK); + conf.remove(PARAM_AUTHORIZATION_ENDPOINT); + + if (!conf.isEmpty()) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown configuration key " + conf.keySet() + " for issuer " + name); + } + } + + /** + * Setter that takes a jwk config object, parses it into a {@link JsonWebKeySet} and sets it + * @param jwksObject the config object to parse + */ + @SuppressWarnings("unchecked") + protected void setJsonWebKeySet(Object jwksObject) { + try { + if (jwksObject != null) { + jsonWebKeySet = parseJwkSet((Map) jwksObject); + } + } catch (JoseException e) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Failed parsing parameter 'jwk' for issuer " + getName(), e); + } + } + + @SuppressWarnings("unchecked") + protected static JsonWebKeySet parseJwkSet(Map jwkObj) throws JoseException { + JsonWebKeySet webKeySet = new JsonWebKeySet(); + if (jwkObj.containsKey("keys")) { + List jwkList = (List) jwkObj.get("keys"); + for (Object jwkO : jwkList) { + webKeySet.addJsonWebKey(JsonWebKey.Factory.newJwk((Map) jwkO)); + } + } else { + webKeySet = new JsonWebKeySet(JsonWebKey.Factory.newJwk(jwkObj)); + } + return webKeySet; + } + + private WellKnownDiscoveryConfig fetchWellKnown(String wellKnownUrl) { + return WellKnownDiscoveryConfig.parse(wellKnownUrl); + } + + public String getIss() { + return iss; + } + + public JWTIssuerConfig setIss(String iss) { + this.iss = iss; + return this; + } + + 
public String getName() { + return name; + } + + public JWTIssuerConfig setName(String name) { + this.name = name; + return this; + } + + public String getWellKnownUrl() { + return wellKnownUrl; + } + + public JWTIssuerConfig setWellKnownUrl(String wellKnownUrl) { + this.wellKnownUrl = wellKnownUrl; + return this; + } + + public List getJwksUrls() { + return jwksUrl; + } + + public JWTIssuerConfig setJwksUrl(List jwksUrl) { + this.jwksUrl = jwksUrl; + return this; + } + + /** + * Setter that converts from String or List into a list + * @param jwksUrlListOrString object that should be either string or list + * @return this for builder pattern + * @throws SolrException if wrong type + */ + @SuppressWarnings("unchecked") + public JWTIssuerConfig setJwksUrl(Object jwksUrlListOrString) { + if (jwksUrlListOrString instanceof String) + this.jwksUrl = Collections.singletonList((String) jwksUrlListOrString); + else if (jwksUrlListOrString instanceof List) + this.jwksUrl = (List) jwksUrlListOrString; + else if (jwksUrlListOrString != null) + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parameter " + PARAM_JWKS_URL + " must be either List or String"); + return this; + } + + public List getHttpsJwks() { + if (httpsJwks == null) { + httpsJwks = httpsJwksFactory.createList(getJwksUrls()); + } + return httpsJwks; + } + + /** + * Set the factory to use when creating HttpsJwks objects + * @param httpsJwksFactory factory with custom settings + */ + public static void setHttpsJwksFactory(HttpsJwksFactory httpsJwksFactory) { + JWTIssuerConfig.httpsJwksFactory = httpsJwksFactory; + } + + public JsonWebKeySet getJsonWebKeySet() { + return jsonWebKeySet; + } + + public JWTIssuerConfig setJsonWebKeySet(JsonWebKeySet jsonWebKeySet) { + this.jsonWebKeySet = jsonWebKeySet; + return this; + } + + /** + * Check if the issuer is backed by HttpsJwk url(s) + * @return true if keys are fetched over https + */ + public boolean usesHttpsJwk() { + return getJwksUrls() != null && 
!getJwksUrls().isEmpty(); + } + + public WellKnownDiscoveryConfig getWellKnownDiscoveryConfig() { + return wellKnownDiscoveryConfig; + } + + public String getAud() { + return aud; + } + + public JWTIssuerConfig setAud(String aud) { + this.aud = aud; + return this; + } + + public String getClientId() { + return clientId; + } + + public JWTIssuerConfig setClientId(String clientId) { + this.clientId = clientId; + return this; + } + + public String getAuthorizationEndpoint() { + return authorizationEndpoint; + } + + public JWTIssuerConfig setAuthorizationEndpoint(String authorizationEndpoint) { + this.authorizationEndpoint = authorizationEndpoint; + return this; + } + + public Map asConfig() { + HashMap config = new HashMap<>(); + putIfNotNull(config, PARAM_ISS_NAME, name); + putIfNotNull(config, PARAM_ISSUER, iss); + putIfNotNull(config, PARAM_AUDIENCE, aud); + putIfNotNull(config, PARAM_JWKS_URL, jwksUrl); + putIfNotNull(config, PARAM_WELL_KNOWN_URL, wellKnownUrl); + putIfNotNull(config, PARAM_CLIENT_ID, clientId); + putIfNotNull(config, PARAM_AUTHORIZATION_ENDPOINT, authorizationEndpoint); + if (jsonWebKeySet != null) { + putIfNotNull(config, PARAM_JWK, jsonWebKeySet.getJsonWebKeys()); + } + return config; + } + + private void putIfNotNull(HashMap config, String paramName, Object value) { + if (value != null) { + config.put(paramName, value); + } + } + + /** + * Validates that this config has a name and either jwksUrl, wellkKownUrl or jwk + * @return true if a configuration is found and is valid, otherwise false + * @throws SolrException if configuration is present but wrong + */ + public boolean isValid() { + int jwkConfigured = wellKnownUrl != null ? 1 : 0; + jwkConfigured += jwksUrl != null ? 2 : 0; + jwkConfigured += jsonWebKeySet != null ? 
2 : 0; + if (jwkConfigured > 3) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "JWTAuthPlugin needs to configure exactly one of " + + PARAM_WELL_KNOWN_URL + ", " + PARAM_JWKS_URL + " and " + PARAM_JWK); + } + if (jwkConfigured > 0 && name == null) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, + "Parameter 'name' is required for issuer configurations"); + } + return jwkConfigured > 0; + } + + /** + * + */ + static class HttpsJwksFactory { + private final long jwkCacheDuration; + private final long refreshReprieveThreshold; + + public HttpsJwksFactory(long jwkCacheDuration, long refreshReprieveThreshold) { + this.jwkCacheDuration = jwkCacheDuration; + this.refreshReprieveThreshold = refreshReprieveThreshold; + } + + private HttpsJwks create(String url) { + try { + URL jwksUrl = new URL(url); + if (!"https".equalsIgnoreCase(jwksUrl.getProtocol())) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, PARAM_JWKS_URL + " must use HTTPS"); + } + } catch (MalformedURLException e) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Url " + url + " configured in " + PARAM_JWKS_URL + " is not a valid URL"); + } + HttpsJwks httpsJkws = new HttpsJwks(url); + httpsJkws.setDefaultCacheDuration(jwkCacheDuration); + httpsJkws.setRefreshReprieveThreshold(refreshReprieveThreshold); + return httpsJkws; + } + + public List createList(List jwkUrls) { + return jwkUrls.stream().map(this::create).collect(Collectors.toList()); + } + } + + /** + * Config object for a OpenId Connect well-known config + * Typically exposed through /.well-known/openid-configuration endpoint + */ + public static class WellKnownDiscoveryConfig { + private Map securityConf; + + WellKnownDiscoveryConfig(Map securityConf) { + this.securityConf = securityConf; + } + + public static WellKnownDiscoveryConfig parse(String urlString) { + try { + URL url = new URL(urlString); + if (!Arrays.asList("https", "file").contains(url.getProtocol())) { + throw new 
SolrException(SolrException.ErrorCode.BAD_REQUEST, "Well-known config URL must be HTTPS or file"); + } + return parse(url.openStream()); + } catch (MalformedURLException e) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Well-known config URL " + urlString + " is malformed", e); + } catch (IOException e) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Well-known config could not be read from url " + urlString, e); + } + } + + public static WellKnownDiscoveryConfig parse(String json, Charset charset) { + return parse(new ByteArrayInputStream(json.getBytes(charset))); + } + + @SuppressWarnings("unchecked") + public static WellKnownDiscoveryConfig parse(InputStream configStream) { + return new WellKnownDiscoveryConfig((Map) Utils.fromJSON(configStream)); + } + + + public String getJwksUrl() { + return (String) securityConf.get("jwks_uri"); + } + + public String getIssuer() { + return (String) securityConf.get("issuer"); + } + + public String getAuthorizationEndpoint() { + return (String) securityConf.get("authorization_endpoint"); + } + + public String getUserInfoEndpoint() { + return (String) securityConf.get("userinfo_endpoint"); + } + + public String getTokenEndpoint() { + return (String) securityConf.get("token_endpoint"); + } + + @SuppressWarnings("unchecked") + public List getScopesSupported() { + return (List) securityConf.get("scopes_supported"); + } + + @SuppressWarnings("unchecked") + public List getResponseTypesSupported() { + return (List) securityConf.get("response_types_supported"); + } + } +} diff --git a/solr/core/src/java/org/apache/solr/security/JWTVerificationkeyResolver.java b/solr/core/src/java/org/apache/solr/security/JWTVerificationkeyResolver.java new file mode 100644 index 000000000000..50fb4ad052b3 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/security/JWTVerificationkeyResolver.java @@ -0,0 +1,153 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor 
license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.security; + +import java.io.IOException; +import java.lang.invoke.MethodHandles; +import java.security.Key; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.apache.solr.common.SolrException; +import org.jose4j.jwk.HttpsJwks; +import org.jose4j.jwk.JsonWebKey; +import org.jose4j.jwk.VerificationJwkSelector; +import org.jose4j.jws.JsonWebSignature; +import org.jose4j.jwt.JwtClaims; +import org.jose4j.jwt.MalformedClaimException; +import org.jose4j.jwt.consumer.InvalidJwtException; +import org.jose4j.jwx.JsonWebStructure; +import org.jose4j.keys.resolvers.VerificationKeyResolver; +import org.jose4j.lang.JoseException; +import org.jose4j.lang.UnresolvableKeyException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Resolves jws signature verification keys from a set of {@link JWTIssuerConfig} objects, which + * may represent any valid configuration in Solr's security.json, i.e. static list of JWKs + * or keys retrieved from HTTPs JWK endpoints. 
+ * + * This implementation maintains a map of issuers, each with its own list of {@link JsonWebKey}, + * and resolves correct key from correct issuer similar to HttpsJwksVerificationKeyResolver. + * If issuer claim is not required, we will select the first IssuerConfig if there is exactly one such config. + * + * If a key is not found, and issuer is backed by HTTPsJWKs, we attempt one cache refresh before failing. + */ +public class JWTVerificationkeyResolver implements VerificationKeyResolver { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + private VerificationJwkSelector verificationJwkSelector = new VerificationJwkSelector(); + + private Map issuerConfigs = new HashMap<>(); + private final boolean requireIssuer; + + /** + * Resolves key from a JWKs from one or more IssuerConfigs + * @param issuerConfigs Collection of configuration objects for the issuer(s) + * @param requireIssuer if true, will require 'iss' claim on jws + */ + public JWTVerificationkeyResolver(Collection issuerConfigs, boolean requireIssuer) { + this.requireIssuer = requireIssuer; + issuerConfigs.forEach(ic -> { + this.issuerConfigs.put(ic.getIss(), ic); + }); + } + + @Override + public Key resolveKey(JsonWebSignature jws, List nestingContext) throws UnresolvableKeyException { + JsonWebKey theChosenOne; + List jsonWebKeys = new ArrayList<>(); + + String keysSource = "N/A"; + try { + String tokenIssuer = JwtClaims.parse(jws.getUnverifiedPayload()).getIssuer(); + JWTIssuerConfig issuerConfig; + if (tokenIssuer == null) { + if (requireIssuer) { + throw new UnresolvableKeyException("Token does not contain required issuer claim"); + } else if (issuerConfigs.size() == 1) { + issuerConfig = issuerConfigs.values().iterator().next(); + } else { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, + "Signature verifiction not supported for multiple issuers without 'iss' claim in token."); + } + } else { + issuerConfig = 
issuerConfigs.get(tokenIssuer); + if (issuerConfig == null) { + if (issuerConfigs.size() > 1) { + throw new UnresolvableKeyException("No issuers configured for iss='" + tokenIssuer + "', cannot validate signature"); + } else if (issuerConfigs.size() == 1) { + issuerConfig = issuerConfigs.values().iterator().next(); + log.debug("No issuer matching token's iss claim, but exactly one configured, selecting that one"); + } else { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, + "Signature verifiction failed due to no configured issuer with id " + tokenIssuer); + } + } + } + + // Add all keys into a master list + if (issuerConfig.usesHttpsJwk()) { + keysSource = "[" + String.join(", ", issuerConfig.getJwksUrls()) + "]"; + for (HttpsJwks hjwks : issuerConfig.getHttpsJwks()) { + jsonWebKeys.addAll(hjwks.getJsonWebKeys()); + } + } else { + keysSource = "static list of keys in security.json"; + jsonWebKeys.addAll(issuerConfig.getJsonWebKeySet().getJsonWebKeys()); + } + + theChosenOne = verificationJwkSelector.select(jws, jsonWebKeys); + if (theChosenOne == null && issuerConfig.usesHttpsJwk()) { + log.debug("Refreshing JWKs from all {} locations, as no suitable verification key for JWS w/ header {} was found in {}", + issuerConfig.getHttpsJwks().size(), jws.getHeaders().getFullHeaderAsJsonString(), jsonWebKeys); + + jsonWebKeys.clear(); + for (HttpsJwks hjwks : issuerConfig.getHttpsJwks()) { + hjwks.refresh(); + jsonWebKeys.addAll(hjwks.getJsonWebKeys()); + } + theChosenOne = verificationJwkSelector.select(jws, jsonWebKeys); + } + } catch (JoseException | IOException | InvalidJwtException | MalformedClaimException e) { + StringBuilder sb = new StringBuilder(); + sb.append("Unable to find a suitable verification key for JWS w/ header ").append(jws.getHeaders().getFullHeaderAsJsonString()); + sb.append(" due to an unexpected exception (").append(e).append(") while obtaining or using keys from source "); + sb.append(keysSource); + throw new 
UnresolvableKeyException(sb.toString(), e); + } + + if (theChosenOne == null) { + StringBuilder sb = new StringBuilder(); + sb.append("Unable to find a suitable verification key for JWS w/ header ").append(jws.getHeaders().getFullHeaderAsJsonString()); + sb.append(" from ").append(jsonWebKeys.size()).append(" keys from source ").append(keysSource); + throw new UnresolvableKeyException(sb.toString()); + } + + return theChosenOne.getKey(); + } + + Set getIssuerConfigs() { + return new HashSet<>(issuerConfigs.values()); + } +} diff --git a/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java b/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java index 16b39a414354..79b4d29f9d45 100644 --- a/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java +++ b/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java @@ -39,7 +39,6 @@ enum Name { CORE_READ_PERM("core-admin-read", null), CORE_EDIT_PERM("core-admin-edit", null), READ_PERM("read", "*"), - CUSTOM_PERM("custom-op", null),//custom operation , user-defined UPDATE_PERM("update", "*"), CONFIG_EDIT_PERM("config-edit", unmodifiableSet(new HashSet<>(asList("*", null)))), CONFIG_READ_PERM("config-read", "*"), @@ -52,8 +51,6 @@ enum Name { AUTOSCALING_WRITE_PERM("autoscaling-write", null), AUTOSCALING_HISTORY_READ_PERM("autoscaling-history-read", null), METRICS_HISTORY_READ_PERM("metrics-history-read", null), - BLOB_READ("blob-read", null), - BLOB_WRITE("blob-write", null), ALL("all", unmodifiableSet(new HashSet<>(asList("*", null)))) ; final String name; diff --git a/solr/core/src/java/org/apache/solr/security/RuleBasedAuthorizationPlugin.java b/solr/core/src/java/org/apache/solr/security/RuleBasedAuthorizationPlugin.java index e574179c6088..8fc6d2462e99 100644 --- a/solr/core/src/java/org/apache/solr/security/RuleBasedAuthorizationPlugin.java +++ b/solr/core/src/java/org/apache/solr/security/RuleBasedAuthorizationPlugin.java @@ -84,16 +84,24 @@ public 
List get(Object key) { @Override public AuthorizationResponse authorize(AuthorizationContext context) { List collectionRequests = context.getCollectionRequests(); + log.debug("Attempting to authorize request to [{}] of type: [{}], associated with collections [{}]", + context.getResource(), context.getRequestType(), collectionRequests); + if (context.getRequestType() == AuthorizationContext.RequestType.ADMIN) { + log.debug("Authorizing an ADMIN request, checking admin permissions"); MatchStatus flag = checkCollPerm(mapping.get(null), context); return flag.rsp; } for (AuthorizationContext.CollectionRequest collreq : collectionRequests) { //check permissions for each collection + log.debug("Authorizing collection-aware request, checking perms applicable to specific collection [{}]", + collreq.collectionName); MatchStatus flag = checkCollPerm(mapping.get(collreq.collectionName), context); if (flag != MatchStatus.NO_PERMISSIONS_FOUND) return flag.rsp; } + + log.debug("Authorizing collection-aware request, checking perms applicable to all (*) collections"); //check wildcard (all=*) permissions. 
MatchStatus flag = checkCollPerm(mapping.get("*"), context); return flag.rsp; @@ -103,6 +111,14 @@ private MatchStatus checkCollPerm(Map> pathVsPerms, AuthorizationContext context) { if (pathVsPerms == null) return MatchStatus.NO_PERMISSIONS_FOUND; + if (log.isTraceEnabled()) { + log.trace("Following perms are associated with collection"); + for (String pathKey : pathVsPerms.keySet()) { + final List permsAssociatedWithPath = pathVsPerms.get(pathKey); + log.trace("Path: [{}], Perms: [{}]", pathKey, permsAssociatedWithPath); + } + } + String path = context.getResource(); MatchStatus flag = checkPathPerm(pathVsPerms.get(path), context); if (flag != MatchStatus.NO_PERMISSIONS_FOUND) return flag; @@ -110,14 +126,18 @@ private MatchStatus checkCollPerm(Map> pathVsPerms, } private MatchStatus checkPathPerm(List permissions, AuthorizationContext context) { - if (permissions == null || permissions.isEmpty()) return MatchStatus.NO_PERMISSIONS_FOUND; + if (permissions == null || permissions.isEmpty()) { + return MatchStatus.NO_PERMISSIONS_FOUND; + } Principal principal = context.getUserPrincipal(); + log.trace("Following perms are associated with this collection and path: [{}]", permissions); final Permission governingPermission = findFirstGoverningPermission(permissions, context); if (governingPermission == null) { - log.debug("No permissions configured for the resource {} . So allowed to access", context.getResource()); + log.debug("No perms configured for the resource {} . 
So allowed to access", context.getResource()); return MatchStatus.NO_PERMISSIONS_FOUND; } + log.debug("Found perm [{}] to govern resource [{}]", governingPermission, context.getResource()); return determineIfPermissionPermitsPrincipal(principal, governingPermission); } @@ -132,6 +152,7 @@ private Permission findFirstGoverningPermission(List permissions, Au } private boolean permissionAppliesToRequest(Permission permission, AuthorizationContext context) { + log.trace("Testing whether permission [{}] applies to request [{}]", permission, context.getResource()); if (PermissionNameProvider.values.containsKey(permission.name)) { return predefinedPermissionAppliesToRequest(permission, context); } else { @@ -140,50 +161,66 @@ private boolean permissionAppliesToRequest(Permission permission, AuthorizationC } private boolean predefinedPermissionAppliesToRequest(Permission predefinedPermission, AuthorizationContext context) { + log.trace("Permission [{}] is a predefined perm", predefinedPermission); if (predefinedPermission.wellknownName == PermissionNameProvider.Name.ALL) { + log.trace("'ALL' perm applies to all requests; perm applies."); return true; //'ALL' applies to everything! } else if (! (context.getHandler() instanceof PermissionNameProvider)) { + log.trace("Request handler [{}] is not a PermissionNameProvider, perm doesnt apply", context.getHandler()); return false; // We're not 'ALL', and the handler isn't associated with any other predefined permissions } else { PermissionNameProvider handler = (PermissionNameProvider) context.getHandler(); PermissionNameProvider.Name permissionName = handler.getPermissionName(context); - return permissionName != null && predefinedPermission.name.equals(permissionName.name); + boolean applies = permissionName != null && predefinedPermission.name.equals(permissionName.name); + log.trace("Request handler [{}] is associated with predefined perm [{}]? 
{}", + handler, predefinedPermission.name, applies); + return applies; } } private boolean customPermissionAppliesToRequest(Permission customPermission, AuthorizationContext context) { + log.trace("Permission [{}] is a custom permission", customPermission); if (customPermission.method != null && !customPermission.method.contains(context.getHttpMethod())) { + log.trace("Custom permission requires method [{}] but request had method [{}]; permission doesn't apply", + customPermission.method, context.getHttpMethod()); //this permissions HTTP method does not match this rule. try other rules return false; } if (customPermission.params != null) { for (Map.Entry> e : customPermission.params.entrySet()) { String[] paramVal = context.getParams().getParams(e.getKey()); - if(!e.getValue().apply(paramVal)) return false; + if(!e.getValue().apply(paramVal)) { + log.trace("Request has param [{}] which is incompatible with custom perm [{}]; perm doesnt apply", + e.getKey(), customPermission); + return false; + } } } + log.trace("Perm [{}] matches method and params for request; permission applies", customPermission); return true; } private MatchStatus determineIfPermissionPermitsPrincipal(Principal principal, Permission governingPermission) { if (governingPermission.role == null) { - //no role is assigned permission.That means everybody is allowed to access + log.debug("Governing permission [{}] has no role; permitting access", governingPermission); return MatchStatus.PERMITTED; } if (principal == null) { - log.info("request has come without principal. failed permission {} ", governingPermission); - //this resource needs a principal but the request has come without - //any credential. 
+ log.debug("Governing permission [{}] has role, but request principal cannot be identified; forbidding access", governingPermission); return MatchStatus.USER_REQUIRED; } else if (governingPermission.role.contains("*")) { + log.debug("Governing permission [{}] allows all roles; permitting access", governingPermission); return MatchStatus.PERMITTED; } + Set userRoles = usersVsRoles.get(principal.getName()); for (String role : governingPermission.role) { - Set userRoles = usersVsRoles.get(principal.getName()); - if (userRoles != null && userRoles.contains(role)) return MatchStatus.PERMITTED; + if (userRoles != null && userRoles.contains(role)) { + log.debug("Governing permission [{}] allows access to role [{}]; permitting access", governingPermission, role); + return MatchStatus.PERMITTED; + } } log.info("This resource is configured to have a permission {}, The principal {} does not have the right role ", governingPermission, principal); return MatchStatus.FORBIDDEN; diff --git a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java index 3307dc0ed69a..adfbc5da4532 100644 --- a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java +++ b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java @@ -141,6 +141,8 @@ public class HttpSolrCall { public static final String ORIGINAL_USER_PRINCIPAL_HEADER = "originalUserPrincipal"; + public static final String INTERNAL_REQUEST_COUNT = "_forwardedCount"; + public static final Random random; static { // We try to make things reproducible in the context of our tests by initializing the random instance @@ -482,7 +484,7 @@ protected void extractHandlerFromURLPath(SolrRequestParsers parser) throws Excep } } - protected void extractRemotePath(String collectionName, String origCorename) throws UnsupportedEncodingException, KeeperException, InterruptedException { + protected void extractRemotePath(String collectionName, String origCorename) throws 
UnsupportedEncodingException, KeeperException, InterruptedException, SolrException { assert core == null; coreUrl = getRemoteCoreUrl(collectionName, origCorename); // don't proxy for internal update requests @@ -508,31 +510,45 @@ Action authorize() throws IOException { AuthorizationContext context = getAuthCtx(); log.debug("AuthorizationContext : {}", context); AuthorizationResponse authResponse = cores.getAuthorizationPlugin().authorize(context); - if (authResponse.statusCode == AuthorizationResponse.PROMPT.statusCode) { + int statusCode = authResponse.statusCode; + + if (statusCode == AuthorizationResponse.PROMPT.statusCode) { Map headers = (Map) getReq().getAttribute(AuthenticationPlugin.class.getName()); if (headers != null) { for (Map.Entry e : headers.entrySet()) response.setHeader(e.getKey(), e.getValue()); } log.debug("USER_REQUIRED "+req.getHeader("Authorization")+" "+ req.getUserPrincipal()); + sendError(statusCode, + "Authentication failed, Response code: " + statusCode); if (shouldAudit(EventType.REJECTED)) { cores.getAuditLoggerPlugin().doAudit(new AuditEvent(EventType.REJECTED, req, context)); } + return RETURN; } - if (!(authResponse.statusCode == HttpStatus.SC_ACCEPTED) && !(authResponse.statusCode == HttpStatus.SC_OK)) { - log.info("USER_REQUIRED auth header {} context : {} ", req.getHeader("Authorization"), context); - sendError(authResponse.statusCode, - "Unauthorized request, Response code: " + authResponse.statusCode); + if (statusCode == AuthorizationResponse.FORBIDDEN.statusCode) { + log.debug("UNAUTHORIZED auth header {} context : {}, msg: {}", req.getHeader("Authorization"), context, authResponse.getMessage()); + sendError(statusCode, + "Unauthorized request, Response code: " + statusCode); if (shouldAudit(EventType.UNAUTHORIZED)) { cores.getAuditLoggerPlugin().doAudit(new AuditEvent(EventType.UNAUTHORIZED, req, context)); } return RETURN; } + if (!(statusCode == HttpStatus.SC_ACCEPTED) && !(statusCode == HttpStatus.SC_OK)) { + 
log.warn("ERROR {} during authentication: {}", statusCode, authResponse.getMessage()); + sendError(statusCode, + "ERROR during authorization, Response code: " + statusCode); + if (shouldAudit(EventType.ERROR)) { + cores.getAuditLoggerPlugin().doAudit(new AuditEvent(EventType.ERROR, req, context)); + } + return RETURN; + } if (shouldAudit(EventType.AUTHORIZED)) { cores.getAuditLoggerPlugin().doAudit(new AuditEvent(EventType.AUTHORIZED, req, context)); } return null; } - + /** * This method processes the request. */ @@ -680,12 +696,19 @@ void destroy() { } } + private String getQuerySting() { + int internalRequestCount = queryParams.getInt(INTERNAL_REQUEST_COUNT, 0); + ModifiableSolrParams updatedQueryParams = new ModifiableSolrParams(queryParams); + updatedQueryParams.set(INTERNAL_REQUEST_COUNT, internalRequestCount + 1); + return updatedQueryParams.toQueryString(); + } + //TODO using Http2Client private void remoteQuery(String coreUrl, HttpServletResponse resp) throws IOException { HttpRequestBase method; HttpEntity httpEntity = null; try { - String urlstr = coreUrl + queryParams.toQueryString(); + String urlstr = coreUrl + getQuerySting(); boolean isPostOrPutRequest = "POST".equals(req.getMethod()) || "PUT".equals(req.getMethod()); if ("GET".equals(req.getMethod())) { @@ -1040,13 +1063,15 @@ private void getSlicesForCollections(ClusterState clusterState, } } - protected String getRemoteCoreUrl(String collectionName, String origCorename) { + protected String getRemoteCoreUrl(String collectionName, String origCorename) throws SolrException { ClusterState clusterState = cores.getZkController().getClusterState(); final DocCollection docCollection = clusterState.getCollectionOrNull(collectionName); Slice[] slices = (docCollection != null) ? 
docCollection.getActiveSlicesArr() : null; List activeSlices = new ArrayList<>(); boolean byCoreName = false; + int totalReplicas = 0; + if (slices == null) { byCoreName = true; activeSlices = new ArrayList<>(); @@ -1060,6 +1085,9 @@ protected String getRemoteCoreUrl(String collectionName, String origCorename) { } } + for (Slice s: activeSlices) { + totalReplicas += s.getReplicas().size(); + } if (activeSlices.isEmpty()) { return null; } @@ -1073,7 +1101,13 @@ protected String getRemoteCoreUrl(String collectionName, String origCorename) { String coreUrl = getCoreUrl(collectionName, origCorename, clusterState, activeSlices, byCoreName, true); + // Avoid getting into a recursive loop of requests being forwarded by + // stopping forwarding and erroring out after (totalReplicas) forwards if (coreUrl == null) { + if (queryParams.getInt(INTERNAL_REQUEST_COUNT, 0) > totalReplicas){ + throw new SolrException(SolrException.ErrorCode.INVALID_STATE, + "No active replicas found for collection: " + collectionName); + } coreUrl = getCoreUrl(collectionName, origCorename, clusterState, activeSlices, byCoreName, false); } diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java index 2664b66794ad..8c5847d739fd 100644 --- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java +++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java @@ -78,15 +78,16 @@ import org.apache.solr.metrics.MetricsMap; import org.apache.solr.metrics.OperatingSystemMetricSet; import org.apache.solr.metrics.SolrMetricManager; +import org.apache.solr.metrics.SolrMetricProducer; import org.apache.solr.security.AuditEvent; import org.apache.solr.security.AuditEvent.EventType; import org.apache.solr.security.AuthenticationPlugin; import org.apache.solr.security.PKIAuthenticationPlugin; import org.apache.solr.security.PublicKeyHandler; import org.apache.solr.util.SolrFileCleaningTracker; +import 
org.apache.solr.util.tracing.GlobalTracer; import org.apache.solr.util.StartupLoggingUtils; import org.apache.solr.util.configuration.SSLConfigurationsFactory; -import org.apache.solr.util.tracing.GlobalTracer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -108,7 +109,7 @@ public class SolrDispatchFilter extends BaseSolrFilter { private boolean isV2Enabled = !"true".equals(System.getProperty("disable.v2.api", "false")); - private final String metricTag = Integer.toHexString(hashCode()); + private final String metricTag = SolrMetricProducer.getUniqueMetricTag(this, null); private SolrMetricManager metricManager; private String registryName; private volatile boolean closeOnDestroy = true; @@ -180,7 +181,6 @@ public void init(FilterConfig config) throws ServletException final Path solrHomePath = solrHome == null ? SolrResourceLoader.locateSolrHome() : Paths.get(solrHome); coresInit = createCoreContainer(solrHomePath, extraProperties); SolrResourceLoader.ensureUserFilesDataDir(solrHomePath); - SolrResourceLoader.ensureBlobsDir(solrHomePath); this.httpClient = coresInit.getUpdateShardHandler().getDefaultHttpClient(); setupJvmMetrics(coresInit); log.debug("user.dir=" + System.getProperty("user.dir")); @@ -628,8 +628,8 @@ public ServletInputStream getInputStream() throws IOException { public void close() { // even though we skip closes, we let local tests know not to close so that a full understanding can take // place - assert !Thread.currentThread().getStackTrace()[2].getClassName().matches( - "org\\.apache\\.(?:solr|lucene).*") : CLOSE_STREAM_MSG; + assert Thread.currentThread().getStackTrace()[2].getClassName().matches( + "org\\.apache\\.(?:solr|lucene).*") ? 
false : true : CLOSE_STREAM_MSG; this.stream = ClosedServletInputStream.CLOSED_SERVLET_INPUT_STREAM; } }; @@ -663,8 +663,9 @@ public ServletOutputStream getOutputStream() throws IOException { public void close() { // even though we skip closes, we let local tests know not to close so that a full understanding can take // place - assert !Thread.currentThread().getStackTrace()[2].getClassName().matches( - "org\\.apache\\.(?:solr|lucene).*") : CLOSE_STREAM_MSG; + assert Thread.currentThread().getStackTrace()[2].getClassName().matches( + "org\\.apache\\.(?:solr|lucene).*") ? false + : true : CLOSE_STREAM_MSG; stream = ClosedServletOutputStream.CLOSED_SERVLET_OUTPUT_STREAM; } }; diff --git a/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java b/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java index 6d9e9ea53e0f..d7654b60c566 100644 --- a/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java +++ b/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java @@ -20,11 +20,10 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; -import com.codahale.metrics.MetricRegistry; import org.apache.solr.core.SolrInfoBean; import org.apache.solr.metrics.MetricsMap; -import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.metrics.SolrMetricProducer; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.search.SolrCacheBase; /** @@ -54,17 +53,13 @@ public class Metrics extends SolrCacheBase implements SolrInfoBean, SolrMetricPr public AtomicLong shardBuffercacheLost = new AtomicLong(0); private MetricsMap metricsMap; - private MetricRegistry registry; private Set metricNames = ConcurrentHashMap.newKeySet(); - private SolrMetricManager metricManager; - private String registryName; + private SolrMetricsContext solrMetricsContext; private long previous = System.nanoTime(); @Override - public void initializeMetrics(SolrMetricManager manager, String registryName, 
String tag, String scope) { - this.metricManager = manager; - this.registryName = registryName; - registry = manager.registry(registryName); + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + solrMetricsContext = parentContext.getChildContext(this); metricsMap = new MetricsMap((detailed, map) -> { long now = System.nanoTime(); long delta = Math.max(now - previous, 1); @@ -108,7 +103,7 @@ public void initializeMetrics(SolrMetricManager manager, String registryName, St previous = now; }); - manager.registerGauge(this, registryName, metricsMap, tag, true, getName(), getCategory().toString(), scope); + solrMetricsContext.gauge(this, metricsMap, true, getName(), getCategory().toString(), scope); } private float getPerSecond(long value, double seconds) { @@ -133,8 +128,7 @@ public Set getMetricNames() { } @Override - public MetricRegistry getMetricRegistry() { - return registry; + public SolrMetricsContext getSolrMetricsContext() { + return solrMetricsContext; } - } diff --git a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java index 2bf60cbd3a36..0739edeb68ab 100644 --- a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java +++ b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java @@ -32,8 +32,8 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.solr.core.SolrInfoBean; import org.apache.solr.metrics.MetricsMap; -import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.metrics.SolrMetricProducer; +import org.apache.solr.metrics.SolrMetricsContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -51,9 +51,7 @@ public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer { private final ConcurrentMap> cache; private final Set metricNames = ConcurrentHashMap.newKeySet(); - private MetricRegistry registry; - private SolrMetricManager metricManager; - 
private String registryName; + private SolrMetricsContext solrMetricsContext; public HdfsLocalityReporter() { cache = new ConcurrentHashMap<>(); @@ -89,17 +87,20 @@ public Set getMetricNames() { @Override public MetricRegistry getMetricRegistry() { - return registry; + return solrMetricsContext != null ? solrMetricsContext.getMetricRegistry() : null; + } + + @Override + public SolrMetricsContext getSolrMetricsContext() { + return solrMetricsContext; } /** * Provide statistics on HDFS block locality, both in terms of bytes and block counts. */ @Override - public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) { - this.metricManager = manager; - this.registryName = registryName; - registry = manager.registry(registryName); + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + solrMetricsContext = parentContext.getChildContext(this); MetricsMap metricsMap = new MetricsMap((detailed, map) -> { long totalBytes = 0; long localBytes = 0; @@ -149,7 +150,7 @@ public void initializeMetrics(SolrMetricManager manager, String registryName, St map.put(LOCALITY_BLOCKS_RATIO, localCount / (double) totalCount); } }); - manager.registerGauge(this, registryName, metricsMap, tag, true, "hdfsLocality", getCategory().toString(), scope); + solrMetricsContext.gauge(this, metricsMap, true, "hdfsLocality", getCategory().toString(), scope); } /** diff --git a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java index 2b621a8f4519..cacaed264cce 100644 --- a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java +++ b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java @@ -51,8 +51,8 @@ import org.apache.solr.common.util.NamedList; import org.apache.solr.core.SolrConfig.UpdateHandlerInfo; import org.apache.solr.core.SolrCore; -import org.apache.solr.metrics.SolrMetricManager; import 
org.apache.solr.metrics.SolrMetricProducer; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.request.LocalSolrQueryRequest; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrRequestInfo; @@ -96,8 +96,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState LongAdder numDocsPending = new LongAdder(); LongAdder numErrors = new LongAdder(); Meter numErrorsCumulative; - SolrMetricManager metricManager; - String registryName; + SolrMetricsContext solrMetricsContext; // tracks when auto-commit should occur protected final CommitTracker commitTracker; @@ -170,48 +169,51 @@ public DirectUpdateHandler2(SolrCore core, UpdateHandler updateHandler) { } @Override - public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) { - this.metricManager = manager; - this.registryName = registryName; - this.registry = manager.registry(registryName); - commitCommands = manager.meter(this, registryName, "commits", getCategory().toString(), scope); - manager.registerGauge(this, registryName, () -> commitTracker.getCommitCount(), tag, true, "autoCommits", getCategory().toString(), scope); - manager.registerGauge(this, registryName, () -> softCommitTracker.getCommitCount(), tag, true, "softAutoCommits", getCategory().toString(), scope); + public SolrMetricsContext getSolrMetricsContext() { + return solrMetricsContext; + } + + @Override + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + solrMetricsContext = parentContext.getChildContext(this); + commitCommands = solrMetricsContext.meter(this, "commits", getCategory().toString(), scope); + solrMetricsContext.gauge(this, () -> commitTracker.getCommitCount(), true, "autoCommits", getCategory().toString(), scope); + solrMetricsContext.gauge(this, () -> softCommitTracker.getCommitCount(), true, "softAutoCommits", getCategory().toString(), scope); if (commitTracker.getDocsUpperBound() 
> 0) { - manager.registerGauge(this, registryName, () -> commitTracker.getDocsUpperBound(), tag, true, "autoCommitMaxDocs", + solrMetricsContext.gauge(this, () -> commitTracker.getDocsUpperBound(), true, "autoCommitMaxDocs", getCategory().toString(), scope); } if (commitTracker.getTimeUpperBound() > 0) { - manager.registerGauge(this, registryName, () -> "" + commitTracker.getTimeUpperBound() + "ms", tag, true, "autoCommitMaxTime", + solrMetricsContext.gauge(this, () -> "" + commitTracker.getTimeUpperBound() + "ms", true, "autoCommitMaxTime", getCategory().toString(), scope); } if (commitTracker.getTLogFileSizeUpperBound() > 0) { - manager.registerGauge(this, registryName, () -> commitTracker.getTLogFileSizeUpperBound(), tag, true, "autoCommitMaxSize", + solrMetricsContext.gauge(this, () -> commitTracker.getTLogFileSizeUpperBound(), true, "autoCommitMaxSize", getCategory().toString(), scope); } if (softCommitTracker.getDocsUpperBound() > 0) { - manager.registerGauge(this, registryName, () -> softCommitTracker.getDocsUpperBound(), tag, true, "softAutoCommitMaxDocs", + solrMetricsContext.gauge(this, () -> softCommitTracker.getDocsUpperBound(), true, "softAutoCommitMaxDocs", getCategory().toString(), scope); } if (softCommitTracker.getTimeUpperBound() > 0) { - manager.registerGauge(this, registryName, () -> "" + softCommitTracker.getTimeUpperBound() + "ms", tag, true, "softAutoCommitMaxTime", + solrMetricsContext.gauge(this, () -> "" + softCommitTracker.getTimeUpperBound() + "ms", true, "softAutoCommitMaxTime", getCategory().toString(), scope); } - optimizeCommands = manager.meter(this, registryName, "optimizes", getCategory().toString(), scope); - rollbackCommands = manager.meter(this, registryName, "rollbacks", getCategory().toString(), scope); - splitCommands = manager.meter(this, registryName, "splits", getCategory().toString(), scope); - mergeIndexesCommands = manager.meter(this, registryName, "merges", getCategory().toString(), scope); - expungeDeleteCommands = 
manager.meter(this, registryName, "expungeDeletes", getCategory().toString(), scope); - manager.registerGauge(this, registryName, () -> numDocsPending.longValue(), tag, true, "docsPending", getCategory().toString(), scope); - manager.registerGauge(this, registryName, () -> addCommands.longValue(), tag, true, "adds", getCategory().toString(), scope); - manager.registerGauge(this, registryName, () -> deleteByIdCommands.longValue(), tag, true, "deletesById", getCategory().toString(), scope); - manager.registerGauge(this, registryName, () -> deleteByQueryCommands.longValue(), tag, true, "deletesByQuery", getCategory().toString(), scope); - manager.registerGauge(this, registryName, () -> numErrors.longValue(), tag, true, "errors", getCategory().toString(), scope); + optimizeCommands = solrMetricsContext.meter(this, "optimizes", getCategory().toString(), scope); + rollbackCommands = solrMetricsContext.meter(this, "rollbacks", getCategory().toString(), scope); + splitCommands = solrMetricsContext.meter(this, "splits", getCategory().toString(), scope); + mergeIndexesCommands = solrMetricsContext.meter(this, "merges", getCategory().toString(), scope); + expungeDeleteCommands = solrMetricsContext.meter(this, "expungeDeletes", getCategory().toString(), scope); + solrMetricsContext.gauge(this, () -> numDocsPending.longValue(), true, "docsPending", getCategory().toString(), scope); + solrMetricsContext.gauge(this, () -> addCommands.longValue(), true, "adds", getCategory().toString(), scope); + solrMetricsContext.gauge(this, () -> deleteByIdCommands.longValue(), true, "deletesById", getCategory().toString(), scope); + solrMetricsContext.gauge(this, () -> deleteByQueryCommands.longValue(), true, "deletesByQuery", getCategory().toString(), scope); + solrMetricsContext.gauge(this, () -> numErrors.longValue(), true, "errors", getCategory().toString(), scope); - addCommandsCumulative = manager.meter(this, registryName, "cumulativeAdds", getCategory().toString(), scope); - 
deleteByIdCommandsCumulative = manager.meter(this, registryName, "cumulativeDeletesById", getCategory().toString(), scope); - deleteByQueryCommandsCumulative = manager.meter(this, registryName, "cumulativeDeletesByQuery", getCategory().toString(), scope); - numErrorsCumulative = manager.meter(this, registryName, "cumulativeErrors", getCategory().toString(), scope); + addCommandsCumulative = solrMetricsContext.meter(this, "cumulativeAdds", getCategory().toString(), scope); + deleteByIdCommandsCumulative = solrMetricsContext.meter(this, "cumulativeDeletesById", getCategory().toString(), scope); + deleteByQueryCommandsCumulative = solrMetricsContext.meter(this, "cumulativeDeletesByQuery", getCategory().toString(), scope); + numErrorsCumulative = solrMetricsContext.meter(this, "cumulativeErrors", getCategory().toString(), scope); } private void deleteAll() throws IOException { @@ -805,6 +807,11 @@ public void close() throws IOException { softCommitTracker.close(); numDocsPending.reset(); + try { + SolrMetricProducer.super.close(); + } catch (Exception e) { + throw new IOException("Error closing", e); + } } @@ -915,7 +922,7 @@ public void split(SplitIndexCommand cmd) throws IOException { } /** - * Calls either {@link IndexWriter#updateDocValues} or {@link IndexWriter#updateDocument}(s) as + * Calls either {@link IndexWriter#updateDocValues} or IndexWriter#updateDocument(s) as * needed based on {@link AddUpdateCommand#isInPlaceUpdate}. *

* If the this is an UPDATE_INPLACE cmd, then all fields included in diff --git a/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java b/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java index 59106849b103..5098cd1daa7c 100644 --- a/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java +++ b/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java @@ -433,11 +433,6 @@ private int getRfFromResponse(InputStream inputStream) { } } catch (Exception e) { log.warn("Failed to parse response from {} during replication factor accounting", node, e); - } finally { - try { - inputStream.close(); - } catch (Exception ignore) { - } } } return Integer.MAX_VALUE; diff --git a/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java b/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java index 27e0198fe3cf..8cfdd3950a78 100644 --- a/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java +++ b/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java @@ -42,7 +42,7 @@ import org.apache.solr.core.DirectoryFactory.DirContext; import org.apache.solr.core.SolrCore; import org.apache.solr.core.SolrInfoBean; -import org.apache.solr.metrics.SolrMetricManager; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.schema.IndexSchema; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -88,8 +88,7 @@ public class SolrIndexWriter extends IndexWriter { private final AtomicLong runningMajorMergesDocs = new AtomicLong(); private final AtomicLong runningMinorMergesDocs = new AtomicLong(); - private final SolrMetricManager metricManager; - private final String registryName; + private final SolrMetricsContext solrMetricsContext; // merge diagnostics. 
private final Map runningMerges = new ConcurrentHashMap<>(); @@ -120,8 +119,7 @@ public SolrIndexWriter(String name, Directory d, IndexWriterConfig conf) throws // no metrics mergeTotals = false; mergeDetails = false; - metricManager = null; - registryName = null; + solrMetricsContext = null; } private SolrIndexWriter(SolrCore core, String name, String path, Directory directory, boolean create, IndexSchema schema, SolrIndexConfig config, IndexDeletionPolicy delPolicy, Codec codec) throws IOException { @@ -135,8 +133,7 @@ private SolrIndexWriter(SolrCore core, String name, String path, Directory direc infoStream = getConfig().getInfoStream(); this.directory = directory; numOpens.incrementAndGet(); - metricManager = core.getCoreContainer().getMetricManager(); - registryName = core.getCoreMetricManager().getRegistryName(); + solrMetricsContext = core.getSolrMetricsContext().getChildContext(this); if (config.metricsInfo != null && config.metricsInfo.initArgs != null) { Object v = config.metricsInfo.initArgs.get("majorMergeDocs"); if (v != null) { @@ -160,21 +157,21 @@ private SolrIndexWriter(SolrCore core, String name, String path, Directory direc } if (mergeDetails) { mergeTotals = true; // override - majorMergedDocs = metricManager.meter(null, registryName, "docs", SolrInfoBean.Category.INDEX.toString(), "merge", "major"); - majorDeletedDocs = metricManager.meter(null, registryName, "deletedDocs", SolrInfoBean.Category.INDEX.toString(), "merge", "major"); + majorMergedDocs = solrMetricsContext.meter(null, "docs", SolrInfoBean.Category.INDEX.toString(), "merge", "major"); + majorDeletedDocs = solrMetricsContext.meter(null, "deletedDocs", SolrInfoBean.Category.INDEX.toString(), "merge", "major"); } if (mergeTotals) { - minorMerge = metricManager.timer(null, registryName, "minor", SolrInfoBean.Category.INDEX.toString(), "merge"); - majorMerge = metricManager.timer(null, registryName, "major", SolrInfoBean.Category.INDEX.toString(), "merge"); - mergeErrors = 
metricManager.counter(null, registryName, "errors", SolrInfoBean.Category.INDEX.toString(), "merge"); + minorMerge = solrMetricsContext.timer(null, "minor", SolrInfoBean.Category.INDEX.toString(), "merge"); + majorMerge = solrMetricsContext.timer(null, "major", SolrInfoBean.Category.INDEX.toString(), "merge"); + mergeErrors = solrMetricsContext.counter(null, "errors", SolrInfoBean.Category.INDEX.toString(), "merge"); String tag = core.getMetricTag(); - metricManager.registerGauge(null, registryName, () -> runningMajorMerges.get(), tag, true, "running", SolrInfoBean.Category.INDEX.toString(), "merge", "major"); - metricManager.registerGauge(null, registryName, () -> runningMinorMerges.get(), tag, true, "running", SolrInfoBean.Category.INDEX.toString(), "merge", "minor"); - metricManager.registerGauge(null, registryName, () -> runningMajorMergesDocs.get(), tag, true, "running.docs", SolrInfoBean.Category.INDEX.toString(), "merge", "major"); - metricManager.registerGauge(null, registryName, () -> runningMinorMergesDocs.get(), tag, true, "running.docs", SolrInfoBean.Category.INDEX.toString(), "merge", "minor"); - metricManager.registerGauge(null, registryName, () -> runningMajorMergesSegments.get(), tag, true, "running.segments", SolrInfoBean.Category.INDEX.toString(), "merge", "major"); - metricManager.registerGauge(null, registryName, () -> runningMinorMergesSegments.get(), tag, true, "running.segments", SolrInfoBean.Category.INDEX.toString(), "merge", "minor"); - flushMeter = metricManager.meter(null, registryName, "flush", SolrInfoBean.Category.INDEX.toString()); + solrMetricsContext.gauge(null, () -> runningMajorMerges.get(), true, "running", SolrInfoBean.Category.INDEX.toString(), "merge", "major"); + solrMetricsContext.gauge(null, () -> runningMinorMerges.get(), true, "running", SolrInfoBean.Category.INDEX.toString(), "merge", "minor"); + solrMetricsContext.gauge(null, () -> runningMajorMergesDocs.get(), true, "running.docs", 
SolrInfoBean.Category.INDEX.toString(), "merge", "major"); + solrMetricsContext.gauge(null, () -> runningMinorMergesDocs.get(), true, "running.docs", SolrInfoBean.Category.INDEX.toString(), "merge", "minor"); + solrMetricsContext.gauge(null, () -> runningMajorMergesSegments.get(), true, "running.segments", SolrInfoBean.Category.INDEX.toString(), "merge", "major"); + solrMetricsContext.gauge(null, () -> runningMinorMergesSegments.get(), true, "running.segments", SolrInfoBean.Category.INDEX.toString(), "merge", "minor"); + flushMeter = solrMetricsContext.meter(null, "flush", SolrInfoBean.Category.INDEX.toString()); } } } @@ -345,6 +342,9 @@ private void cleanup() throws IOException { if (directoryFactory != null) { directoryFactory.release(directory); } + if (solrMetricsContext != null) { + solrMetricsContext.unregister(); + } } } diff --git a/solr/core/src/java/org/apache/solr/update/UpdateHandler.java b/solr/core/src/java/org/apache/solr/update/UpdateHandler.java index c8dbc10568cc..59dae8a369a6 100644 --- a/solr/core/src/java/org/apache/solr/update/UpdateHandler.java +++ b/solr/core/src/java/org/apache/solr/update/UpdateHandler.java @@ -22,7 +22,6 @@ import java.util.Vector; import java.util.concurrent.ConcurrentHashMap; -import com.codahale.metrics.MetricRegistry; import org.apache.solr.core.DirectoryFactory; import org.apache.solr.core.HdfsDirectoryFactory; import org.apache.solr.core.PluginInfo; @@ -57,7 +56,6 @@ public abstract class UpdateHandler implements SolrInfoBean { protected final UpdateLog ulog; protected Set metricNames = ConcurrentHashMap.newKeySet(); - protected MetricRegistry registry; private void parseEventListeners() { final Class clazz = SolrEventListener.class; @@ -211,8 +209,4 @@ public Category getCategory() { public Set getMetricNames() { return metricNames; } - @Override - public MetricRegistry getMetricRegistry() { - return registry; - } } diff --git a/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java 
b/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java index 8e3486be2962..fe966cbc9296 100644 --- a/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java +++ b/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java @@ -25,7 +25,6 @@ import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; -import com.codahale.metrics.MetricRegistry; import com.google.common.annotations.VisibleForTesting; import org.apache.http.client.HttpClient; import org.apache.http.impl.client.CloseableHttpClient; @@ -40,6 +39,7 @@ import org.apache.solr.core.SolrInfoBean; import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.metrics.SolrMetricProducer; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.security.HttpClientBuilderPlugin; import org.apache.solr.update.processor.DistributedUpdateProcessor; import org.apache.solr.update.processor.DistributingUpdateProcessorFactory; @@ -90,7 +90,7 @@ public class UpdateShardHandler implements SolrMetricProducer, SolrInfoBean { private final Set metricNames = ConcurrentHashMap.newKeySet(); - private MetricRegistry registry; + private SolrMetricsContext solrMetricsContext; private int socketTimeout = HttpClientUtil.DEFAULT_SO_TIMEOUT; private int connectionTimeout = HttpClientUtil.DEFAULT_CONNECT_TIMEOUT; @@ -179,14 +179,14 @@ public String getName() { } @Override - public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) { - registry = manager.registry(registryName); + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + solrMetricsContext = parentContext.getChildContext(this); String expandedScope = SolrMetricManager.mkName(scope, getCategory().name()); - updateHttpListenerFactory.initializeMetrics(manager, registryName, tag, expandedScope); - defaultConnectionManager.initializeMetrics(manager, registryName, tag, expandedScope); - updateExecutor = 
MetricUtils.instrumentedExecutorService(updateExecutor, this, registry, + updateHttpListenerFactory.initializeMetrics(solrMetricsContext, expandedScope); + defaultConnectionManager.initializeMetrics(solrMetricsContext, expandedScope); + updateExecutor = MetricUtils.instrumentedExecutorService(updateExecutor, this, solrMetricsContext.getMetricRegistry(), SolrMetricManager.mkName("updateOnlyExecutor", expandedScope, "threadPool")); - recoveryExecutor = MetricUtils.instrumentedExecutorService(recoveryExecutor, this, registry, + recoveryExecutor = MetricUtils.instrumentedExecutorService(recoveryExecutor, this, solrMetricsContext.getMetricRegistry(), SolrMetricManager.mkName("recoveryExecutor", expandedScope, "threadPool")); } @@ -206,8 +206,8 @@ public Set getMetricNames() { } @Override - public MetricRegistry getMetricRegistry() { - return registry; + public SolrMetricsContext getSolrMetricsContext() { + return solrMetricsContext; } // if you are looking for a client to use, it's probably this one. @@ -259,6 +259,11 @@ public void close() { } catch (Exception e) { throw new RuntimeException(e); } finally { + try { + SolrMetricProducer.super.close(); + } catch (Exception e) { + // do nothing + } IOUtils.closeQuietly(updateOnlyClient); HttpClientUtil.close(recoveryOnlyClient); HttpClientUtil.close(defaultClient); diff --git a/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java b/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java index 79faf2158f3c..f0972dbae9a7 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java +++ b/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java @@ -195,8 +195,9 @@ public static Set computeInPlaceUpdatableFields(AddUpdateCommand cmd) th } // else it's a atomic update map... 
Map fieldValueMap = (Map)fieldValue; - for (String op : fieldValueMap.keySet()) { - Object obj = fieldValueMap.get(op); + for (Entry entry : fieldValueMap.entrySet()) { + String op = entry.getKey(); + Object obj = entry.getValue(); if (!op.equals("set") && !op.equals("inc")) { // not a supported in-place update op return Collections.emptySet(); @@ -540,9 +541,9 @@ protected void doRemoveRegex(SolrInputDocument toDoc, SolrInputField sif, Object private Collection preparePatterns(Object fieldVal) { final Collection patterns = new LinkedHashSet<>(1); if (fieldVal instanceof Collection) { - Collection patternVals = (Collection) fieldVal; - for (String patternVal : patternVals) { - patterns.add(Pattern.compile(patternVal)); + Collection patternVals = (Collection) fieldVal; + for (Object patternVal : patternVals) { + patterns.add(Pattern.compile(patternVal.toString())); } } else { patterns.add(Pattern.compile(fieldVal.toString())); diff --git a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java index 01a2e2502ce4..98b3ab42762d 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java +++ b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java @@ -1090,15 +1090,17 @@ protected void doLocalCommit(CommitUpdateCommand cmd) throws IOException { } @Override - public void finish() throws IOException { - assertNotFinished(); + public final void finish() throws IOException { + assert ! finished : "lifecycle sanity check"; + finished = true; + + doDistribFinish(); super.finish(); } - protected void assertNotFinished() { - assert ! 
finished : "lifecycle sanity check"; - finished = true; + protected void doDistribFinish() throws IOException { + // no-op for derived classes to implement } /** diff --git a/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java index 63e2dfd82a45..d3c1087ff5d6 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java +++ b/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java @@ -89,6 +89,16 @@ public class DistributedZkUpdateProcessor extends DistributedUpdateProcessor { private boolean readOnlyCollection = false; private CoreUpdateTracker sharedCoreTracker; + // The cached immutable clusterState for the update... usually refreshed for each individual update. + // Different parts of this class used to request current clusterState views, which lead to subtle bugs and race conditions + // such as SOLR-13815 (live split data loss.) Most likely, the only valid reasons for updating clusterState should be on + // certain types of failure + retry. + // Note: there may be other races related to + // 1) cluster topology change across multiple adds + // 2) use of methods directly on zkController that use a different clusterState + // 3) in general, not controlling carefully enough exactly when our view of clusterState is updated + protected ClusterState clusterState; + // should we clone the document before sending it to replicas? 
// this is set to true in the constructor if the next processors in the chain // are custom and may modify the SolrInputDocument racing with its serialization for replication @@ -104,7 +114,7 @@ public DistributedZkUpdateProcessor(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) { this(req,rsp,next, new CoreUpdateTracker(req.getCore().getCoreContainer())); } - + @VisibleForTesting protected DistributedZkUpdateProcessor(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next, CoreUpdateTracker sharedCoreTracker) { @@ -115,7 +125,8 @@ protected DistributedZkUpdateProcessor(SolrQueryRequest req, cmdDistrib = new SolrCmdDistributor(cc.getUpdateShardHandler()); cloneRequiredOnLeader = isCloneRequiredOnLeader(next); collection = cloudDesc.getCollectionName(); - DocCollection coll = zkController.getClusterState().getCollectionOrNull(collection); + clusterState = zkController.getClusterState(); + DocCollection coll = clusterState.getCollectionOrNull(collection); if (coll != null) { // check readOnly property in coll state readOnlyCollection = coll.isReadOnly(); @@ -151,6 +162,7 @@ protected Replica.Type computeReplicaType() { @Override public void processCommit(CommitUpdateCommand cmd) throws IOException { + clusterState = zkController.getClusterState(); assert TestInjection.injectFailUpdateRequests(); @@ -219,7 +231,7 @@ public void processCommit(CommitUpdateCommand cmd) throws IOException { params.set(DISTRIB_UPDATE_PARAM, DistribPhase.FROMLEADER.toString()); params.set(COMMIT_END_POINT, "replicas"); - + useNodes = getReplicaNodesForLeader(cloudDesc.getShardId(), leaderReplica); @@ -241,6 +253,8 @@ public void processCommit(CommitUpdateCommand cmd) throws IOException { @Override public void processAdd(AddUpdateCommand cmd) throws IOException { + clusterState = zkController.getClusterState(); + assert TestInjection.injectFailUpdateRequests(); if (isReadOnly()) { @@ -264,7 +278,7 @@ public void processAdd(AddUpdateCommand cmd) 
throws IOException { protected void doDistribAdd(AddUpdateCommand cmd) throws IOException { if (isLeader && !isSubShardLeader) { - DocCollection coll = zkController.getClusterState().getCollection(collection); + DocCollection coll = clusterState.getCollection(collection); List subShardLeaders = getSubShardLeaders(coll, cloudDesc.getShardId(), cmd.getRootIdUsingRouteParam(), cmd.getSolrInputDocument()); // the list will actually have only one element for an add request if (subShardLeaders != null && !subShardLeaders.isEmpty()) { @@ -275,7 +289,7 @@ protected void doDistribAdd(AddUpdateCommand cmd) throws IOException { params.set(DISTRIB_FROM_PARENT, cloudDesc.getShardId()); cmdDistrib.distribAdd(cmd, subShardLeaders, params, true); } - final List nodesByRoutingRules = getNodesByRoutingRules(zkController.getClusterState(), coll, cmd.getRootIdUsingRouteParam(), cmd.getSolrInputDocument()); + final List nodesByRoutingRules = getNodesByRoutingRules(clusterState, coll, cmd.getRootIdUsingRouteParam(), cmd.getSolrInputDocument()); if (nodesByRoutingRules != null && !nodesByRoutingRules.isEmpty()) { ModifiableSolrParams params = new ModifiableSolrParams(filterParams(req.getParams())); params.set(DISTRIB_UPDATE_PARAM, DistribPhase.FROMLEADER.toString()); @@ -319,6 +333,8 @@ protected void doDistribAdd(AddUpdateCommand cmd) throws IOException { @Override public void processDelete(DeleteUpdateCommand cmd) throws IOException { + clusterState = zkController.getClusterState(); + if (isReadOnly()) { throw new SolrException(ErrorCode.FORBIDDEN, "Collection " + collection + " is read-only."); } @@ -344,7 +360,7 @@ protected void doDeleteById(DeleteUpdateCommand cmd) throws IOException { @Override protected void doDistribDeleteById(DeleteUpdateCommand cmd) throws IOException { if (isLeader && !isSubShardLeader) { - DocCollection coll = zkController.getClusterState().getCollection(collection); + DocCollection coll = clusterState.getCollection(collection); List subShardLeaders = 
getSubShardLeaders(coll, cloudDesc.getShardId(), cmd.getId(), null); // the list will actually have only one element for an add request if (subShardLeaders != null && !subShardLeaders.isEmpty()) { @@ -356,7 +372,7 @@ protected void doDistribDeleteById(DeleteUpdateCommand cmd) throws IOException { cmdDistrib.distribDelete(cmd, subShardLeaders, params, true, null, null); } - final List nodesByRoutingRules = getNodesByRoutingRules(zkController.getClusterState(), coll, cmd.getId(), null); + final List nodesByRoutingRules = getNodesByRoutingRules(clusterState, coll, cmd.getId(), null); if (nodesByRoutingRules != null && !nodesByRoutingRules.isEmpty()) { ModifiableSolrParams params = new ModifiableSolrParams(filterParams(req.getParams())); params.set(DISTRIB_UPDATE_PARAM, DistribPhase.FROMLEADER.toString()); @@ -399,7 +415,7 @@ protected void doDeleteByQuery(DeleteUpdateCommand cmd) throws IOException { // - log + execute the local DBQ DistribPhase phase = DistribPhase.parseParam(req.getParams().get(DISTRIB_UPDATE_PARAM)); - DocCollection coll = zkController.getClusterState().getCollection(collection); + DocCollection coll = clusterState.getCollection(collection); if (DistribPhase.NONE == phase) { if (rollupReplicationTracker == null) { @@ -518,7 +534,7 @@ protected void doDistribDeleteByQuery(DeleteUpdateCommand cmd, List nodesByRoutingRules = getNodesByRoutingRules(zkController.getClusterState(), coll, null, null); + final List nodesByRoutingRules = getNodesByRoutingRules(clusterState, coll, null, null); if (nodesByRoutingRules != null && !nodesByRoutingRules.isEmpty()) { params = new ModifiableSolrParams(filterParams(req.getParams())); params.set(DISTRIB_UPDATE_PARAM, DistribPhase.FROMLEADER.toString()); @@ -622,8 +638,8 @@ protected List setupRequest(String id, SolrInputDocumen return null; } - ClusterState cstate = zkController.getClusterState(); - DocCollection coll = cstate.getCollection(collection); + clusterState = zkController.getClusterState(); + DocCollection 
coll = clusterState.getCollection(collection); Slice slice = coll.getRouter().getTargetSlice(id, doc, route, req.getParams(), coll); if (slice == null) { @@ -684,7 +700,6 @@ protected List setupRequest(String id, SolrInputDocumen // that means I want to forward onto my replicas... // so get the replicas... forwardToLeader = false; - ClusterState clusterState = zkController.getZkStateReader().getClusterState(); String leaderCoreNodeName = leaderReplica.getName(); List replicas = clusterState.getCollection(collection) .getSlice(shardId) @@ -768,7 +783,6 @@ private void checkReplicationTracker(UpdateCommand cmd) { private List getCollectionUrls(String collection, EnumSet types, boolean onlyLeaders) { - ClusterState clusterState = zkController.getClusterState(); final DocCollection docCollection = clusterState.getCollectionOrNull(collection); if (collection == null || docCollection.getSlicesMap() == null) { throw new ZooKeeperException(SolrException.ErrorCode.BAD_REQUEST, @@ -839,7 +853,6 @@ protected boolean amISubShardLeader(DocCollection coll, Slice parentSlice, Strin } protected List getReplicaNodesForLeader(String shardId, Replica leaderReplica) { - ClusterState clusterState = zkController.getZkStateReader().getClusterState(); String leaderCoreNodeName = leaderReplica.getName(); List replicas = clusterState.getCollection(collection) .getSlice(shardId) @@ -894,7 +907,7 @@ protected List getSubShardLeaders(DocCollection coll, S || coll.getRouter().isTargetSlice(docId, doc, req.getParams(), aslice.getName(), coll))) { Replica sliceLeader = aslice.getLeader(); // slice leader can be null because node/shard is created zk before leader election - if (sliceLeader != null && zkController.getClusterState().liveNodesContain(sliceLeader.getNodeName())) { + if (sliceLeader != null && clusterState.liveNodesContain(sliceLeader.getNodeName())) { if (nodes == null) nodes = new ArrayList<>(); ZkCoreNodeProps nodeProps = new ZkCoreNodeProps(sliceLeader); nodes.add(new 
SolrCmdDistributor.StdNode(nodeProps, coll.getName(), aslice.getName())); @@ -991,7 +1004,6 @@ private void doDefensiveChecks(DistribPhase phase) { if (isReplayOrPeersync) return; String from = req.getParams().get(DISTRIB_FROM); - ClusterState clusterState = zkController.getClusterState(); DocCollection docCollection = clusterState.getCollection(collection); Slice mySlice = docCollection.getSlice(cloudDesc.getShardId()); @@ -1051,38 +1063,35 @@ protected void doClose() { @Override public void processMergeIndexes(MergeIndexesCommand cmd) throws IOException { + clusterState = zkController.getClusterState(); + if (isReadOnly()) { throw new SolrException(ErrorCode.FORBIDDEN, "Collection " + collection + " is read-only."); } - + super.processMergeIndexes(cmd); } @Override public void processRollback(RollbackUpdateCommand cmd) throws IOException { + clusterState = zkController.getClusterState(); + if (isReadOnly()) { throw new SolrException(ErrorCode.FORBIDDEN, "Collection " + collection + " is read-only."); } super.processRollback(cmd); } - @Override - public void finish() throws IOException { - assertNotFinished(); - - doFinish(); - } - private void writeToShareStore() throws SolrException { - log.info("Attempting to initiate index update write to shared store for collection=" + cloudDesc.getCollectionName() + + log.info("Attempting to initiate index update write to shared store for collection=" + cloudDesc.getCollectionName() + " and shard=" + cloudDesc.getShardId() + " using core=" + req.getCore().getName()); - - sharedCoreTracker.persistShardIndexToSharedStore(zkController.zkStateReader.getClusterState(), - cloudDesc.getCollectionName(), - cloudDesc.getShardId(), + + sharedCoreTracker.persistShardIndexToSharedStore(zkController.zkStateReader.getClusterState(), + cloudDesc.getCollectionName(), + cloudDesc.getShardId(), req.getCore().getName()); } - + private void readFromSharedStoreIfNecessary() throws SolrException { String coreName = req.getCore().getName(); String 
shardName = cloudDesc.getShardId(); @@ -1095,11 +1104,13 @@ private void readFromSharedStoreIfNecessary() throws SolrException { } BlobStoreUtils.syncLocalCoreWithSharedStore(collectionName,coreName,shardName,req.getCore().getCoreContainer()); } - - + + // TODO: optionally fail if n replicas are not reached... - private void doFinish() { + protected void doDistribFinish() { + clusterState = zkController.getClusterState(); + boolean shouldUpdateTerms = isLeader && isIndexChanged; if (shouldUpdateTerms) { ZkShardTerms zkShardTerms = zkController.getShardTerms(cloudDesc.getCollectionName(), cloudDesc.getShardId()); @@ -1108,34 +1119,34 @@ private void doFinish() { } zkController.getShardTerms(collection, cloudDesc.getShardId()).ensureHighestTermsAreNotZero(); } - + /** - * Track the updated core for push to Blob store. - * - * Only, the leader node pushes the updates to blob store but the leader can be change mid update, + * Track the updated core for push to Blob store. + * + * Only, the leader node pushes the updates to blob store but the leader can be change mid update, * so we don't stop peers from pushing updates to the blob store. - * - * We also need to check for isLeader here because a peer can also receive commit message if the request was directly send to the peer. + * + * We also need to check for isLeader here because a peer can also receive commit message if the request was directly send to the peer. */ if ( updateCommand != null && - updateCommand.getClass() == CommitUpdateCommand.class && + updateCommand.getClass() == CommitUpdateCommand.class && isLeader && replicaType.equals(Replica.Type.SHARED) && !((CommitUpdateCommand) updateCommand).softCommit) { /* - * TODO SPLITSHARD triggers soft commits. + * TODO SPLITSHARD triggers soft commits. * We don't persist on softCommit because there is nothing to so we should ignore those kinds of commits. 
* Configuring behavior based on soft/hard commit seems like we're getting into an abstraction deeper then * what the DUP is concerned about so we may want to consider moving this code somewhere more appropriate - * in the future (deeper in the stack) + * in the future (deeper in the stack) */ writeToShareStore(); } - + // TODO: if not a forward and replication req is not specified, we could // send in a background thread cmdDistrib.finish(); - + List errors = cmdDistrib.getErrors(); // TODO - we may need to tell about more than one error... @@ -1263,6 +1274,7 @@ private void doFinish() { if (0 < errorsForClient.size()) { throw new DistributedUpdatesAsyncException(errorsForClient); } + } /** diff --git a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java index c45f0c67eea6..eb3c08b21694 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java +++ b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java @@ -328,7 +328,7 @@ public boolean equals(Object obj) { public static class LazyUpdateProcessorFactoryHolder extends PluginBag.PluginHolder { private volatile UpdateRequestProcessorFactory lazyFactory; - public LazyUpdateProcessorFactoryHolder(final PluginBag.PluginHolder holder) { + public LazyUpdateProcessorFactoryHolder(final PluginBag.LazyPluginHolder holder) { super(holder.getPluginInfo()); lazyFactory = new LazyUpdateRequestProcessorFactory(holder); } @@ -340,20 +340,26 @@ public UpdateRequestProcessorFactory get() { } public class LazyUpdateRequestProcessorFactory extends UpdateRequestProcessorFactory { - private final PluginBag.PluginHolder holder; + private final PluginBag.LazyPluginHolder holder; + UpdateRequestProcessorFactory delegate; - public LazyUpdateRequestProcessorFactory(PluginBag.PluginHolder holder) { + public 
LazyUpdateRequestProcessorFactory(PluginBag.LazyPluginHolder holder) { this.holder = holder; } public UpdateRequestProcessorFactory getDelegate() { - return holder.get(); + return delegate; } @Override public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) { - return holder.get().getInstance(req, rsp, next); + if (delegate != null) return delegate.getInstance(req, rsp, next); + synchronized (this) { + if (delegate == null) + delegate = (UpdateRequestProcessorFactory) holder.get(); + } + return delegate.getInstance(req, rsp, next); } } } diff --git a/solr/core/src/java/org/apache/solr/util/ConcurrentLFUCache.java b/solr/core/src/java/org/apache/solr/util/ConcurrentLFUCache.java index ec6be6233af5..c8c774923e4b 100644 --- a/solr/core/src/java/org/apache/solr/util/ConcurrentLFUCache.java +++ b/solr/core/src/java/org/apache/solr/util/ConcurrentLFUCache.java @@ -18,12 +18,15 @@ import java.lang.invoke.MethodHandles; import java.lang.ref.WeakReference; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.TimeUnit; +//import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.LongAdder; import java.util.concurrent.locks.ReentrantLock; import org.apache.lucene.util.Accountable; @@ -31,6 +34,7 @@ import org.apache.solr.common.util.Cache; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.solr.common.util.TimeSource; import static org.apache.lucene.util.RamUsageEstimator.HASHTABLE_RAM_BYTES_PER_ENTRY; import static org.apache.lucene.util.RamUsageEstimator.QUERY_DEFAULT_RAM_BYTES_USED; @@ -67,11 +71,21 @@ public class ConcurrentLFUCache implements Cache, Accountable { private final EvictionListener evictionListener; private CleanupThread 
cleanupThread; private boolean timeDecay; - private final AtomicLong ramBytes = new AtomicLong(0); + private long maxIdleTimeNs; + private final TimeSource timeSource = TimeSource.NANO_TIME; + private final AtomicLong oldestEntry = new AtomicLong(0L); + private final LongAdder ramBytes = new LongAdder(); public ConcurrentLFUCache(int upperWaterMark, final int lowerWaterMark, int acceptableSize, int initialSize, boolean runCleanupThread, boolean runNewThreadForCleanup, EvictionListener evictionListener, boolean timeDecay) { + this(upperWaterMark, lowerWaterMark, acceptableSize, initialSize, runCleanupThread, + runNewThreadForCleanup, evictionListener, timeDecay, -1); + } + + public ConcurrentLFUCache(int upperWaterMark, final int lowerWaterMark, int acceptableSize, + int initialSize, boolean runCleanupThread, boolean runNewThreadForCleanup, + EvictionListener evictionListener, boolean timeDecay, int maxIdleTimeSec) { setUpperWaterMark(upperWaterMark); setLowerWaterMark(lowerWaterMark); setAcceptableWaterMark(acceptableSize); @@ -79,12 +93,13 @@ public ConcurrentLFUCache(int upperWaterMark, final int lowerWaterMark, int acce this.evictionListener = evictionListener; setNewThreadForCleanup(runNewThreadForCleanup); setTimeDecay(timeDecay); + setMaxIdleTime(maxIdleTimeSec); setRunCleanupThread(runCleanupThread); } public ConcurrentLFUCache(int size, int lowerWatermark) { this(size, lowerWatermark, (int) Math.floor((lowerWatermark + size) / 2), - (int) Math.ceil(0.75 * size), false, false, null, true); + (int) Math.ceil(0.75 * size), false, false, null, true, -1); } public void setAlive(boolean live) { @@ -110,13 +125,25 @@ public void setTimeDecay(boolean timeDecay) { this.timeDecay = timeDecay; } + public void setMaxIdleTime(int maxIdleTime) { + long oldMaxIdleTimeNs = maxIdleTimeNs; + maxIdleTimeNs = maxIdleTime > 0 ? 
TimeUnit.NANOSECONDS.convert(maxIdleTime, TimeUnit.SECONDS) : Long.MAX_VALUE; + if (cleanupThread != null && maxIdleTimeNs < oldMaxIdleTimeNs) { + cleanupThread.wakeThread(); + } + } + public synchronized void setNewThreadForCleanup(boolean newThreadForCleanup) { this.newThreadForCleanup = newThreadForCleanup; + if (newThreadForCleanup) { + setRunCleanupThread(false); + } } public synchronized void setRunCleanupThread(boolean runCleanupThread) { this.runCleanupThread = runCleanupThread; if (this.runCleanupThread) { + newThreadForCleanup = false; if (cleanupThread == null) { cleanupThread = new CleanupThread(this); cleanupThread.start(); @@ -133,22 +160,21 @@ public synchronized void setRunCleanupThread(boolean runCleanupThread) { public V get(K key) { CacheEntry e = map.get(key); if (e == null) { - if (islive) stats.missCounter.incrementAndGet(); - return null; - } - if (islive) { - e.lastAccessed = stats.accessCounter.incrementAndGet(); - e.hits.incrementAndGet(); + if (islive) stats.missCounter.increment(); + } else if (islive) { + e.lastAccessed = timeSource.getEpochTimeNs(); + stats.accessCounter.increment(); + e.hits.increment(); } - return e.value; + return e != null ? e.value : null; } @Override public V remove(K key) { CacheEntry cacheEntry = map.remove(key); if (cacheEntry != null) { - stats.size.decrementAndGet(); - ramBytes.addAndGet(-cacheEntry.ramBytesUsed() - HASHTABLE_RAM_BYTES_PER_ENTRY); + stats.size.decrement(); + ramBytes.add(-cacheEntry.ramBytesUsed() - HASHTABLE_RAM_BYTES_PER_ENTRY); return cacheEntry.value; } return null; @@ -157,21 +183,33 @@ public V remove(K key) { @Override public V put(K key, V val) { if (val == null) return null; - CacheEntry e = new CacheEntry<>(key, val, stats.accessCounter.incrementAndGet()); - CacheEntry oldCacheEntry = map.put(key, e); + CacheEntry e = new CacheEntry<>(key, val, timeSource.getEpochTimeNs()); + return putCacheEntry(e); + } + + /** + * Visible for testing to create synthetic cache entries. 
+ * @lucene.internal + */ + public V putCacheEntry(CacheEntry e) { + stats.accessCounter.increment(); + // initialize oldestEntry + oldestEntry.updateAndGet(x -> x > e.lastAccessed || x == 0 ? e.lastAccessed : x); + CacheEntry oldCacheEntry = map.put(e.key, e); int currentSize; if (oldCacheEntry == null) { - currentSize = stats.size.incrementAndGet(); - ramBytes.addAndGet(e.ramBytesUsed() + HASHTABLE_RAM_BYTES_PER_ENTRY); // added key + value + entry + stats.size.increment(); + currentSize = stats.size.intValue(); + ramBytes.add(e.ramBytesUsed() + HASHTABLE_RAM_BYTES_PER_ENTRY); // added key + value + entry } else { - currentSize = stats.size.get(); - ramBytes.addAndGet(-oldCacheEntry.ramBytesUsed()); - ramBytes.addAndGet(e.ramBytesUsed()); + currentSize = stats.size.intValue(); + ramBytes.add(-oldCacheEntry.ramBytesUsed()); + ramBytes.add(e.ramBytesUsed()); } if (islive) { - stats.putCounter.incrementAndGet(); + stats.putCounter.increment(); } else { - stats.nonLivePutCounter.incrementAndGet(); + stats.nonLivePutCounter.increment(); } // Check if we need to clear out old entries from the cache. @@ -184,7 +222,9 @@ public V put(K key, V val) { // // Thread safety note: isCleaning read is piggybacked (comes after) other volatile reads // in this method. - if (currentSize > upperWaterMark && !isCleaning) { + boolean evictByIdleTime = maxIdleTimeNs != Long.MAX_VALUE; + long idleCutoff = evictByIdleTime ? timeSource.getEpochTimeNs() - maxIdleTimeNs : -1L; + if ((currentSize > upperWaterMark || (evictByIdleTime && oldestEntry.get() < idleCutoff)) && !isCleaning) { if (newThreadForCleanup) { new Thread(this::markAndSweep).start(); } else if (cleanupThread != null) { @@ -198,35 +238,66 @@ public V put(K key, V val) { /** * Removes items from the cache to bring the size down to the lowerWaterMark. + *

Visible for unit testing.

+ * @lucene.internal */ - private void markAndSweep() { + public void markAndSweep() { if (!markAndSweepLock.tryLock()) return; try { long lowHitCount = this.lowHitCount; isCleaning = true; this.lowHitCount = lowHitCount; // volatile write to make isCleaning visible - int sz = stats.size.get(); - if (sz <= upperWaterMark) { + int sz = stats.size.intValue(); + boolean evictByIdleTime = maxIdleTimeNs != Long.MAX_VALUE; + long idleCutoff = evictByIdleTime ? timeSource.getEpochTimeNs() - maxIdleTimeNs : -1L; + if (sz <= upperWaterMark && (evictByIdleTime && oldestEntry.get() > idleCutoff)) { /* SOLR-7585: Even though we acquired a lock, multiple threads might detect a need for calling this method. * Locking keeps these from executing at the same time, so they run sequentially. The second and subsequent * sequential runs of this method don't need to be done, since there are no elements to remove. */ return; } - + + // first evict by idleTime - it's less costly to do an additional pass over the + // map than to manage the outdated entries in a TreeSet + if (evictByIdleTime) { + long currentOldestEntry = Long.MAX_VALUE; + Iterator>> iterator = map.entrySet().iterator(); + while (iterator.hasNext()) { + Map.Entry> entry = iterator.next(); + entry.getValue().lastAccessedCopy = entry.getValue().lastAccessed; + if (entry.getValue().lastAccessedCopy < idleCutoff) { + iterator.remove(); + postRemoveEntry(entry.getValue()); + stats.evictionIdleCounter.increment(); + } else { + if (entry.getValue().lastAccessedCopy < currentOldestEntry) { + currentOldestEntry = entry.getValue().lastAccessedCopy; + } + } + } + if (currentOldestEntry != Long.MAX_VALUE) { + oldestEntry.set(currentOldestEntry); + } + // refresh size and maybe return + sz = stats.size.intValue(); + if (sz <= upperWaterMark) { + return; + } + } int wantToRemove = sz - lowerWaterMark; - + TreeSet> tree = new TreeSet<>(); - + for (CacheEntry ce : map.values()) { // set hitsCopy to avoid later Atomic reads. 
Primitive types are faster than the atomic get(). - ce.hitsCopy = ce.hits.get(); + ce.hitsCopy = ce.hits.longValue(); ce.lastAccessedCopy = ce.lastAccessed; if (timeDecay) { - ce.hits.set(ce.hitsCopy >>> 1); + ce.hits.reset(); + ce.hits.add(ce.hitsCopy >>> 1); } - if (tree.size() < wantToRemove) { tree.add(ce); } else { @@ -253,6 +324,18 @@ private void markAndSweep() { for (CacheEntry e : tree) { evictEntry(e.key); } + if (evictByIdleTime) { + // do a full pass because we don't what is the max. age of remaining items + long currentOldestEntry = Long.MAX_VALUE; + for (CacheEntry e : map.values()) { + if (e.lastAccessedCopy < currentOldestEntry) { + currentOldestEntry = e.lastAccessedCopy; + } + } + if (currentOldestEntry != Long.MAX_VALUE) { + oldestEntry.set(currentOldestEntry); + } + } } finally { isCleaning = false; // set before markAndSweep.unlock() for visibility markAndSweepLock.unlock(); @@ -261,10 +344,14 @@ private void markAndSweep() { private void evictEntry(K key) { CacheEntry o = map.remove(key); + postRemoveEntry(o); + } + + private void postRemoveEntry(CacheEntry o) { if (o == null) return; - ramBytes.addAndGet(-(o.ramBytesUsed() + HASHTABLE_RAM_BYTES_PER_ENTRY)); - stats.size.decrementAndGet(); - stats.evictionCounter.incrementAndGet(); + ramBytes.add(-(o.ramBytesUsed() + HASHTABLE_RAM_BYTES_PER_ENTRY)); + stats.size.decrement(); + stats.evictionCounter.increment(); if (evictionListener != null) evictionListener.evictedEntry(o.key, o.value); } @@ -287,7 +374,7 @@ public Map getLeastUsedItems(int n) { try { for (Map.Entry> entry : map.entrySet()) { CacheEntry ce = entry.getValue(); - ce.hitsCopy = ce.hits.get(); + ce.hitsCopy = ce.hits.longValue(); ce.lastAccessedCopy = ce.lastAccessed; if (tree.size() < n) { tree.add(ce); @@ -331,7 +418,7 @@ public Map getMostUsedItems(int n) { try { for (Map.Entry> entry : map.entrySet()) { CacheEntry ce = entry.getValue(); - ce.hitsCopy = ce.hits.get(); + ce.hitsCopy = ce.hits.longValue(); ce.lastAccessedCopy = 
ce.lastAccessed; if (tree.size() < n) { tree.add(ce); @@ -357,13 +444,13 @@ public Map getMostUsedItems(int n) { } public int size() { - return stats.size.get(); + return stats.size.intValue(); } @Override public void clear() { map.clear(); - ramBytes.set(0); + ramBytes.reset(); } public Map> getMap() { @@ -372,7 +459,7 @@ public Map> getMap() { @Override public long ramBytesUsed() { - return BASE_RAM_BYTES_USED + ramBytes.get(); + return BASE_RAM_BYTES_USED + ramBytes.sum(); } public static class CacheEntry implements Comparable>, Accountable { @@ -383,7 +470,7 @@ public static class CacheEntry implements Comparable>, Ac final K key; final V value; final long ramBytesUsed; - volatile AtomicLong hits = new AtomicLong(0); + final LongAdder hits = new LongAdder(); long hitsCopy = 0; volatile long lastAccessed = 0; long lastAccessedCopy = 0; @@ -420,7 +507,7 @@ public boolean equals(Object obj) { @Override public String toString() { - return "key: " + key + " value: " + value + " hits:" + hits.get(); + return "key: " + key + " value: " + value + " hits:" + hits.longValue(); } @Override @@ -449,51 +536,63 @@ public Stats getStats() { public static class Stats implements Accountable { private static final long RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Stats.class) + - 5 * RamUsageEstimator.primitiveSizes.get(long.class) + - RamUsageEstimator.primitiveSizes.get(int.class); - - private final AtomicLong accessCounter = new AtomicLong(0), - putCounter = new AtomicLong(0), - nonLivePutCounter = new AtomicLong(0), - missCounter = new AtomicLong(); - private final AtomicInteger size = new AtomicInteger(); - private AtomicLong evictionCounter = new AtomicLong(); + // LongAdder + 7 * ( + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + + RamUsageEstimator.primitiveSizes.get(long.class) + + 2 * (RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.primitiveSizes.get(long.class)) + ); + + private final LongAdder accessCounter = new LongAdder(); + private final 
LongAdder putCounter = new LongAdder(); + private final LongAdder nonLivePutCounter = new LongAdder(); + private final LongAdder missCounter = new LongAdder(); + private final LongAdder size = new LongAdder(); + private LongAdder evictionCounter = new LongAdder(); + private LongAdder evictionIdleCounter = new LongAdder(); public long getCumulativeLookups() { - return (accessCounter.get() - putCounter.get() - nonLivePutCounter.get()) + missCounter.get(); + return (accessCounter.longValue() - putCounter.longValue() - nonLivePutCounter.longValue()) + missCounter.longValue(); } public long getCumulativeHits() { - return accessCounter.get() - putCounter.get() - nonLivePutCounter.get(); + return accessCounter.longValue() - putCounter.longValue() - nonLivePutCounter.longValue(); } public long getCumulativePuts() { - return putCounter.get(); + return putCounter.longValue(); } public long getCumulativeEvictions() { - return evictionCounter.get(); + return evictionCounter.longValue(); + } + + public long getCumulativeIdleEvictions() { + return evictionIdleCounter.longValue(); } public int getCurrentSize() { - return size.get(); + return size.intValue(); } public long getCumulativeNonLivePuts() { - return nonLivePutCounter.get(); + return nonLivePutCounter.longValue(); } public long getCumulativeMisses() { - return missCounter.get(); + return missCounter.longValue(); } public void add(Stats other) { - accessCounter.addAndGet(other.accessCounter.get()); - putCounter.addAndGet(other.putCounter.get()); - nonLivePutCounter.addAndGet(other.nonLivePutCounter.get()); - missCounter.addAndGet(other.missCounter.get()); - evictionCounter.addAndGet(other.evictionCounter.get()); - size.set(Math.max(size.get(), other.size.get())); + accessCounter.add(other.accessCounter.longValue()); + putCounter.add(other.putCounter.longValue()); + nonLivePutCounter.add(other.nonLivePutCounter.longValue()); + missCounter.add(other.missCounter.longValue()); + 
evictionCounter.add(other.evictionCounter.longValue()); + evictionIdleCounter.add(other.evictionIdleCounter.longValue()); + long maxSize = Math.max(size.longValue(), other.size.longValue()); + size.reset(); + size.add(maxSize); } @Override @@ -518,15 +617,18 @@ public CleanupThread(ConcurrentLFUCache c) { @Override public void run() { while (true) { + ConcurrentLFUCache c = cache.get(); + if(c == null) break; synchronized (this) { if (stop) break; + long waitTimeMs = c.maxIdleTimeNs != Long.MAX_VALUE ? TimeUnit.MILLISECONDS.convert(c.maxIdleTimeNs, TimeUnit.NANOSECONDS) : 0L; try { - this.wait(); + this.wait(waitTimeMs); } catch (InterruptedException e) { } } if (stop) break; - ConcurrentLFUCache c = cache.get(); + c = cache.get(); if (c == null) break; c.markAndSweep(); } diff --git a/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java b/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java index 75f608f3e851..4b35b6303ba8 100644 --- a/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java +++ b/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java @@ -21,17 +21,20 @@ import org.apache.solr.common.util.Cache; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.solr.common.util.TimeSource; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.TimeUnit; +//import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.LongAdder; import java.util.concurrent.locks.ReentrantLock; @@ -71,15 +74,24 @@ public class ConcurrentLRUCache implements Cache, Accountable { private final Stats stats = new Stats(); private int 
acceptableWaterMark; private long oldestEntry = 0; // not volatile, only accessed in the cleaning method + private final TimeSource timeSource = TimeSource.NANO_TIME; + private final AtomicLong oldestEntryNs = new AtomicLong(0); + private long maxIdleTimeNs; private final EvictionListener evictionListener; private CleanupThread cleanupThread; private boolean runCleanupThread; private long ramLowerWatermark, ramUpperWatermark; - private final AtomicLong ramBytes = new AtomicLong(0); + private final LongAdder ramBytes = new LongAdder(); public ConcurrentLRUCache(long ramLowerWatermark, long ramUpperWatermark, boolean runCleanupThread, EvictionListener evictionListener) { + this(ramLowerWatermark, ramUpperWatermark, runCleanupThread, evictionListener, -1); + } + + public ConcurrentLRUCache(long ramLowerWatermark, long ramUpperWatermark, + boolean runCleanupThread, EvictionListener evictionListener, + int maxIdleTimeSec) { this.ramLowerWatermark = ramLowerWatermark; this.ramUpperWatermark = ramUpperWatermark; @@ -91,12 +103,20 @@ public ConcurrentLRUCache(long ramLowerWatermark, long ramUpperWatermark, this.lowerWaterMark = Integer.MIN_VALUE; this.upperWaterMark = Integer.MAX_VALUE; + setMaxIdleTime(maxIdleTimeSec); setRunCleanupThread(runCleanupThread); } public ConcurrentLRUCache(int upperWaterMark, final int lowerWaterMark, int acceptableWatermark, int initialSize, boolean runCleanupThread, boolean runNewThreadForCleanup, EvictionListener evictionListener) { + this(upperWaterMark, lowerWaterMark, acceptableWatermark, initialSize, runCleanupThread, + runNewThreadForCleanup, evictionListener, -1); + } + + public ConcurrentLRUCache(int upperWaterMark, final int lowerWaterMark, int acceptableWatermark, + int initialSize, boolean runCleanupThread, boolean runNewThreadForCleanup, + EvictionListener evictionListener, int maxIdleTimeSec) { if (upperWaterMark < 1) throw new IllegalArgumentException("upperWaterMark must be > 0"); if (lowerWaterMark >= upperWaterMark) throw 
new IllegalArgumentException("lowerWaterMark must be < upperWaterMark"); @@ -106,14 +126,15 @@ public ConcurrentLRUCache(int upperWaterMark, final int lowerWaterMark, int acce this.lowerWaterMark = lowerWaterMark; this.acceptableWaterMark = acceptableWatermark; this.evictionListener = evictionListener; - setRunCleanupThread(runCleanupThread); this.ramLowerWatermark = Long.MIN_VALUE; this.ramUpperWatermark = Long.MAX_VALUE; + setMaxIdleTime(maxIdleTimeSec); + setRunCleanupThread(runCleanupThread); } public ConcurrentLRUCache(int size, int lowerWatermark) { this(size, lowerWatermark, (int) Math.floor((lowerWatermark + size) / 2), - (int) Math.ceil(0.75 * size), false, false, null); + (int) Math.ceil(0.75 * size), false, false, null, -1); } public void setAlive(boolean live) { @@ -147,6 +168,14 @@ public void setRamLowerWatermark(long ramLowerWatermark) { this.ramLowerWatermark = ramLowerWatermark; } + public void setMaxIdleTime(int maxIdleTime) { + long oldMaxIdleTimeNs = maxIdleTimeNs; + maxIdleTimeNs = maxIdleTime > 0 ? 
TimeUnit.NANOSECONDS.convert(maxIdleTime, TimeUnit.SECONDS) : Long.MAX_VALUE; + if (cleanupThread != null && maxIdleTimeNs < oldMaxIdleTimeNs) { + cleanupThread.wakeThread(); + } + } + public synchronized void setRunCleanupThread(boolean runCleanupThread) { this.runCleanupThread = runCleanupThread; if (this.runCleanupThread) { @@ -177,8 +206,8 @@ public V get(K key) { public V remove(K key) { CacheEntry cacheEntry = map.remove(key); if (cacheEntry != null) { - stats.size.decrementAndGet(); - ramBytes.addAndGet(-cacheEntry.ramBytesUsed() - HASHTABLE_RAM_BYTES_PER_ENTRY); + stats.size.decrement(); + ramBytes.add(-cacheEntry.ramBytesUsed() - HASHTABLE_RAM_BYTES_PER_ENTRY); return cacheEntry.value; } return null; @@ -187,16 +216,27 @@ public V remove(K key) { @Override public V put(K key, V val) { if (val == null) return null; - CacheEntry e = new CacheEntry<>(key, val, stats.accessCounter.incrementAndGet()); - CacheEntry oldCacheEntry = map.put(key, e); + CacheEntry e = new CacheEntry<>(key, val, timeSource.getEpochTimeNs(), stats.accessCounter.incrementAndGet()); + return putCacheEntry(e); + } + + /** + * Visible for testing to create synthetic cache entries. + * @lucene.internal + */ + public V putCacheEntry(CacheEntry e) { + // initialize oldestEntryNs + oldestEntryNs.updateAndGet(x -> x > e.createTime || x == 0 ? 
e.createTime : x); + CacheEntry oldCacheEntry = map.put(e.key, e); int currentSize; if (oldCacheEntry == null) { - currentSize = stats.size.incrementAndGet(); - ramBytes.addAndGet(e.ramBytesUsed() + HASHTABLE_RAM_BYTES_PER_ENTRY); // added key + value + entry + stats.size.increment(); + currentSize = stats.size.intValue(); + ramBytes.add(e.ramBytesUsed() + HASHTABLE_RAM_BYTES_PER_ENTRY); // added key + value + entry } else { - currentSize = stats.size.get(); - ramBytes.addAndGet(-oldCacheEntry.ramBytesUsed()); - ramBytes.addAndGet(e.ramBytesUsed()); + currentSize = stats.size.intValue(); + ramBytes.add(-oldCacheEntry.ramBytesUsed()); + ramBytes.add(e.ramBytesUsed()); } if (islive) { stats.putCounter.increment(); @@ -214,7 +254,8 @@ public V put(K key, V val) { // // Thread safety note: isCleaning read is piggybacked (comes after) other volatile reads // in this method. - if ((currentSize > upperWaterMark || ramBytes.get() > ramUpperWatermark) && !isCleaning) { + long idleCutoff = timeSource.getEpochTimeNs() - maxIdleTimeNs; + if ((currentSize > upperWaterMark || ramBytes.sum() > ramUpperWatermark || oldestEntryNs.get() < idleCutoff) && !isCleaning) { if (newThreadForCleanup) { new Thread(this::markAndSweep).start(); } else if (cleanupThread != null){ @@ -228,16 +269,11 @@ public V put(K key, V val) { /** * Removes items from the cache to bring the size down - * to an acceptable value ('acceptableWaterMark'). - *

- * It is done in two stages. In the first stage, least recently used items are evicted. - * If, after the first stage, the cache size is still greater than 'acceptableSize' - * config parameter, the second stage takes over. - *

- * The second stage is more intensive and tries to bring down the cache size - * to the 'lowerWaterMark' config parameter. + * to an acceptable value. + *

Visible for unit testing.

+ * @lucene.internal */ - private void markAndSweep() { + public void markAndSweep() { // if we want to keep at least 1000 entries, then timestamps of // current through current-1000 are guaranteed not to be the oldest (but that does // not mean there are 1000 entries in that group... it's actually anywhere between @@ -248,6 +284,12 @@ private void markAndSweep() { if (!markAndSweepLock.tryLock()) return; try { + if (maxIdleTimeNs != Long.MAX_VALUE) { + long idleCutoff = timeSource.getEpochTimeNs() - maxIdleTimeNs; + if (oldestEntryNs.get() < idleCutoff) { + markAndSweepByIdleTime(); + } + } if (upperWaterMark < size()) { markAndSweepByCacheSize(); } else if (ramUpperWatermark < ramBytesUsed()) { @@ -263,9 +305,35 @@ private void markAndSweep() { } /* - Must be called after acquiring markAndSweeoLock + Must be called after acquiring markAndSweepLock + */ + private void markAndSweepByIdleTime() { + assert markAndSweepLock.isHeldByCurrentThread() : "markAndSweepLock held by another thread"; + Iterator>> iterator = map.entrySet().iterator(); + long idleCutoff = timeSource.getEpochTimeNs() - maxIdleTimeNs; + long currentOldestEntry = Long.MAX_VALUE; + while (iterator.hasNext()) { + Map.Entry> entry = iterator.next(); + if (entry.getValue().createTime < idleCutoff) { + iterator.remove(); + stats.evictionIdleCounter.increment(); + postRemoveEntry(entry.getValue()); + } else { + if (entry.getValue().createTime < currentOldestEntry) { + currentOldestEntry = entry.getValue().createTime; + } + } + } + if (currentOldestEntry != Long.MAX_VALUE) { + oldestEntryNs.set(currentOldestEntry); + } + } + + /* + Must be called after acquiring markAndSweepLock */ private void markAndSweepByRamSize() { + assert markAndSweepLock.isHeldByCurrentThread() : "markAndSweepLock held by another thread"; List> entriesInAccessOrder = new ArrayList<>(map.size()); map.forEach((o, kvCacheEntry) -> { kvCacheEntry.lastAccessedCopy = kvCacheEntry.lastAccessed; // important because we want to avoid 
volatile read during comparisons @@ -278,22 +346,32 @@ private void markAndSweepByRamSize() { for (int i = entriesInAccessOrder.size() - 1; i >= 0; i--) { CacheEntry kvCacheEntry = entriesInAccessOrder.get(i); evictEntry(kvCacheEntry.key); - if (ramBytes.get() <= ramLowerWatermark) { + if (ramBytes.sum() <= ramLowerWatermark) { break; // we are done! } } } /* - Must be called after acquiring markAndSweeoLock + * Removes items from the cache to bring the size down + * to an acceptable value ('acceptableWaterMark'). + *

+ * It is done in two stages. In the first stage, least recently used items are evicted. + * If, after the first stage, the cache size is still greater than 'acceptableSize' + * config parameter, the second stage takes over. + *

+ *

The second stage is more intensive and tries to bring down the cache size + * to the 'lowerWaterMark' config parameter.

+ * Must be called after acquiring markAndSweepLock */ private void markAndSweepByCacheSize() { + assert markAndSweepLock.isHeldByCurrentThread() : "markAndSweepLock held by another thread"; long oldestEntry = this.oldestEntry; isCleaning = true; this.oldestEntry = oldestEntry; // volatile write to make isCleaning visible long timeCurrent = stats.accessCounter.longValue(); - int sz = stats.size.get(); + int sz = stats.size.intValue(); int numRemoved = 0; int numKept = 0; @@ -507,10 +585,14 @@ public CacheEntry myInsertWithOverflow(CacheEntry element) { private void evictEntry(K key) { CacheEntry o = map.remove(key); + postRemoveEntry(o); + } + + private void postRemoveEntry(CacheEntry o) { if (o == null) return; - ramBytes.addAndGet(-(o.ramBytesUsed() + HASHTABLE_RAM_BYTES_PER_ENTRY)); - stats.size.decrementAndGet(); - stats.evictionCounter.incrementAndGet(); + ramBytes.add(-(o.ramBytesUsed() + HASHTABLE_RAM_BYTES_PER_ENTRY)); + stats.size.decrement(); + stats.evictionCounter.increment(); if(evictionListener != null) evictionListener.evictedEntry(o.key,o.value); } @@ -580,13 +662,13 @@ public Map getLatestAccessedItems(int n) { } public int size() { - return stats.size.get(); + return stats.size.intValue(); } @Override public void clear() { map.clear(); - ramBytes.set(0); + ramBytes.reset(); } public Map> getMap() { @@ -598,14 +680,16 @@ public static class CacheEntry implements Comparable>, Acco final K key; final V value; + final long createTime; final long ramBytesUsed; // cache volatile long lastAccessed = 0; long lastAccessedCopy = 0; - public CacheEntry(K key, V value, long lastAccessed) { + public CacheEntry(K key, V value, long createTime, long lastAccessed) { this.key = key; this.value = value; + this.createTime = createTime; this.lastAccessed = lastAccessed; this.ramBytesUsed = BASE_RAM_BYTES_USED + @@ -670,22 +754,21 @@ public static class Stats implements Accountable { // accounts for field refs RamUsageEstimator.shallowSizeOfInstance(Stats.class) + // 
LongAdder - 3 * ( + 6 * ( RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + RamUsageEstimator.primitiveSizes.get(long.class) + 2 * (RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.primitiveSizes.get(long.class)) ) + // AtomicLong - 2 * RamUsageEstimator.primitiveSizes.get(long.class) + - // AtomicInteger - RamUsageEstimator.primitiveSizes.get(int.class); + RamUsageEstimator.primitiveSizes.get(long.class); private final AtomicLong accessCounter = new AtomicLong(0); private final LongAdder putCounter = new LongAdder(); private final LongAdder nonLivePutCounter = new LongAdder(); private final LongAdder missCounter = new LongAdder(); - private final AtomicInteger size = new AtomicInteger(); - private AtomicLong evictionCounter = new AtomicLong(); + private final LongAdder size = new LongAdder(); + private LongAdder evictionCounter = new LongAdder(); + private LongAdder evictionIdleCounter = new LongAdder(); public long getCumulativeLookups() { return (accessCounter.longValue() - putCounter.longValue() - nonLivePutCounter.longValue()) + missCounter.longValue(); @@ -700,11 +783,15 @@ public long getCumulativePuts() { } public long getCumulativeEvictions() { - return evictionCounter.get(); + return evictionCounter.longValue(); + } + + public long getCumulativeIdleEvictions() { + return evictionIdleCounter.longValue(); } public int getCurrentSize() { - return size.get(); + return size.intValue(); } public long getCumulativeNonLivePuts() { @@ -720,8 +807,10 @@ public void add(Stats other) { putCounter.add(other.putCounter.longValue()); nonLivePutCounter.add(other.nonLivePutCounter.longValue()); missCounter.add(other.missCounter.longValue()); - evictionCounter.addAndGet(other.evictionCounter.get()); - size.set(Math.max(size.get(), other.size.get())); + evictionCounter.add(other.evictionCounter.longValue()); + long maxSize = Math.max(size.longValue(), other.size.longValue()); + size.reset(); + size.add(maxSize); } @Override @@ -746,15 +835,18 @@ public 
CleanupThread(ConcurrentLRUCache c) { @Override public void run() { while (true) { + ConcurrentLRUCache c = cache.get(); + if(c == null) break; synchronized (this) { if (stop) break; + long waitTimeMs = c.maxIdleTimeNs != Long.MAX_VALUE ? TimeUnit.MILLISECONDS.convert(c.maxIdleTimeNs, TimeUnit.NANOSECONDS) : 0L; try { - this.wait(); + this.wait(waitTimeMs); } catch (InterruptedException e) {} } if (stop) break; - ConcurrentLRUCache c = cache.get(); - if(c == null) break; + c = cache.get(); + if (c == null) break; c.markAndSweep(); } } @@ -787,7 +879,7 @@ protected void finalize() throws Throwable { @Override public long ramBytesUsed() { - return BASE_RAM_BYTES_USED + ramBytes.get(); + return BASE_RAM_BYTES_USED + ramBytes.sum(); } @Override diff --git a/solr/core/src/java/org/apache/solr/util/CryptoKeys.java b/solr/core/src/java/org/apache/solr/util/CryptoKeys.java index 5518d8705b48..faf67fda306c 100644 --- a/solr/core/src/java/org/apache/solr/util/CryptoKeys.java +++ b/solr/core/src/java/org/apache/solr/util/CryptoKeys.java @@ -21,6 +21,7 @@ import javax.crypto.IllegalBlockSizeException; import javax.crypto.spec.IvParameterSpec; import javax.crypto.spec.SecretKeySpec; + import java.lang.invoke.MethodHandles; import java.nio.ByteBuffer; import java.nio.charset.Charset; @@ -72,11 +73,11 @@ public String verify(String sig, ByteBuffer data) { boolean verified; try { verified = CryptoKeys.verify(entry.getValue(), Base64.base64ToByteArray(sig), data); - log.debug("verified {} ", verified); + log.info("verified {} ", verified); if (verified) return entry.getKey(); } catch (Exception e) { exception = e; - log.debug("NOT verified "); + log.info("NOT verified "); } } @@ -103,17 +104,22 @@ public static PublicKey getX509PublicKey(byte[] buf) * @param data The data tha is signed */ public static boolean verify(PublicKey publicKey, byte[] sig, ByteBuffer data) throws InvalidKeyException, SignatureException { - data = ByteBuffer.wrap(data.array(), data.arrayOffset(), 
data.limit()); + int oldPos = data.position(); + Signature signature = null; try { - Signature signature = Signature.getInstance("SHA1withRSA"); + signature = Signature.getInstance("SHA1withRSA"); signature.initVerify(publicKey); signature.update(data); - return signature.verify(sig); + boolean verify = signature.verify(sig); + return verify; + } catch (NoSuchAlgorithmException e) { - //wil not happen - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); + //will not happen + } finally { + //Signature.update resets the position. set it back to old + data.position(oldPos); } - + return false; } private static byte[][] evpBytesTokey(int key_len, int iv_len, MessageDigest md, diff --git a/solr/core/src/java/org/apache/solr/util/JmxUtil.java b/solr/core/src/java/org/apache/solr/util/JmxUtil.java index f27a55e7efca..16dc4e86939b 100644 --- a/solr/core/src/java/org/apache/solr/util/JmxUtil.java +++ b/solr/core/src/java/org/apache/solr/util/JmxUtil.java @@ -23,6 +23,7 @@ import javax.management.remote.JMXServiceURL; import java.io.IOException; +import java.lang.management.ManagementFactory; import java.util.List; /** @@ -31,12 +32,16 @@ public final class JmxUtil { /** - * Retrieve the first MBeanServer found. 
+ * Retrieve the first MBeanServer found and if not found return the platform mbean server * * @return the first MBeanServer found */ public static MBeanServer findFirstMBeanServer() { - return findMBeanServerForAgentId(null); + MBeanServer mBeanServer = findMBeanServerForAgentId(null); + if (mBeanServer == null) { + return ManagementFactory.getPlatformMBeanServer(); + } + return mBeanServer; } /** diff --git a/solr/core/src/java/org/apache/solr/util/RedactionUtils.java b/solr/core/src/java/org/apache/solr/util/RedactionUtils.java index 2661a289c93a..56909f40706a 100644 --- a/solr/core/src/java/org/apache/solr/util/RedactionUtils.java +++ b/solr/core/src/java/org/apache/solr/util/RedactionUtils.java @@ -17,16 +17,20 @@ package org.apache.solr.util; -import java.util.Collection; +import java.util.Comparator; +import java.util.HashMap; import java.util.HashSet; +import java.util.Map; import java.util.Set; -import java.util.TreeSet; +import java.util.TreeMap; import java.util.regex.Pattern; public class RedactionUtils { public static final String SOLR_REDACTION_SYSTEM_PATTERN_PROP = "solr.redaction.system.pattern"; private static Pattern pattern = Pattern.compile(System.getProperty(SOLR_REDACTION_SYSTEM_PATTERN_PROP, ".*password.*"), Pattern.CASE_INSENSITIVE); private static final String REDACT_STRING = "--REDACTED--"; + public static final String NODE_REDACTION_PREFIX = "N_"; + public static final String COLL_REDACTION_PREFIX = "COLL_"; private static boolean redactSystemProperty = Boolean.parseBoolean(System.getProperty("solr.redaction.system.enabled", "true")); @@ -52,27 +56,72 @@ public static void setRedactSystemProperty(boolean redactSystemProperty) { } /** - * Replace actual names found in a string with meaningless randomized names. - * @param names actual names - * @param redactionPrefix prefix to use for redacted names - * @param data string to redact - * @return redacted string where all actual names have been replaced. 
+ * A helper class to build unique mappings from original to redacted names. */ - public static String redactNames(Collection names, String redactionPrefix, String data) { - Set uniqueNames = new TreeSet<>(names); - Set uniqueCode = new HashSet<>(); - // minimal(ish) hash - int codeShift = 0; - int codeSpace = names.size(); - for (String name : uniqueNames) { + public static final class RedactionContext { + private Map redactions = new HashMap<>(); + Map> uniqueCodes = new HashMap<>(); + // minimal(ish) hash per prefix + Map codeSpaces = new HashMap<>(); + + /** + * Add a name to be redacted. + * @param name original name + * @param redactionPrefix prefix for the redacted name + */ + public void addName(String name, String redactionPrefix) { + if (redactions.containsKey(name)) { + return; + } + int codeSpace = codeSpaces.computeIfAbsent(redactionPrefix, p -> 4); int code = Math.abs(name.hashCode() % codeSpace); + Set uniqueCode = uniqueCodes.computeIfAbsent(redactionPrefix, p -> new HashSet<>()); while (uniqueCode.contains(code)) { - codeShift++; - codeSpace = names.size() << codeShift; + codeSpace = codeSpace << 1; + codeSpaces.put(redactionPrefix, codeSpace); code = Math.abs(name.hashCode() % codeSpace); } uniqueCode.add(code); - data = data.replaceAll("\\Q" + name + "\\E", redactionPrefix + Integer.toString(code, Character.MAX_RADIX)); + redactions.put(name, redactionPrefix + Integer.toString(code, Character.MAX_RADIX)); + } + + /** + * Add a name that needs to be mapped to the same redacted format as another one. 
+ * @param original original name already mapped (will be added automatically if missing) + * @param equivalent another name that needs to be mapped to the same redacted name + * @param redactionPrefix prefix for the redacted name + */ + public void addEquivalentName(String original, String equivalent, String redactionPrefix) { + if (!redactions.containsKey(original)) { + addName(original, redactionPrefix); + } + String redaction = redactions.get(original); + redactions.put(equivalent, redaction); + } + + /** + * Get a map of original to redacted names. + */ + public Map getRedactions() { + return redactions; + } + } + + /** + * Replace actual names found in a string with redacted names. + * @param redactions a map of original to redacted names + * @param data string to redact + * @return redacted string where all actual names have been replaced. + */ + public static String redactNames(Map redactions, String data) { + // replace the longest first to avoid partial replacements + Map sorted = new TreeMap<>(Comparator + .comparing(String::length) + .reversed() + .thenComparing(String::compareTo)); + sorted.putAll(redactions); + for (Map.Entry entry : sorted.entrySet()) { + data = data.replaceAll("\\Q" + entry.getKey() + "\\E", entry.getValue()); } return data; } diff --git a/solr/core/src/java/org/apache/solr/util/ReflectMapWriter.java b/solr/core/src/java/org/apache/solr/util/ReflectMapWriter.java new file mode 100644 index 000000000000..955574049ce8 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/util/ReflectMapWriter.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.util; + +import java.io.IOException; +import java.lang.reflect.Field; +import java.lang.reflect.Modifier; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.apache.solr.common.MapWriter; + +public interface ReflectMapWriter extends MapWriter { + + @Override + default void writeMap(EntryWriter ew) throws IOException { + for (Field field : this.getClass().getDeclaredFields()) { + JsonProperty prop = field.getAnnotation(JsonProperty.class); + if (prop == null) continue; + int modifiers = field.getModifiers(); + if (Modifier.isPublic(modifiers) && !Modifier.isStatic(modifiers)) { + String fname = prop.value().isEmpty() ? 
field.getName() : prop.value(); + try { + if (field.getType() == int.class) { + ew.put(fname, field.getInt(this)); + } else if (field.getType() == float.class) { + ew.put(fname, field.getFloat(this)); + } else if (field.getType() == double.class) { + ew.put(fname, field.getDouble(this)); + } else if (field.getType() == boolean.class) { + ew.put(fname, field.getBoolean(this)); + } else if (field.getType() == long.class) { + ew.put(fname, field.getLong(this)); + } else { + ew.putIfNotNull(fname, field.get(this)); + } + } catch (IllegalAccessException e) { + //it should not happen + } + } + } + } + +} diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java index f17d4b85d137..a958316e4a1f 100755 --- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java +++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java @@ -26,7 +26,6 @@ import java.io.PrintStream; import java.lang.invoke.MethodHandles; import java.net.ConnectException; -import java.net.MalformedURLException; import java.net.Socket; import java.net.SocketException; import java.net.URL; @@ -860,8 +859,6 @@ public static Object atPath(String jsonPath, Map json) { public static class AutoscalingTool extends ToolBase { - static final String NODE_REDACTION_PREFIX = "N_"; - static final String COLL_REDACTION_PREFIX = "COLL_"; public AutoscalingTool() { this(System.out); @@ -984,9 +981,10 @@ protected void runImpl(CommandLine cli) throws Exception { } } } + boolean redact = cli.hasOption("r"); if (cli.hasOption("save")) { File targetDir = new File(cli.getOptionValue("save")); - cloudManager.saveSnapshot(targetDir, true); + cloudManager.saveSnapshot(targetDir, true, redact); System.err.println("- saved autoscaling snapshot to " + targetDir.getAbsolutePath()); } HashSet liveNodes = new HashSet<>(); @@ -996,7 +994,6 @@ protected void runImpl(CommandLine cli) throws Exception { boolean withSortedNodes = cli.hasOption("n"); boolean withClusterState = 
cli.hasOption("c"); boolean withStats = cli.hasOption("stats"); - boolean redact = cli.hasOption("r"); if (cli.hasOption("all")) { withSuggestions = true; withDiagnostics = true; @@ -1005,25 +1002,11 @@ protected void runImpl(CommandLine cli) throws Exception { withStats = true; } // prepare to redact also host names / IPs in base_url and other properties - Set redactNames = new HashSet<>(); - for (String nodeName : liveNodes) { - String urlString = Utils.getBaseUrlForNodeName(nodeName, "http"); - try { - URL u = new URL(urlString); - // protocol format - redactNames.add(u.getHost() + ":" + u.getPort()); - // node name format - redactNames.add(u.getHost() + "_" + u.getPort() + "_"); - } catch (MalformedURLException e) { - log.warn("Invalid URL for node name " + nodeName + ", replacing including protocol and path", e); - redactNames.add(urlString); - redactNames.add(Utils.getBaseUrlForNodeName(nodeName, "https")); - } - } - // redact collection names too - Set redactCollections = new HashSet<>(); ClusterState clusterState = cloudManager.getClusterStateProvider().getClusterState(); - clusterState.forEachCollection(coll -> redactCollections.add(coll.getName())); + RedactionUtils.RedactionContext ctx = null; + if (redact) { + ctx = SimUtils.getRedactionContext(clusterState); + } if (!withSuggestions && !withDiagnostics) { withSuggestions = true; } @@ -1041,13 +1024,12 @@ protected void runImpl(CommandLine cli) throws Exception { } Map simulationResults = new HashMap<>(); simulate(cloudManager, config, simulationResults, saveSimulated, withClusterState, - withStats, withSuggestions, withSortedNodes, withDiagnostics, iterations); + withStats, withSuggestions, withSortedNodes, withDiagnostics, iterations, redact); results.put("simulation", simulationResults); } String data = Utils.toJSONString(results); if (redact) { - data = RedactionUtils.redactNames(redactCollections, COLL_REDACTION_PREFIX, data); - data = RedactionUtils.redactNames(redactNames, NODE_REDACTION_PREFIX, 
data); + data = RedactionUtils.redactNames(ctx.getRedactions(), data); } stdout.println(data); } @@ -1112,7 +1094,7 @@ private void simulate(SolrCloudManager cloudManager, boolean withStats, boolean withSuggestions, boolean withSortedNodes, - boolean withDiagnostics, int iterations) throws Exception { + boolean withDiagnostics, int iterations, boolean redact) throws Exception { File saveDir = null; if (saveSimulated != null) { saveDir = new File(saveSimulated); @@ -1143,10 +1125,10 @@ private void simulate(SolrCloudManager cloudManager, SnapshotCloudManager snapshotCloudManager = new SnapshotCloudManager(simCloudManager, config); if (saveDir != null) { File target = new File(saveDir, "step" + loop + "_start"); - snapshotCloudManager.saveSnapshot(target, true); + snapshotCloudManager.saveSnapshot(target, true, redact); } if (verbose) { - Map snapshot = snapshotCloudManager.getSnapshot(false); + Map snapshot = snapshotCloudManager.getSnapshot(false, redact); snapshot.remove(SnapshotCloudManager.DISTRIB_STATE_KEY); snapshot.remove(SnapshotCloudManager.MANAGER_STATE_KEY); perStep.put("snapshotStart", snapshot); @@ -1212,10 +1194,10 @@ private void simulate(SolrCloudManager cloudManager, snapshotCloudManager = new SnapshotCloudManager(simCloudManager, config); if (saveDir != null) { File target = new File(saveDir, "step" + loop + "_stop"); - snapshotCloudManager.saveSnapshot(target, true); + snapshotCloudManager.saveSnapshot(target, true, redact); } if (verbose) { - Map snapshot = snapshotCloudManager.getSnapshot(false); + Map snapshot = snapshotCloudManager.getSnapshot(false, redact); snapshot.remove(SnapshotCloudManager.DISTRIB_STATE_KEY); snapshot.remove(SnapshotCloudManager.MANAGER_STATE_KEY); perStep.put("snapshotStop", snapshot); @@ -3482,8 +3464,9 @@ protected Map startSolr(File solrHomeDir, Map startEnv = new HashMap<>(); Map procEnv = EnvironmentUtils.getProcEnvironment(); if (procEnv != null) { - for (String envVar : procEnv.keySet()) { - String envVarVal = 
procEnv.get(envVar); + for (Map.Entry entry : procEnv.entrySet()) { + String envVar = entry.getKey(); + String envVarVal = entry.getValue(); if (envVarVal != null && !"EXAMPLE".equals(envVar) && !envVar.startsWith("SOLR_")) { startEnv.put(envVar, envVarVal); } diff --git a/solr/core/src/java/org/apache/solr/util/TestInjection.java b/solr/core/src/java/org/apache/solr/util/TestInjection.java index 12de7feb2b0d..ef140d06f111 100644 --- a/solr/core/src/java/org/apache/solr/util/TestInjection.java +++ b/solr/core/src/java/org/apache/solr/util/TestInjection.java @@ -141,6 +141,10 @@ static Random random() { // non-private for testing public volatile static Integer delayBeforeSlaveCommitRefresh=null; + public volatile static Integer delayInExecutePlanAction=null; + + public volatile static boolean failInExecutePlanAction = false; + public volatile static boolean uifOutOfMemoryError = false; public volatile static Map additionalSystemProps = null; @@ -171,6 +175,8 @@ public static void reset() { failIndexFingerprintRequests = null; wrongIndexFingerprint = null; delayBeforeSlaveCommitRefresh = null; + delayInExecutePlanAction = null; + failInExecutePlanAction = false; uifOutOfMemoryError = false; notifyPauseForeverDone(); newSearcherHooks.clear(); diff --git a/solr/core/src/java/org/apache/solr/util/stats/InstrumentedHttpListenerFactory.java b/solr/core/src/java/org/apache/solr/util/stats/InstrumentedHttpListenerFactory.java index d452502da58b..c3bc3e569c44 100644 --- a/solr/core/src/java/org/apache/solr/util/stats/InstrumentedHttpListenerFactory.java +++ b/solr/core/src/java/org/apache/solr/util/stats/InstrumentedHttpListenerFactory.java @@ -21,11 +21,10 @@ import java.util.Locale; import java.util.Map; -import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; import org.apache.solr.client.solrj.impl.HttpListenerFactory; -import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.metrics.SolrMetricProducer; +import 
org.apache.solr.metrics.SolrMetricsContext; import org.eclipse.jetty.client.api.Request; import org.eclipse.jetty.client.api.Result; @@ -64,9 +63,7 @@ public interface NameStrategy { KNOWN_METRIC_NAME_STRATEGIES.put("methodOnly", METHOD_ONLY); } - protected MetricRegistry metricsRegistry; - protected SolrMetricManager metricManager; - protected String registryName; + protected SolrMetricsContext solrMetricsContext; protected String scope; protected NameStrategy nameStrategy; @@ -85,7 +82,7 @@ public RequestResponseListener get() { @Override public void onBegin(Request request) { - if (metricsRegistry != null) { + if (solrMetricsContext != null) { timerContext = timer(request).time(); } } @@ -100,14 +97,12 @@ public void onComplete(Result result) { } private Timer timer(Request request) { - return metricsRegistry.timer(nameStrategy.getNameFor(scope, request)); + return solrMetricsContext.timer(null, nameStrategy.getNameFor(scope, request)); } @Override - public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) { - this.metricManager = manager; - this.registryName = registry; - this.metricsRegistry = manager.registry(registry); + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + this.solrMetricsContext = parentContext; this.scope = scope; } } diff --git a/solr/core/src/java/org/apache/solr/util/stats/InstrumentedPoolingHttpClientConnectionManager.java b/solr/core/src/java/org/apache/solr/util/stats/InstrumentedPoolingHttpClientConnectionManager.java index 398ab8bf2bbd..c7397ba3f8c9 100644 --- a/solr/core/src/java/org/apache/solr/util/stats/InstrumentedPoolingHttpClientConnectionManager.java +++ b/solr/core/src/java/org/apache/solr/util/stats/InstrumentedPoolingHttpClientConnectionManager.java @@ -22,6 +22,7 @@ import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.metrics.SolrMetricProducer; +import 
org.apache.solr.metrics.SolrMetricsContext; /** * Sub-class of PoolingHttpClientConnectionManager which tracks metrics interesting to Solr. @@ -29,25 +30,28 @@ */ public class InstrumentedPoolingHttpClientConnectionManager extends PoolingHttpClientConnectionManager implements SolrMetricProducer { - private SolrMetricManager metricManager; - private String registryName; + private SolrMetricsContext solrMetricsContext; public InstrumentedPoolingHttpClientConnectionManager(Registry socketFactoryRegistry) { super(socketFactoryRegistry); } @Override - public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) { - this.metricManager = manager; - this.registryName = registry; - manager.registerGauge(null, registry, () -> getTotalStats().getAvailable(), - tag, true, SolrMetricManager.mkName("availableConnections", scope)); + public SolrMetricsContext getSolrMetricsContext() { + return solrMetricsContext; + } + + @Override + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + this.solrMetricsContext = parentContext.getChildContext(this); + parentContext.gauge(null, () -> getTotalStats().getAvailable(), + true, SolrMetricManager.mkName("availableConnections", scope)); // this acquires a lock on the connection pool; remove if contention sucks - manager.registerGauge(null, registry, () -> getTotalStats().getLeased(), - tag, true, SolrMetricManager.mkName("leasedConnections", scope)); - manager.registerGauge(null, registry, () -> getTotalStats().getMax(), - tag, true, SolrMetricManager.mkName("maxConnections", scope)); - manager.registerGauge(null, registry, () -> getTotalStats().getPending(), - tag, true, SolrMetricManager.mkName("pendingConnections", scope)); + parentContext.gauge(null, () -> getTotalStats().getLeased(), + true, SolrMetricManager.mkName("leasedConnections", scope)); + parentContext.gauge(null, () -> getTotalStats().getMax(), + true, SolrMetricManager.mkName("maxConnections", scope)); + 
parentContext.gauge(null, () -> getTotalStats().getPending(), + true, SolrMetricManager.mkName("pendingConnections", scope)); } } diff --git a/solr/core/src/test-files/cryptokeys/priv_key2048.pem b/solr/core/src/test-files/cryptokeys/priv_key2048.pem deleted file mode 100644 index 4d2c8c238039..000000000000 --- a/solr/core/src/test-files/cryptokeys/priv_key2048.pem +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEpQIBAAKCAQEA1fSq/8iz1sIppHhSKrC0g2uzfFKZzWZAbcvVQbyS/pwxC7VB -hR93DVINyGGT3XHnpemt/h0wrifCIEMyqSLTIhiu5bRJpfE7UO9vGgTcP5+i2wTe -cKHqrxDvbQ4D7co96Gvu2cShySbOHsFjZXL4eaqU2W2x8S7U+OjRBwtwMxB4vstX -5u75WtwVXwNRj+uXIfTTisplE/nA/slqByW4Q9QAg+du+Ejh4W7nF+Z9GRMR7MZe -c1TeGOYZd8YzYku7WyUZ1SRQ6JjaZrdphlLtysMgqP0MMajEoFs/ajeNHef0iCz0 -TnB05PQd+GPO5+JrLPZ399mucl/jM+cbixn9pwIDAQABAoIBAQCpfA51XryvU9F+ -+t1D+gSU0p00z44MeoJxN3WwhDwBOyNS/ftwA/Pf9m76m+lxEwkIkesFkIP+gXoy -6mhYOUD9EoaBaeapcEWnoHPnLZB3SfLNArdei7UHhyfSwLZ2CK4vzkg/85ahbH79 -N/6P35pbbrhI4K+DubB1mJ/0r6fqmh6/04L47/liAPvsSM9ZJIMwbuZbYY21ggI9 -ZGk+kO0C/CyzxplaVLJ8P86KnRloEfjSmMhP72z7bja/BE2NX42G12YbjY7tVMn7 -duTWU2F4JWYriWAHr+4GwODDdtvn/R5jPirDIJeHCd6Bg1t7KibHRTcgYgtwDBqG -F65g4zqRAoGBAP2fry+6uXe3rAJDJrCSKPQVTv5QhOvG1466xsOaWRSe/rx1Mvnd -Z4pe+T8bdvsvqFnNMAkZKzzPjJ+oCSVKHhcerzMm2Cw6Gpv2yywA/1VykIoZmdNM -/vHjC7w35q7xwEUHxB/rt2vvijrAYnhaq86uIXzoiqTGaKJ/z34QsCppAoGBANf1 -1wsISnZPjIipMIYtC7Co3GCUhsQ+ksVBhtsOHaKfon3Q69Qbz93l7dbCwgFbL6td -HW/ppnABZLVFHnoLJ5YrriVZ1Wizx90+RFGdNj74UTV8bfqr/C32UKTjqoYjPAZO -vEOzHkmpc9I1mrxm1Mcff5EHDFmXGXoZ2GLCpEWPAoGAOXroVFPoVtacuEKJ0Ti+ -6Vqu9XpANcNx9RollA02JTNHnmSdcf2YysZtjLznwVPyvq9/NICsyPJs93443Geo -3CqLIHesRJHCmBhdwZJUTART98iHkVkA6sc/UKAGux11Ku/wph9hCahXVqtlZct+ -5q+WTV3SljeVXUbEOtkDZAkCgYEArnd0R/xls5jmbs1IX01q4Ug56Wh0S3xFtEgQ -u013EZcnfb9Xld72Gk0TzOlANDpHk4hBLNU02c22X188lNoIHCCjqpcdel2rPIh+ -RvTcCxku+ifQ7a8dpsAUPHGUpJM4fdwD6il9cYMNB6i4njXw9gDzXOW1y3bvZR4W -GwsmDO8CgYEA5vG0TdwkvdDcsJYimm3WQJ/VnYidE6JfjnAxnPwFFPjQoDRIS32f 
-TMMJFTHSSH4xgQLEhEfaAbrkptpPORM9QAjjRx2RXoa5yu2GMpDWua4MxpHdqiSY -v/rOw+6fZbe8YC9bZ8AE+GPuHdJDQFoSU7ieCGiF/iwWB2jhwCm7OyY= ------END RSA PRIVATE KEY----- diff --git a/solr/core/src/test-files/cryptokeys/priv_key512.pem b/solr/core/src/test-files/cryptokeys/priv_key512.pem deleted file mode 100644 index 53c032c2a06c..000000000000 --- a/solr/core/src/test-files/cryptokeys/priv_key512.pem +++ /dev/null @@ -1,9 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIBOQIBAAJBAMgmSVfl+E2Nj/oKAP2TceWh17pk6Ugsw5A5nLH+OeMB/WeWJZg/ -NEDda8SXfQDEVRmw5P+2IZypPASzfCrc6yECAwEAAQJAbZFwEztky+fUSrhRRIAE -GQaZV4PIpWdEA99WJaabv+YsWN5UUd7y+Evu50mhH3RQIxQd+R6SYs1ke9OlHlV2 -cQIhAP8367gybVEu2A+Cg1fE9vbHfnHrurpDQrh9r0ZKooTtAiEAyMMxvlHlSh6Q -2cUTSxuyUEaQfN+W4efehgfIWBVlzIUCIEHBMZ0qeNnCvO36DUbuu0ZHjb9iIaDd -tXH9B8yPbCHdAiAaV3o0ZZx3MDGDUVdpuHWaENguekva0kihP24rGIul3QIgNqZS -EzA2aoQdNPl5oDfkhqAGjs5pb7qLgtmXJvVhi/Q= ------END RSA PRIVATE KEY----- diff --git a/solr/core/src/test-files/cryptokeys/pub_key2048.der b/solr/core/src/test-files/cryptokeys/pub_key2048.der deleted file mode 100644 index 0e0e36b9a2de..000000000000 Binary files a/solr/core/src/test-files/cryptokeys/pub_key2048.der and /dev/null differ diff --git a/solr/core/src/test-files/cryptokeys/pub_key512.der b/solr/core/src/test-files/cryptokeys/pub_key512.der deleted file mode 100644 index 4c926dd82f14..000000000000 Binary files a/solr/core/src/test-files/cryptokeys/pub_key512.der and /dev/null differ diff --git a/solr/core/src/test-files/runtimecode/cache.jar.bin b/solr/core/src/test-files/runtimecode/cache.jar.bin deleted file mode 100644 index 0729896ee88d..000000000000 Binary files a/solr/core/src/test-files/runtimecode/cache.jar.bin and /dev/null differ diff --git a/solr/core/src/test-files/runtimecode/cache_v2.jar.bin b/solr/core/src/test-files/runtimecode/cache_v2.jar.bin deleted file mode 100644 index 61059937fb6c..000000000000 Binary files a/solr/core/src/test-files/runtimecode/cache_v2.jar.bin and /dev/null differ diff --git 
a/solr/core/src/test-files/runtimecode/runtimelibs_v3.jar.bin b/solr/core/src/test-files/runtimecode/runtimelibs_v3.jar.bin deleted file mode 100644 index 6b5bad661bf5..000000000000 Binary files a/solr/core/src/test-files/runtimecode/runtimelibs_v3.jar.bin and /dev/null differ diff --git a/solr/core/src/test-files/runtimecode/sig.txt b/solr/core/src/test-files/runtimecode/sig.txt deleted file mode 100644 index 29dbb47edb41..000000000000 --- a/solr/core/src/test-files/runtimecode/sig.txt +++ /dev/null @@ -1,97 +0,0 @@ -================priv_key2048.pem=================== - -openssl dgst -sha1 -sign ../cryptokeys/priv_key2048.pem runtimelibs.jar.bin | openssl enc -base64 - -NaTm3+i99/ZhS8YRsLc3NLz2Y6VuwEbu7DihY8GAWwWIGm+jpXgn1JiuaenfxFCc -fNKCC9WgZmEgbTZTzmV/OZMVn90u642YJbF3vTnzelW1pHB43ZRAJ1iesH0anM37 -w03n3es+vFWQtuxc+2Go888fJoMkUX2C6Zk6Jn116KE45DWjeyPM4mp3vvGzwGvd -RxP5K9Q3suA+iuI/ULXM7m9mV4ruvs/MZvL+ELm5Jnmk1bBtixVJhQwJP2z++8tQ -KJghhyBxPIC/2fkAHobQpkhZrXu56JjP+v33ul3Ku4bbvfVMY/LVwCAEnxlvhk+C -6uRCKCeFMrzQ/k5inasXLw== - - -openssl dgst -sha1 -sign ../cryptokeys/priv_key2048.pem runtimelibs_v2.jar.bin | openssl enc -base64 - -jsPpNMs74ogRbx9M4n/OH3j3s85KOq9dOtgGJkUf6O5D8T9d9zU2lDwxnTYjQCaW -cRTLGH3Z8vpc0wyT3g4aXepgLUTSnrepbPffSFhQtFrCNxurPOLzbp6ERhwjZ0RL -GvZrlbbjR2SxqZ3BpHiGxslj0tPCkdevNCEy1glLhl8RWG5xsLCrRL1mrEtLg97A -53oCCrfGAHLEvW+olGeB1r7jqUaSrbfAUfDMSIvZfOIV+xdlvabkNiuzvsAc+B6Q -pXWm+Em2f5TO/bkOh2m/UInGXcNHCa0oqRMGKP1H252Cv9eXm/d0h3Dqxv+f80Gz -LfyA6/OKQ9FfskY4pltCsQ== - -openssl dgst -sha1 -sign ../cryptokeys/priv_key2048.pem runtimelibs_v3.jar.bin | openssl enc -base64 - - -YxFr6SpYrDwG85miDfRWHTjU9UltjtIWQZEhcV55C2rczRUVowCYBxmsDv5mAM8j -0CTv854xpI1DtBT86wpoTdbF95LQuP9FJId4TS1j8bZ9cxHP5Cqyz1uBHFfUUNUr -npzTHQkVTp02O9NAjh3c2W41bL4U7j6jQ32+4CW2M+x00TDG0y0H75rQDR8zbLt3 -1oWCz+sBOdZ3rGKJgAvdoGm/wVCTmsabZN+xoz4JaDeBXF16O9Uk9SSq4G0dz5YX -FuLxHK7ciB5t0+q6pXlF/tdlDqF76Abze0R3d2/0MhXBzyNp3UxJmj6DiprgysfB -0TbQtJG0XGfdSmx0VChvcA== - 
-YxFr6SpYrDwG85miDfRWHTjU9UltjtIWQZEhcV55C2rczRUVowCYBxmsDv5mAM8j0CTv854xpI1DtBT86wpoTdbF95LQuP9FJId4TS1j8bZ9cxHP5Cqyz1uBHFfUUNUrnpzTHQkVTp02O9NAjh3c2W41bL4U7j6jQ32+4CW2M+x00TDG0y0H75rQDR8zbLt31oWCz+sBOdZ3rGKJgAvdoGm/wVCTmsabZN+xoz4JaDeBXF16O9Uk9SSq4G0dz5YXFuLxHK7ciB5t0+q6pXlF/tdlDqF76Abze0R3d2/0MhXBzyNp3UxJmj6DiprgysfB0TbQtJG0XGfdSmx0VChvcA== - -=====================priv_key512.pem===================== -openssl dgst -sha1 -sign ../cryptokeys/priv_key512.pem runtimelibs.jar.bin | openssl enc -base64 - -L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1 -f/U3bOlMPINlSOM6LK3JpQ== -L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ== - -openssl dgst -sha1 -sign ../cryptokeys/priv_key512.pem runtimelibs_v2.jar.bin | openssl enc -base64 - -j+Rflxi64tXdqosIhbusqi6GTwZq8znunC/dzwcWW0/dHlFGKDurOaE1Nz9FSPJu -XbHkVLj638yZ0Lp1ssnoYA== - -openssl dgst -sha1 -sign ../cryptokeys/priv_key512.pem runtimelibs_v3.jar.bin | openssl enc -base64 - -a400n4T7FT+2gM0SC6+MfSOExjud8MkhTSFylhvwNjtWwUgKdPFn434Wv7Qc4QEq -DVLhQoL3WqYtQmLPti0G4Q== - - -====================sha512==================== - -openssl dgst -sha512 runtimelibs.jar.bin - -d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420 - - -openssl dgst -sha512 runtimelibs_v2.jar.bin - -bc5ce45ad281b6a08fb7e529b1eb475040076834816570902acb6ebdd809410e31006efdeaa7f78a6c35574f3504963f5f7e4d92247d0eb4db3fc9abdda5d417 - -openssl dgst -sha512 runtimelibs_v3.jar.bin - -60ec88c2a2e9b409f7afc309273383810a0d07a078b482434eda9674f7e25b8adafa8a67c9913c996cbfb78a7f6ad2b9db26dbd4fe0ca4068f248d5db563f922 - -openssl dgst -sha512 cache.jar.bin - -8946650ba88919cea2f81e4771c418411f61837b2a276088c2f2c86ef2d730f152ccf5975fa8a2c7035a1f00de1994a7788676d95dc7ccea6aaf28c7fff1f46b - -openssl dgst -sha512 cache_v2.jar.bin - 
-873337e67b90b7ff99df012b2e9093c63079c37a564643d34861a88c4cbaf0698ebb096905929d69cdbde3b4d29d55e31db24ee05c01b39c0b75a16e54eb4335 - -=============sha256============================ - -openssl dgst -sha256 runtimelibs.jar.bin - -e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc - -openssl dgst -sha512 runtimelibs_v2.jar.bin - -79298d7d5c3e60d91154efe7d72f4536eac46698edfa22ab894b85492d562ed4 - -openssl dgst -sha256 runtimelibs_v3.jar.bin - -20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3 - -openssl dgst -sha256 cache.jar.bin - -32e8b5b2a95ea306538b52017f0954aa1b0f8a8b2d0acbc498fd0e66a223f7bd - -openssl dgst -sha256 cache_v2.jar.bin - -0f670f6dcc2b00f9a448a7ebd457d4ff985ab702c85cdb3608dcae9889e8d702 - - diff --git a/solr/core/src/test-files/solr/collection1/conf/schema-tiny.xml b/solr/core/src/test-files/solr/collection1/conf/schema-tiny.xml index a0d52385aee8..555ee3571b70 100644 --- a/solr/core/src/test-files/solr/collection1/conf/schema-tiny.xml +++ b/solr/core/src/test-files/solr/collection1/conf/schema-tiny.xml @@ -32,4 +32,6 @@ + + diff --git a/solr/core/src/test-files/solr/collection1/conf/schema.xml b/solr/core/src/test-files/solr/collection1/conf/schema.xml index e0a96cca05ce..9f46cec836e4 100644 --- a/solr/core/src/test-files/solr/collection1/conf/schema.xml +++ b/solr/core/src/test-files/solr/collection1/conf/schema.xml @@ -498,6 +498,14 @@ + + + + + + + + @@ -632,6 +640,28 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/solr/core/src/test-files/solr/configsets/cloud-dynamic/conf/solrconfig.xml b/solr/core/src/test-files/solr/configsets/cloud-dynamic/conf/solrconfig.xml index 059e58f447c2..0cdb6acb33e3 100644 --- a/solr/core/src/test-files/solr/configsets/cloud-dynamic/conf/solrconfig.xml +++ b/solr/core/src/test-files/solr/configsets/cloud-dynamic/conf/solrconfig.xml @@ -29,6 +29,8 @@ ${tests.luceneMatchVersion:LATEST} + + ${solr.commitwithin.softcommit:true} diff --git 
a/solr/core/src/test-files/solr/security/jwt_plugin_jwk_security.json b/solr/core/src/test-files/solr/security/jwt_plugin_jwk_security.json index 7daab7ac9cbd..772089e38199 100644 --- a/solr/core/src/test-files/solr/security/jwt_plugin_jwk_security.json +++ b/solr/core/src/test-files/solr/security/jwt_plugin_jwk_security.json @@ -9,6 +9,10 @@ "kid": "test", "alg": "RS256", "n": "jeyrvOaZrmKWjyNXt0myAc_pJ1hNt3aRupExJEx1ewPaL9J9HFgSCjMrYxCB1ETO1NDyZ3nSgjZis-jHHDqBxBjRdq_t1E2rkGFaYbxAyKt220Pwgme_SFTB9MXVrFQGkKyjmQeVmOmV6zM3KK8uMdKQJ4aoKmwBcF5Zg7EZdDcKOFgpgva1Jq-FlEsaJ2xrYDYo3KnGcOHIt9_0NQeLsqZbeWYLxYni7uROFncXYV5FhSJCeR4A_rrbwlaCydGxE0ToC_9HNYibUHlkJjqyUhAgORCbNS8JLCJH8NUi5sDdIawK9GTSyvsJXZ-QHqo4cMUuxWV5AJtaRGghuMUfqQ" - } + }, + "realm": "my-solr-jwt", + "adminUiScope": "solr:admin", + "authorizationEndpoint": "http://acmepaymentscorp/oauth/auz/authorize", + "clientId": "solr-cluster" } } \ No newline at end of file diff --git a/solr/core/src/test-files/solr/security/jwt_plugin_jwk_url_security.json b/solr/core/src/test-files/solr/security/jwt_plugin_jwk_url_security.json index 74b86ef03c14..24f587683ffb 100644 --- a/solr/core/src/test-files/solr/security/jwt_plugin_jwk_url_security.json +++ b/solr/core/src/test-files/solr/security/jwt_plugin_jwk_url_security.json @@ -1,6 +1,6 @@ { "authentication" : { "class": "solr.JWTAuthPlugin", - "jwkUrl": "https://127.0.0.1:8999/this-will-fail.wks" + "jwksUrl": "https://127.0.0.1:8999/this-will-fail.wks" } } \ No newline at end of file diff --git a/solr/core/src/test-files/solr/simSnapshot/autoscalingState.json b/solr/core/src/test-files/solr/simSnapshot/autoscalingState.json new file mode 100644 index 000000000000..9ce3f6f4a651 --- /dev/null +++ b/solr/core/src/test-files/solr/simSnapshot/autoscalingState.json @@ -0,0 +1,3923 @@ +{ + "suggestions":[{ + "suggestion":{ + "type":"improvement", + "operation":{ + "method":"POST", + "path":"/c/COLL_q", + "command":{"move-replica":{ + "targetNode":"N_b9_solr", + "inPlaceMove":"true", 
+ "replica":"core_node3"}}}}, + "replica":{"core_node3":{ + "core":"COLL_q_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_q", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.9242789E8, + "INDEX.sizeInGB":0.45860921032726765}}}], + "diagnostics":{ + "sortedNodes":[ + { + "node":"N_7e_solr", + "isLive":true, + "cores":13.0, + "freedisk":873.6022491455078, + "sysprop.pool":"pool-03", + "sysprop.az":"us-east-1b", + "totaldisk":999.51171875, + "replicas":{ + "COLL_22":{"shard1":[{"core_node6":{ + "core":"COLL_22_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_22", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":2.4348956483E10, + "INDEX.sizeInGB":22.676732840947807}}]}, + "COLL_q":{"shard1":[{"core_node3":{ + "core":"COLL_q_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_q", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.9242789E8, + "INDEX.sizeInGB":0.45860921032726765}}]}, + "COLL_1b":{"shard1":[{"core_node3":{ + "core":"COLL_1b_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_1b", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7}}]}, + "COLL_1t":{"shard1":[{"core_node3":{ + "core":"COLL_1t_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_1t", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":8.5774407615E10, + "INDEX.sizeInGB":79.8836421361193}}]}, + "COLL_x":{"shard1":[{"core_node3":{ + "core":"COLL_x_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_x", + 
"node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":3.18270873E8, + "INDEX.sizeInGB":0.296412848867476}}]}, + "COLL_2k":{"shard1":[{"core_node3":{ + "core":"COLL_2k_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_2k", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7}}]}, + "COLL_1r":{"shard1":[{"core_node3":{ + "core":"COLL_1r_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_1r", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.12015174E8, + "INDEX.sizeInGB":0.38371903263032436}}]}, + "COLL_8":{"shard1":[{"core_node3":{ + "core":"COLL_8_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_8", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":356048.0, + "INDEX.sizeInGB":3.315955400466919E-4}}]}, + "COLL_5":{"shard1":[{"core_node2":{ + "core":"COLL_5_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_5", + "node_name":"N_7e_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":5.854396964E9, + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":5.452332053333521}}]}, + "COLL_l":{"shard1":[{"core_node3":{ + "core":"COLL_l_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_l", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7}}]}, + "COLL_1x":{"shard1":[{"core_node3":{ + "core":"COLL_1x_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_1x", + "node_name":"N_7e_solr", + 
"type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4248411.0, + "INDEX.sizeInGB":0.00395664107054472}}]}, + "COLL_4":{"shard1":[{"core_node3":{ + "core":"COLL_4_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_4", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":2.58881858E8, + "INDEX.sizeInGB":0.2411025185137987}}]}, + "COLL_6":{"shard1":[{"core_node3":{ + "core":"COLL_6_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_6", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.6446420654E10, + "INDEX.sizeInGB":15.316922826692462}}]}}}, + { + "node":"N_0_solr", + "isLive":true, + "cores":12.0, + "freedisk":719.6562576293945, + "sysprop.pool":"pool-03", + "sysprop.az":"us-east-1a", + "totaldisk":999.51171875, + "replicas":{ + "COLL_22":{"shard1":[{"core_node10":{ + "core":"COLL_22_shard1_replica_n9", + "shard":"shard1", + "collection":"COLL_22", + "node_name":"N_0_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":2.4351639993E10, + "base_url":"http://N_0/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":22.679232054390013}}]}, + "COLL_q":{"shard1":[{"core_node10":{ + "core":"COLL_q_shard1_replica_n9", + "shard":"shard1", + "collection":"COLL_q", + "node_name":"N_0_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":4.9242789E8, + "base_url":"http://N_0/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":0.45860921032726765}}]}, + "COLL_1b":{"shard1":[{"core_node10":{ + "core":"COLL_1b_shard1_replica_n9", + "shard":"shard1", + "collection":"COLL_1b", + "node_name":"N_0_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":135.0, + "base_url":"http://N_0/solr", + "state":"active", + 
"force_set_state":"false", + "INDEX.sizeInGB":1.257285475730896E-7}}]}, + "COLL_1t":{"shard1":[{"core_node5":{ + "core":"COLL_1t_shard1_replica_n2", + "shard":"shard1", + "collection":"COLL_1t", + "node_name":"N_0_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":8.7485800719E10, + "base_url":"http://N_0/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":81.47750116791576}}]}, + "COLL_x":{"shard1":[{"core_node10":{ + "core":"COLL_x_shard1_replica_n9", + "shard":"shard1", + "collection":"COLL_x", + "node_name":"N_0_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":3.0928583E8, + "base_url":"http://N_0/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":0.2880448754876852}}]}, + "COLL_2k":{"shard1":[{"core_node10":{ + "core":"COLL_2k_shard1_replica_n9", + "shard":"shard1", + "collection":"COLL_2k", + "node_name":"N_0_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":135.0, + "base_url":"http://N_0/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":1.257285475730896E-7}}]}, + "COLL_1r":{"shard1":[{"core_node5":{ + "core":"COLL_1r_shard1_replica_n2", + "shard":"shard1", + "collection":"COLL_1r", + "node_name":"N_0_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":4.25884524E8, + "base_url":"http://N_0/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":0.39663587138056755}}]}, + "COLL_8":{"shard1":[{"core_node5":{ + "core":"COLL_8_shard1_replica_n2", + "shard":"shard1", + "collection":"COLL_8", + "node_name":"N_0_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":399225.0, + "base_url":"http://N_0/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":3.718072548508644E-4}}]}, + "COLL_l":{"shard1":[{"core_node10":{ + "core":"COLL_l_shard1_replica_n9", + "shard":"shard1", + "collection":"COLL_l", + "node_name":"N_0_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":135.0, + 
"base_url":"http://N_0/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":1.257285475730896E-7}}]}, + "COLL_1x":{"shard1":[{"core_node10":{ + "core":"COLL_1x_shard1_replica_n9", + "shard":"shard1", + "collection":"COLL_1x", + "node_name":"N_0_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":4264901.0, + "base_url":"http://N_0/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":0.003971998579800129}}]}, + "COLL_4":{"shard1":[{"core_node5":{ + "core":"COLL_4_shard1_replica_n2", + "shard":"shard1", + "collection":"COLL_4", + "node_name":"N_0_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":2.58797271E8, + "base_url":"http://N_0/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":0.24102374073117971}}]}, + "COLL_6":{"shard1":[{"core_node6":{ + "core":"COLL_6_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_6", + "node_name":"N_0_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":4.4921656871E10, + "base_url":"http://N_0/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":41.83655313309282}}]}}}, + { + "node":"N_4_solr", + "isLive":true, + "cores":12.0, + "freedisk":875.4758682250977, + "sysprop.pool":"pool-03", + "sysprop.az":"us-east-1c", + "totaldisk":999.51171875, + "replicas":{ + "COLL_22":{"shard1":[{"core_node5":{ + "core":"COLL_22_shard1_replica_n2", + "shard":"shard1", + "collection":"COLL_22", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":2.436290627E10, + "INDEX.sizeInGB":22.689724592491984}}]}, + "COLL_q":{"shard1":[{"core_node6":{ + "core":"COLL_q_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_q", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.9242789E8, + "INDEX.sizeInGB":0.45860921032726765}}]}, + 
"COLL_1b":{"shard1":[{"core_node6":{ + "core":"COLL_1b_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_1b", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7}}]}, + "COLL_1t":{"shard1":[{"core_node6":{ + "core":"COLL_1t_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_1t", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":8.5380419785E10, + "INDEX.sizeInGB":79.51671237591654}}]}, + "COLL_x":{"shard1":[{"core_node6":{ + "core":"COLL_x_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_x", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":3.03301808E8, + "INDEX.sizeInGB":0.28247182071208954}}]}, + "COLL_2k":{"shard1":[{"core_node6":{ + "core":"COLL_2k_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_2k", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7}}]}, + "COLL_1r":{"shard1":[{"core_node6":{ + "core":"COLL_1r_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_1r", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.46826689E8, + "INDEX.sizeInGB":0.4161397824063897}}]}, + "COLL_8":{"shard1":[{"core_node6":{ + "core":"COLL_8_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_8", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":356048.0, + "INDEX.sizeInGB":3.315955400466919E-4}}]}, + "COLL_l":{"shard1":[{"core_node6":{ + 
"core":"COLL_l_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_l", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7}}]}, + "COLL_1x":{"shard1":[{"core_node6":{ + "core":"COLL_1x_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_1x", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4255591.0, + "INDEX.sizeInGB":0.003963327966630459}}]}, + "COLL_4":{"shard1":[{"core_node6":{ + "core":"COLL_4_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_4", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":2.59832461E8, + "INDEX.sizeInGB":0.2419878365471959}}]}, + "COLL_6":{"shard1":[{"core_node5":{ + "core":"COLL_6_shard1_replica_n2", + "shard":"shard1", + "collection":"COLL_6", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":2.0738852096E10, + "INDEX.sizeInGB":19.314561128616333}}]}}}, + { + "node":"N_g_solr", + "isLive":true, + "cores":6.0, + "freedisk":4007.3253440856934, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard2_1_0":[{"core_node1681":{ + "core":"COLL_2_shard2_1_0_replica_n1680", + "shard":"shard2_1_0", + "collection":"COLL_2", + "node_name":"N_g_solr", + "type":"NRT", + "base_url":"http://N_g/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.3012044407E11, + "INDEX.sizeInGB":121.18410698138177}}], + "shard5_0_1":[{"core_node1771":{ + "core":"COLL_2_shard5_0_1_replica_n1770", + "shard":"shard5_0_1", + "collection":"COLL_2", + "node_name":"N_g_solr", + "type":"NRT", + "base_url":"http://N_g/solr", + 
"state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31464210597E11, + "INDEX.sizeInGB":122.43558708298951}}], + "shard5_1_0":[{"core_node1783":{ + "core":"COLL_2_shard5_1_0_replica_n1782", + "shard":"shard5_1_0", + "collection":"COLL_2", + "node_name":"N_g_solr", + "type":"NRT", + "base_url":"http://N_g/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30012462556E11, + "INDEX.sizeInGB":121.08354135975242}}], + "shard5_1_1":[{"core_node861":{ + "core":"COLL_2_shard5_1_1_replica_n859", + "shard":"shard5_1_1", + "collection":"COLL_2", + "node_name":"N_g_solr", + "type":"NRT", + "base_url":"http://N_g/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29967769078E11, + "INDEX.sizeInGB":121.04191731475294}}], + "shard5_0_0":[{"core_node1769":{ + "core":"COLL_2_shard5_0_0_replica_n1768", + "shard":"shard5_0_0", + "collection":"COLL_2", + "node_name":"N_g_solr", + "type":"NRT", + "base_url":"http://N_g/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31922267714E11, + "INDEX.sizeInGB":122.8621860165149}}], + "shard9_0_0":[{"core_node1683":{ + "core":"COLL_2_shard9_0_0_replica_n1682", + "shard":"shard9_0_0", + "collection":"COLL_2", + "node_name":"N_g_solr", + "type":"NRT", + "property.preferredleader":"true", + "INDEX.sizeInBytes":1.29248772716E11, + "base_url":"http://N_g/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":120.37229977175593}}]}}}, + { + "node":"N_17_solr", + "isLive":true, + "cores":6.0, + "freedisk":4093.756145477295, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard11_1_1":[{"core_node768":{ + "core":"COLL_2_shard11_1_1_replica_n762", + "shard":"shard11_1_1", + "collection":"COLL_2", + "node_name":"N_17_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.30871431234E11, + "base_url":"http://N_17/solr", + "state":"active", + 
"force_set_state":"false", + "INDEX.sizeInGB":121.88351828046143}}], + "shard14_0_0":[{"core_node1121":{ + "core":"COLL_2_shard14_0_0_replica_n1120", + "shard":"shard14_0_0", + "collection":"COLL_2", + "node_name":"N_17_solr", + "type":"NRT", + "base_url":"http://N_17/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.3029908264E11, + "INDEX.sizeInGB":121.3504771143198}}], + "shard18_0_1":[{"core_node877":{ + "core":"COLL_2_shard18_0_1_replica_n2", + "shard":"shard18_0_1", + "collection":"COLL_2", + "node_name":"N_17_solr", + "type":"NRT", + "base_url":"http://N_17/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28174988934E11, + "INDEX.sizeInGB":119.37226069532335}}], + "shard12_0_1":[{"core_node1699":{ + "core":"COLL_2_shard12_0_1_replica_n1698", + "shard":"shard12_0_1", + "collection":"COLL_2", + "node_name":"N_17_solr", + "type":"NRT", + "base_url":"http://N_17/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30350286057E11, + "INDEX.sizeInGB":121.39816401246935}}], + "shard12_0_0":[{"core_node1751":{ + "core":"COLL_2_shard12_0_0_replica_n1750", + "shard":"shard12_0_0", + "collection":"COLL_2", + "node_name":"N_17_solr", + "type":"NRT", + "base_url":"http://N_17/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2936875619E11, + "INDEX.sizeInGB":120.48404308967292}}], + "shard14_0_1":[{"core_node1123":{ + "core":"COLL_2_shard14_0_1_replica_n1122", + "shard":"shard14_0_1", + "collection":"COLL_2", + "node_name":"N_17_solr", + "type":"NRT", + "base_url":"http://N_17/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31146492351E11, + "INDEX.sizeInGB":122.13968890812248}}]}}}, + { + "node":"N_303_solr", + "isLive":true, + "cores":6.0, + "freedisk":4111.4668045043945, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard16_0_1":[{"core_node987":{ + 
"core":"COLL_2_shard16_0_1_replica_n986", + "shard":"shard16_0_1", + "collection":"COLL_2", + "node_name":"N_303_solr", + "type":"NRT", + "base_url":"http://N_303/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30738903625E11, + "INDEX.sizeInGB":121.76009232643992}}], + "shard16_0_0":[{"core_node1785":{ + "core":"COLL_2_shard16_0_0_replica_n1784", + "shard":"shard16_0_0", + "collection":"COLL_2", + "node_name":"N_303_solr", + "type":"NRT", + "base_url":"http://N_303/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.26747476604E11, + "INDEX.sizeInGB":118.04278623685241}}], + "shard3_0_0":[{"core_node544":{ + "core":"COLL_2_shard3_0_0_replica_n2", + "shard":"shard3_0_0", + "collection":"COLL_2", + "node_name":"N_303_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.29792212268E11, + "base_url":"http://N_303/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":120.87841729447246}}], + "shard9_1_1":[{"core_node1163":{ + "core":"COLL_2_shard9_1_1_replica_n1162", + "shard":"shard9_1_1", + "collection":"COLL_2", + "node_name":"N_303_solr", + "type":"NRT", + "base_url":"http://N_303/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.36568824379E11, + "INDEX.sizeInGB":127.18962913285941}}], + "shard9_1_0":[{"core_node1151":{ + "core":"COLL_2_shard9_1_0_replica_n1150", + "shard":"shard9_1_0", + "collection":"COLL_2", + "node_name":"N_303_solr", + "type":"NRT", + "base_url":"http://N_303/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31117387108E11, + "INDEX.sizeInGB":122.11258253827691}}], + "shard4_0_1":[{"core_node1773":{ + "core":"COLL_2_shard4_0_1_replica_n1772", + "shard":"shard4_0_1", + "collection":"COLL_2", + "node_name":"N_303_solr", + "type":"NRT", + "base_url":"http://N_303/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28126128215E11, + 
"INDEX.sizeInGB":119.3267556047067}}]}}}, + { + "node":"N_dj_solr", + "isLive":true, + "cores":6.0, + "freedisk":4162.087951660156, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard1_1_0":[{"core_node471":{ + "core":"COLL_2_shard1_1_0_replica_n1", + "shard":"shard1_1_0", + "collection":"COLL_2", + "node_name":"N_dj_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.29057719236E11, + "base_url":"http://N_dj/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":120.19436735287309}}], + "shard7_1_0":[{"core_node928":{ + "core":"COLL_2_shard7_1_0_replica_n926", + "shard":"shard7_1_0", + "collection":"COLL_2", + "node_name":"N_dj_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.29963886019E11, + "base_url":"http://N_dj/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":121.03830093424767}}], + "shard7_1_1":[{"core_node941":{ + "core":"COLL_2_shard7_1_1_replica_n927", + "shard":"shard7_1_1", + "collection":"COLL_2", + "node_name":"N_dj_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.28538540188E11, + "base_url":"http://N_dj/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":119.71084418520331}}], + "shard18_0_1":[{"core_node773":{ + "core":"COLL_2_shard18_0_1_replica_n771", + "shard":"shard18_0_1", + "collection":"COLL_2", + "node_name":"N_dj_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.30821199599E11, + "base_url":"http://N_dj/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":121.83673642482609}}], + "shard13_0_1":[{"core_node1715":{ + "core":"COLL_2_shard13_0_1_replica_n1714", + "shard":"shard13_0_1", + "collection":"COLL_2", + "node_name":"N_dj_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.30355121703E11, + "base_url":"http://N_dj/solr", + "state":"active", + "force_set_state":"false", + 
"INDEX.sizeInGB":121.402667558752}}], + "shard13_0_0":[{"core_node1749":{ + "core":"COLL_2_shard13_0_0_replica_n1748", + "shard":"shard13_0_0", + "collection":"COLL_2", + "node_name":"N_dj_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.30427736106E11, + "base_url":"http://N_dj/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":121.47029499150813}}]}}}, + { + "node":"N_1c_solr", + "isLive":true, + "cores":6.0, + "freedisk":4181.229598999023, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard5_0_1":[{"core_node1703":{ + "core":"COLL_2_shard5_0_1_replica_n1702", + "shard":"shard5_0_1", + "collection":"COLL_2", + "node_name":"N_1c_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.31521149156E11, + "base_url":"http://N_1c/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":122.48861524835229}}], + "shard5_1_0":[{"core_node1135":{ + "core":"COLL_2_shard5_1_0_replica_n1134", + "shard":"shard5_1_0", + "collection":"COLL_2", + "node_name":"N_1c_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.30030877168E11, + "base_url":"http://N_1c/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":121.1006913036108}}], + "shard18_0_0":[{"core_node874":{ + "core":"COLL_2_shard18_0_0_replica_n1", + "shard":"shard18_0_0", + "collection":"COLL_2", + "node_name":"N_1c_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.28011422432E11, + "base_url":"http://N_1c/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":119.21992751955986}}], + "shard5_1_1":[{"core_node1141":{ + "core":"COLL_2_shard5_1_1_replica_n1140", + "shard":"shard5_1_1", + "collection":"COLL_2", + "node_name":"N_1c_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.29917464329E11, + "base_url":"http://N_1c/solr", + "state":"active", + "force_set_state":"false", + 
"INDEX.sizeInGB":120.99506736639887}}], + "shard5_0_0":[{"core_node999":{ + "core":"COLL_2_shard5_0_0_replica_n998", + "shard":"shard5_0_0", + "collection":"COLL_2", + "node_name":"N_1c_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.31937405764E11, + "base_url":"http://N_1c/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":122.87628442421556}}], + "shard18_0_1":[{"core_node876":{ + "core":"COLL_2_shard18_0_1_replica_n1", + "shard":"shard18_0_1", + "collection":"COLL_2", + "node_name":"N_1c_solr", + "type":"NRT", + "base_url":"http://N_1c/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30729375574E11, + "INDEX.sizeInGB":121.75121863745153}}]}}}, + { + "node":"N_z_solr", + "isLive":true, + "cores":6.0, + "freedisk":4215.115695953369, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard1_0_0":[{"core_node1717":{ + "core":"COLL_2_shard1_0_0_replica_n1716", + "shard":"shard1_0_0", + "collection":"COLL_2", + "node_name":"N_z_solr", + "type":"NRT", + "base_url":"http://N_z/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":5.7185112146E10, + "INDEX.sizeInGB":53.25778587348759}}], + "shard8_1_0":[{"core_node1707":{ + "core":"COLL_2_shard8_1_0_replica_n1706", + "shard":"shard8_1_0", + "collection":"COLL_2", + "node_name":"N_z_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.35679630668E11, + "base_url":"http://N_z/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":126.361502956599}}], + "shard8_0_0":[{"core_node1731":{ + "core":"COLL_2_shard8_0_0_replica_n1730", + "shard":"shard8_0_0", + "collection":"COLL_2", + "node_name":"N_z_solr", + "type":"NRT", + "base_url":"http://N_z/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30170301246E11, + "INDEX.sizeInGB":121.23054009489715}}], + "shard8_0_1":[{"core_node1695":{ + 
"core":"COLL_2_shard8_0_1_replica_n1694", + "shard":"shard8_0_1", + "collection":"COLL_2", + "node_name":"N_z_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.39918850407E11, + "base_url":"http://N_z/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":130.30958399828523}}], + "shard8_1_1":[{"core_node1755":{ + "core":"COLL_2_shard8_1_1_replica_n1754", + "shard":"shard8_1_1", + "collection":"COLL_2", + "node_name":"N_z_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.33314153125E11, + "base_url":"http://N_z/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":124.15848032105714}}], + "shard14_1_0":[{"core_node1127":{ + "core":"COLL_2_shard14_1_0_replica_n1126", + "shard":"shard14_1_0", + "collection":"COLL_2", + "node_name":"N_z_solr", + "type":"NRT", + "base_url":"http://N_z/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27443177079E11, + "INDEX.sizeInGB":118.69070779439062}}]}}}, + { + "node":"N_6_solr", + "isLive":true, + "cores":6.0, + "freedisk":4252.47643661499, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard8_1_0":[{"core_node1811":{ + "core":"COLL_2_shard8_1_0_replica_n1810", + "shard":"shard8_1_0", + "collection":"COLL_2", + "node_name":"N_6_solr", + "type":"NRT", + "base_url":"http://N_6/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.35679249773E11, + "INDEX.sizeInGB":126.36114822048694}}], + "shard4_0_0":[{"core_node520":{ + "core":"COLL_2_shard4_0_0_replica_n2", + "shard":"shard4_0_0", + "collection":"COLL_2", + "node_name":"N_6_solr", + "type":"NRT", + "base_url":"http://N_6/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28680029361E11, + "INDEX.sizeInGB":119.84261624608189}}], + "shard4_0_1":[{"core_node1803":{ + "core":"COLL_2_shard4_0_1_replica_n1802", + "shard":"shard4_0_1", + "collection":"COLL_2", + 
"node_name":"N_6_solr", + "type":"NRT", + "base_url":"http://N_6/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28153346526E11, + "INDEX.sizeInGB":119.35210463218391}}], + "shard9_0_0":[{"core_node1799":{ + "core":"COLL_2_shard9_0_0_replica_n1798", + "shard":"shard9_0_0", + "collection":"COLL_2", + "node_name":"N_6_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.35157081196E11, + "base_url":"http://N_6/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":125.874840836972}}], + "shard3_1_0":[{"core_node459":{ + "core":"COLL_2_shard3_1_0_replica_n1", + "shard":"shard3_1_0", + "collection":"COLL_2", + "node_name":"N_6_solr", + "type":"NRT", + "base_url":"http://N_6/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.32652501535E11, + "INDEX.sizeInGB":123.54226925875992}}], + "shard15_1_1":[{"core_node1709":{ + "core":"COLL_2_shard15_1_1_replica_n1708", + "shard":"shard15_1_1", + "collection":"COLL_2", + "node_name":"N_6_solr", + "type":"NRT", + "base_url":"http://N_6/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30846984322E11, + "INDEX.sizeInGB":121.86075031943619}}]}}}, + { + "node":"N_1m_solr", + "isLive":true, + "cores":6.0, + "freedisk":4257.921604156494, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard6_1_1":[{"core_node1745":{ + "core":"COLL_2_shard6_1_1_replica_n1744", + "shard":"shard6_1_1", + "collection":"COLL_2", + "node_name":"N_1m_solr", + "type":"NRT", + "base_url":"http://N_1m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31273933482E11, + "INDEX.sizeInGB":122.25837771035731}}], + "shard1_1_0":[{"core_node1679":{ + "core":"COLL_2_shard1_1_0_replica_n1678", + "shard":"shard1_1_0", + "collection":"COLL_2", + "node_name":"N_1m_solr", + "type":"NRT", + "base_url":"http://N_1m/solr", + "state":"active", + 
"force_set_state":"false", + "INDEX.sizeInBytes":1.28970690262E11, + "INDEX.sizeInGB":120.11331530474126}}], + "shard8_0_0":[{"core_node887":{ + "core":"COLL_2_shard8_0_0_replica_n886", + "shard":"shard8_0_0", + "collection":"COLL_2", + "node_name":"N_1m_solr", + "type":"NRT", + "base_url":"http://N_1m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30145902623E11, + "INDEX.sizeInGB":121.20781710650772}}], + "shard8_0_1":[{"core_node893":{ + "core":"COLL_2_shard8_0_1_replica_n892", + "shard":"shard8_0_1", + "collection":"COLL_2", + "node_name":"N_1m_solr", + "type":"NRT", + "base_url":"http://N_1m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.32681734677E11, + "INDEX.sizeInGB":123.56949474383146}}], + "shard8_1_1":[{"core_node1711":{ + "core":"COLL_2_shard8_1_1_replica_n1710", + "shard":"shard8_1_1", + "collection":"COLL_2", + "node_name":"N_1m_solr", + "type":"NRT", + "base_url":"http://N_1m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.33374089494E11, + "INDEX.sizeInGB":124.21430041454732}}], + "shard6_1_0":[{"core_node1167":{ + "core":"COLL_2_shard6_1_0_replica_n1166", + "shard":"shard6_1_0", + "collection":"COLL_2", + "node_name":"N_1m_solr", + "type":"NRT", + "base_url":"http://N_1m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29376799009E11, + "INDEX.sizeInGB":120.49153354857117}}]}}}, + { + "node":"N_4g_solr", + "isLive":true, + "cores":6.0, + "freedisk":4259.9677734375, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard8_1_1":[{"core_node1795":{ + "core":"COLL_2_shard8_1_1_replica_n1794", + "shard":"shard8_1_1", + "collection":"COLL_2", + "node_name":"N_4g_solr", + "type":"NRT", + "base_url":"http://N_4g/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.33276674177E11, + "INDEX.sizeInGB":124.1235753307119}}], + 
"shard9_1_1":[{"core_node944":{ + "core":"COLL_2_shard9_1_1_replica_n930", + "shard":"shard9_1_1", + "collection":"COLL_2", + "node_name":"N_4g_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.33928213329E11, + "base_url":"http://N_4g/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":124.73036845121533}}], + "shard9_1_0":[{"core_node931":{ + "core":"COLL_2_shard9_1_0_replica_n929", + "shard":"shard9_1_0", + "collection":"COLL_2", + "node_name":"N_4g_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.31111103315E11, + "base_url":"http://N_4g/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":122.1067303000018}}], + "shard18_1_1":[{"core_node626":{ + "core":"COLL_2_shard18_1_1_replica_n624", + "shard":"shard18_1_1", + "collection":"COLL_2", + "node_name":"N_4g_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.28190099634E11, + "base_url":"http://N_4g/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":119.38633363135159}}], + "shard18_1_0":[{"core_node625":{ + "core":"COLL_2_shard18_1_0_replica_n623", + "shard":"shard18_1_0", + "collection":"COLL_2", + "node_name":"N_4g_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.28955475131E11, + "base_url":"http://N_4g/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":120.09914510976523}}], + "shard2_1_1":[{"core_node1813":{ + "core":"COLL_2_shard2_1_1_replica_n1812", + "shard":"shard2_1_1", + "collection":"COLL_2", + "node_name":"N_4g_solr", + "type":"NRT", + "base_url":"http://N_4g/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28164947427E11, + "INDEX.sizeInGB":119.36290881317109}}]}}}, + { + "node":"N_cs_solr", + "isLive":true, + "cores":6.0, + "freedisk":4260.629165649414, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard6_1_1":[{"core_node1705":{ + 
"core":"COLL_2_shard6_1_1_replica_n1704", + "shard":"shard6_1_1", + "collection":"COLL_2", + "node_name":"N_cs_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.31274462707E11, + "base_url":"http://N_cs/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":122.25887058954686}}], + "shard10_0_1":[{"core_node828":{ + "core":"COLL_2_shard10_0_1_replica_n826", + "shard":"shard10_0_1", + "collection":"COLL_2", + "node_name":"N_cs_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.28038688927E11, + "base_url":"http://N_cs/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":119.245321421884}}], + "shard6_1_0":[{"core_node937":{ + "core":"COLL_2_shard6_1_0_replica_n935", + "shard":"shard6_1_0", + "collection":"COLL_2", + "node_name":"N_cs_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.29597529819E11, + "base_url":"http://N_cs/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":120.69710513483733}}], + "shard15_1_0":[{"core_node955":{ + "core":"COLL_2_shard15_1_0_replica_n953", + "shard":"shard15_1_0", + "collection":"COLL_2", + "node_name":"N_cs_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.33515745782E11, + "base_url":"http://N_cs/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":124.34622811339796}}], + "shard10_0_0":[{"core_node827":{ + "core":"COLL_2_shard10_0_0_replica_n825", + "shard":"shard10_0_0", + "collection":"COLL_2", + "node_name":"N_cs_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.29486149433E11, + "base_url":"http://N_cs/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":120.59337406698614}}], + "shard15_1_1":[{"core_node956":{ + "core":"COLL_2_shard15_1_1_replica_n954", + "shard":"shard15_1_1", + "collection":"COLL_2", + "node_name":"N_cs_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.30865977458E11, + "base_url":"http://N_cs/solr", + 
"state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":121.87843905575573}}]}}}, + { + "node":"N_1f_solr", + "isLive":true, + "cores":6.0, + "freedisk":4260.807849884033, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard11_0_1":[{"core_node1223":{ + "core":"COLL_2_shard11_0_1_replica_n1222", + "shard":"shard11_0_1", + "collection":"COLL_2", + "node_name":"N_1f_solr", + "type":"NRT", + "base_url":"http://N_1f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27989218509E11, + "INDEX.sizeInGB":119.19924850482494}}], + "shard11_1_0":[{"core_node779":{ + "core":"COLL_2_shard11_1_0_replica_n778", + "shard":"shard11_1_0", + "collection":"COLL_2", + "node_name":"N_1f_solr", + "type":"NRT", + "base_url":"http://N_1f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.32552454912E11, + "INDEX.sizeInGB":123.44909358024597}}], + "shard11_0_0":[{"core_node1217":{ + "core":"COLL_2_shard11_0_0_replica_n1216", + "shard":"shard11_0_0", + "collection":"COLL_2", + "node_name":"N_1f_solr", + "type":"NRT", + "base_url":"http://N_1f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27720861488E11, + "INDEX.sizeInGB":118.94932155311108}}], + "shard11_1_1":[{"core_node783":{ + "core":"COLL_2_shard11_1_1_replica_n782", + "shard":"shard11_1_1", + "collection":"COLL_2", + "node_name":"N_1f_solr", + "type":"NRT", + "base_url":"http://N_1f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30995783614E11, + "INDEX.sizeInGB":121.99933045916259}}], + "shard5_0_1":[{"core_node1003":{ + "core":"COLL_2_shard5_0_1_replica_n1002", + "shard":"shard5_0_1", + "collection":"COLL_2", + "node_name":"N_1f_solr", + "type":"NRT", + "base_url":"http://N_1f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31534942129E11, + "INDEX.sizeInGB":122.50146095547825}}], + 
"shard5_0_0":[{"core_node1001":{ + "core":"COLL_2_shard5_0_0_replica_n1000", + "shard":"shard5_0_0", + "collection":"COLL_2", + "node_name":"N_1f_solr", + "type":"NRT", + "base_url":"http://N_1f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31960210955E11, + "INDEX.sizeInGB":122.89752341341227}}]}}}, + { + "node":"N_65p_solr", + "isLive":true, + "cores":6.0, + "freedisk":4260.997627258301, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard7_0_0":[{"core_node774":{ + "core":"COLL_2_shard7_0_0_replica_n1", + "shard":"shard7_0_0", + "collection":"COLL_2", + "node_name":"N_65p_solr", + "type":"NRT", + "base_url":"http://N_65p/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29027793373E11, + "INDEX.sizeInGB":120.16649672109634}}], + "shard10_1_0":[{"core_node1797":{ + "core":"COLL_2_shard10_1_0_replica_n1796", + "shard":"shard10_1_0", + "collection":"COLL_2", + "node_name":"N_65p_solr", + "type":"NRT", + "base_url":"http://N_65p/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27583656591E11, + "INDEX.sizeInGB":118.82153953518718}}], + "shard3_0_0":[{"core_node543":{ + "core":"COLL_2_shard3_0_0_replica_n1", + "shard":"shard3_0_0", + "collection":"COLL_2", + "node_name":"N_65p_solr", + "type":"NRT", + "base_url":"http://N_65p/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29871412511E11, + "INDEX.sizeInGB":120.95217826869339}}], + "shard3_0_1":[{"core_node545":{ + "core":"COLL_2_shard3_0_1_replica_n1", + "shard":"shard3_0_1", + "collection":"COLL_2", + "node_name":"N_65p_solr", + "type":"NRT", + "base_url":"http://N_65p/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31838835644E11, + "INDEX.sizeInGB":122.784483846277}}], + "shard15_1_0":[{"core_node1173":{ + "core":"COLL_2_shard15_1_0_replica_n1172", + "shard":"shard15_1_0", + 
"collection":"COLL_2", + "node_name":"N_65p_solr", + "type":"NRT", + "base_url":"http://N_65p/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.33316507698E11, + "INDEX.sizeInGB":124.16067318804562}}], + "shard15_1_1":[{"core_node1747":{ + "core":"COLL_2_shard15_1_1_replica_n1746", + "shard":"shard15_1_1", + "collection":"COLL_2", + "node_name":"N_65p_solr", + "type":"NRT", + "base_url":"http://N_65p/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30883359905E11, + "INDEX.sizeInGB":121.89462772104889}}]}}}, + { + "node":"N_u_solr", + "isLive":true, + "cores":6.0, + "freedisk":4260.821304321289, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard8_1_0":[{"core_node1765":{ + "core":"COLL_2_shard8_1_0_replica_n1764", + "shard":"shard8_1_0", + "collection":"COLL_2", + "node_name":"N_u_solr", + "type":"NRT", + "base_url":"http://N_u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.35571920799E11, + "INDEX.sizeInGB":126.26119032409042}}], + "shard13_1_1":[{"core_node921":{ + "core":"COLL_2_shard13_1_1_replica_n920", + "shard":"shard13_1_1", + "collection":"COLL_2", + "node_name":"N_u_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.29634542289E11, + "base_url":"http://N_u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":120.73157568369061}}], + "shard15_0_1":[{"core_node734":{ + "core":"COLL_2_shard15_0_1_replica_n2", + "shard":"shard15_0_1", + "collection":"COLL_2", + "node_name":"N_u_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.27250282639E11, + "base_url":"http://N_u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":118.51106084790081}}], + "shard13_0_1":[{"core_node1263":{ + "core":"COLL_2_shard13_0_1_replica_n1262", + "shard":"shard13_0_1", + "collection":"COLL_2", + "node_name":"N_u_solr", + "type":"NRT", + 
"base_url":"http://N_u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30321828131E11, + "INDEX.sizeInGB":121.37166050355881}}], + "shard13_1_0":[{"core_node1763":{ + "core":"COLL_2_shard13_1_0_replica_n1762", + "shard":"shard13_1_0", + "collection":"COLL_2", + "node_name":"N_u_solr", + "type":"NRT", + "base_url":"http://N_u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29567251239E11, + "INDEX.sizeInGB":120.66890600975603}}], + "shard13_0_0":[{"core_node1257":{ + "core":"COLL_2_shard13_0_0_replica_n1256", + "shard":"shard13_0_0", + "collection":"COLL_2", + "node_name":"N_u_solr", + "type":"NRT", + "base_url":"http://N_u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30381429251E11, + "INDEX.sizeInGB":121.42716837208718}}]}}}, + { + "node":"N_a_solr", + "isLive":true, + "cores":6.0, + "freedisk":4262.172649383545, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard3_0_0":[{"core_node1809":{ + "core":"COLL_2_shard3_0_0_replica_n1808", + "shard":"shard3_0_0", + "collection":"COLL_2", + "node_name":"N_a_solr", + "type":"NRT", + "base_url":"http://N_a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29798330608E11, + "INDEX.sizeInGB":120.88411544263363}}], + "shard14_0_0":[{"core_node1119":{ + "core":"COLL_2_shard14_0_0_replica_n1118", + "shard":"shard14_0_0", + "collection":"COLL_2", + "node_name":"N_a_solr", + "type":"NRT", + "base_url":"http://N_a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30313698451E11, + "INDEX.sizeInGB":121.36408914905041}}], + "shard15_1_0":[{"core_node1175":{ + "core":"COLL_2_shard15_1_0_replica_n1174", + "shard":"shard15_1_0", + "collection":"COLL_2", + "node_name":"N_a_solr", + "type":"NRT", + "base_url":"http://N_a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.33321224738E11, 
+ "INDEX.sizeInGB":124.16506627388299}}], + "shard14_1_1":[{"core_node836":{ + "core":"COLL_2_shard14_1_1_replica_n834", + "shard":"shard14_1_1", + "collection":"COLL_2", + "node_name":"N_a_solr", + "type":"NRT", + "base_url":"http://N_a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29318568492E11, + "INDEX.sizeInGB":120.43730215355754}}], + "shard14_0_1":[{"core_node1125":{ + "core":"COLL_2_shard14_0_1_replica_n1124", + "shard":"shard14_0_1", + "collection":"COLL_2", + "node_name":"N_a_solr", + "type":"NRT", + "base_url":"http://N_a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31102045065E11, + "INDEX.sizeInGB":122.09829414729029}}], + "shard14_1_0":[{"core_node835":{ + "core":"COLL_2_shard14_1_0_replica_n833", + "shard":"shard14_1_0", + "collection":"COLL_2", + "node_name":"N_a_solr", + "type":"NRT", + "base_url":"http://N_a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27418065808E11, + "INDEX.sizeInGB":118.66732110083103}}]}}}, + { + "node":"N_8_solr", + "isLive":true, + "cores":6.0, + "freedisk":4262.037788391113, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard16_1_1":[{"core_node853":{ + "core":"COLL_2_shard16_1_1_replica_n851", + "shard":"shard16_1_1", + "collection":"COLL_2", + "node_name":"N_8_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.33685050832E11, + "base_url":"http://N_8/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":124.50390572845936}}], + "shard16_0_1":[{"core_node857":{ + "core":"COLL_2_shard16_0_1_replica_n855", + "shard":"shard16_0_1", + "collection":"COLL_2", + "node_name":"N_8_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.30788718518E11, + "base_url":"http://N_8/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":121.80648606084287}}], + "shard16_1_0":[{"core_node852":{ + 
"core":"COLL_2_shard16_1_0_replica_n850", + "shard":"shard16_1_0", + "collection":"COLL_2", + "node_name":"N_8_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.28801317856E11, + "base_url":"http://N_8/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":119.95557495951653}}], + "shard16_0_0":[{"core_node856":{ + "core":"COLL_2_shard16_0_0_replica_n854", + "shard":"shard16_0_0", + "collection":"COLL_2", + "node_name":"N_8_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.2677230126E11, + "base_url":"http://N_8/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":118.06590599939227}}], + "shard2_0_0":[{"core_node796":{ + "core":"COLL_2_shard2_0_0_replica_n794", + "shard":"shard2_0_0", + "collection":"COLL_2", + "node_name":"N_8_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.29517293483E11, + "base_url":"http://N_8/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":120.6223792238161}}], + "shard2_0_1":[{"core_node800":{ + "core":"COLL_2_shard2_0_1_replica_n795", + "shard":"shard2_0_1", + "collection":"COLL_2", + "node_name":"N_8_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.31328007233E11, + "base_url":"http://N_8/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":122.30873781535774}}]}}}, + { + "node":"N_3a7_solr", + "isLive":true, + "cores":6.0, + "freedisk":4263.317134857178, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard7_0_0":[{"core_node775":{ + "core":"COLL_2_shard7_0_0_replica_n2", + "shard":"shard7_0_0", + "collection":"COLL_2", + "node_name":"N_3a7_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.29074533898E11, + "base_url":"http://N_3a7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":120.21002722717822}}], + "shard2_0_0":[{"core_node1823":{ + 
"core":"COLL_2_shard2_0_0_replica_n1822", + "shard":"shard2_0_0", + "collection":"COLL_2", + "node_name":"N_3a7_solr", + "type":"NRT", + "base_url":"http://N_3a7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29476268104E11, + "INDEX.sizeInGB":120.58417136222124}}], + "shard14_0_0":[{"core_node839":{ + "core":"COLL_2_shard14_0_0_replica_n837", + "shard":"shard14_0_0", + "collection":"COLL_2", + "node_name":"N_3a7_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.30330451538E11, + "base_url":"http://N_3a7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":121.37969167716801}}], + "shard3_1_1":[{"core_node462":{ + "core":"COLL_2_shard3_1_1_replica_n2", + "shard":"shard3_1_1", + "collection":"COLL_2", + "node_name":"N_3a7_solr", + "type":"NRT", + "base_url":"http://N_3a7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2992912768E11, + "INDEX.sizeInGB":121.00592970848083}}], + "shard14_1_1":[{"core_node1825":{ + "core":"COLL_2_shard14_1_1_replica_n1824", + "shard":"shard14_1_1", + "collection":"COLL_2", + "node_name":"N_3a7_solr", + "type":"NRT", + "base_url":"http://N_3a7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.300425186E11, + "INDEX.sizeInGB":121.11153323203325}}], + "shard14_0_1":[{"core_node841":{ + "core":"COLL_2_shard14_0_1_replica_n838", + "shard":"shard14_0_1", + "collection":"COLL_2", + "node_name":"N_3a7_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.31168916273E11, + "base_url":"http://N_3a7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":122.1605728128925}}]}}}, + { + "node":"N_11_solr", + "isLive":true, + "cores":6.0, + "freedisk":4264.325901031494, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard6_0_0":[{"core_node1210":{ + "core":"COLL_2_shard6_0_0_replica_n1209", + "shard":"shard6_0_0", + 
"collection":"COLL_2", + "node_name":"N_11_solr", + "type":"NRT", + "base_url":"http://N_11/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28939953876E11, + "INDEX.sizeInGB":120.08468981459737}}], + "shard6_0_1":[{"core_node1212":{ + "core":"COLL_2_shard6_0_1_replica_n1211", + "shard":"shard6_0_1", + "collection":"COLL_2", + "node_name":"N_11_solr", + "type":"NRT", + "base_url":"http://N_11/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28744354495E11, + "INDEX.sizeInGB":119.90252369549125}}], + "shard9_1_1":[{"core_node1155":{ + "core":"COLL_2_shard9_1_1_replica_n1154", + "shard":"shard9_1_1", + "collection":"COLL_2", + "node_name":"N_11_solr", + "type":"NRT", + "base_url":"http://N_11/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.33894519282E11, + "INDEX.sizeInGB":124.69898842461407}}], + "shard9_1_0":[{"core_node1153":{ + "core":"COLL_2_shard9_1_0_replica_n1152", + "shard":"shard9_1_0", + "collection":"COLL_2", + "node_name":"N_11_solr", + "type":"NRT", + "base_url":"http://N_11/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31406038908E11, + "INDEX.sizeInGB":122.3814104758203}}], + "shard9_0_1":[{"core_node438":{ + "core":"COLL_2_shard9_0_1_replica_n436", + "shard":"shard9_0_1", + "collection":"COLL_2", + "node_name":"N_11_solr", + "type":"NRT", + "base_url":"http://N_11/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29282915395E11, + "INDEX.sizeInGB":120.40409761946648}}], + "shard12_1_1":[{"core_node662":{ + "core":"COLL_2_shard12_1_1_replica_n2", + "shard":"shard12_1_1", + "collection":"COLL_2", + "node_name":"N_11_solr", + "type":"NRT", + "base_url":"http://N_11/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.26693447901E11, + "INDEX.sizeInGB":117.99246808607131}}]}}}, + { + "node":"N_4f_solr", + "isLive":true, + "cores":6.0, + "freedisk":4264.210151672363, + 
"sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard2_0_1":[{"core_node915":{ + "core":"COLL_2_shard2_0_1_replica_n914", + "shard":"shard2_0_1", + "collection":"COLL_2", + "node_name":"N_4f_solr", + "type":"NRT", + "base_url":"http://N_4f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31386626219E11, + "INDEX.sizeInGB":122.36333100032061}}], + "shard2_1_0":[{"core_node975":{ + "core":"COLL_2_shard2_1_0_replica_n974", + "shard":"shard2_1_0", + "collection":"COLL_2", + "node_name":"N_4f_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.3001251468E11, + "base_url":"http://N_4f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":121.0835899040103}}], + "shard6_0_0":[{"core_node1182":{ + "core":"COLL_2_shard6_0_0_replica_n1180", + "shard":"shard6_0_0", + "collection":"COLL_2", + "node_name":"N_4f_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.28922958966E11, + "base_url":"http://N_4f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":120.06886207126081}}], + "shard6_0_1":[{"core_node1189":{ + "core":"COLL_2_shard6_0_1_replica_n1181", + "shard":"shard6_0_1", + "collection":"COLL_2", + "node_name":"N_4f_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.28773562289E11, + "base_url":"http://N_4f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":119.92972557339817}}], + "shard3_0_1":[{"core_node546":{ + "core":"COLL_2_shard3_0_1_replica_n2", + "shard":"shard3_0_1", + "collection":"COLL_2", + "node_name":"N_4f_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.31838927317E11, + "base_url":"http://N_4f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":122.78456922341138}}], + "shard2_1_1":[{"core_node1685":{ + "core":"COLL_2_shard2_1_1_replica_n1684", + "shard":"shard2_1_1", + "collection":"COLL_2", + 
"node_name":"N_4f_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.2812596905E11, + "base_url":"http://N_4f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":119.32660737074912}}]}}}, + { + "node":"N_1i_solr", + "isLive":true, + "cores":6.0, + "freedisk":4266.027156829834, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard17_1_0":[{"core_node1200":{ + "core":"COLL_2_shard17_1_0_replica_n1198", + "shard":"shard17_1_0", + "collection":"COLL_2", + "node_name":"N_1i_solr", + "type":"NRT", + "base_url":"http://N_1i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29069936299E11, + "INDEX.sizeInGB":120.20574537944049}}], + "shard17_0_1":[{"core_node1117":{ + "core":"COLL_2_shard17_0_1_replica_n1116", + "shard":"shard17_0_1", + "collection":"COLL_2", + "node_name":"N_1i_solr", + "type":"NRT", + "base_url":"http://N_1i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30694171889E11, + "INDEX.sizeInGB":121.71843265090138}}], + "shard10_1_1":[{"core_node1779":{ + "core":"COLL_2_shard10_1_1_replica_n1778", + "shard":"shard10_1_1", + "collection":"COLL_2", + "node_name":"N_1i_solr", + "type":"NRT", + "base_url":"http://N_1i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30255789623E11, + "INDEX.sizeInGB":121.31015735026449}}], + "shard17_0_0":[{"core_node1781":{ + "core":"COLL_2_shard17_0_0_replica_n1780", + "shard":"shard17_0_0", + "collection":"COLL_2", + "node_name":"N_1i_solr", + "type":"NRT", + "base_url":"http://N_1i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30702509646E11, + "INDEX.sizeInGB":121.72619779221714}}], + "shard10_1_0":[{"core_node1693":{ + "core":"COLL_2_shard10_1_0_replica_n1692", + "shard":"shard10_1_0", + "collection":"COLL_2", + "node_name":"N_1i_solr", + "type":"NRT", + "leader":"true", + 
"INDEX.sizeInBytes":1.27561685082E11, + "base_url":"http://N_1i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":118.80107697285712}}], + "shard17_1_1":[{"core_node1203":{ + "core":"COLL_2_shard17_1_1_replica_n1199", + "shard":"shard17_1_1", + "collection":"COLL_2", + "node_name":"N_1i_solr", + "type":"NRT", + "base_url":"http://N_1i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28764084367E11, + "INDEX.sizeInGB":119.92089857067913}}]}}}, + { + "node":"N_9o_solr", + "isLive":true, + "cores":6.0, + "freedisk":4265.881809234619, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard11_0_1":[{"core_node1221":{ + "core":"COLL_2_shard11_0_1_replica_n1220", + "shard":"shard11_0_1", + "collection":"COLL_2", + "node_name":"N_9o_solr", + "type":"NRT", + "base_url":"http://N_9o/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28020049235E11, + "INDEX.sizeInGB":119.22796185594052}}], + "shard11_1_0":[{"core_node781":{ + "core":"COLL_2_shard11_1_0_replica_n780", + "shard":"shard11_1_0", + "collection":"COLL_2", + "node_name":"N_9o_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.32420261013E11, + "base_url":"http://N_9o/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":123.32597841788083}}], + "shard11_0_0":[{"core_node1219":{ + "core":"COLL_2_shard11_0_0_replica_n1218", + "shard":"shard11_0_0", + "collection":"COLL_2", + "node_name":"N_9o_solr", + "type":"NRT", + "base_url":"http://N_9o/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28002391411E11, + "INDEX.sizeInGB":119.21151672583073}}], + "shard7_0_0":[{"core_node766":{ + "core":"COLL_2_shard7_0_0_replica_n764", + "shard":"shard7_0_0", + "collection":"COLL_2", + "node_name":"N_9o_solr", + "type":"NRT", + "base_url":"http://N_9o/solr", + "state":"active", + "force_set_state":"false", + 
"INDEX.sizeInBytes":1.28994593549E11, + "INDEX.sizeInGB":120.13557697553188}}], + "shard11_1_1":[{"core_node785":{ + "core":"COLL_2_shard11_1_1_replica_n784", + "shard":"shard11_1_1", + "collection":"COLL_2", + "node_name":"N_9o_solr", + "type":"NRT", + "base_url":"http://N_9o/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30909357727E11, + "INDEX.sizeInGB":121.91884007956833}}], + "shard7_0_1":[{"core_node769":{ + "core":"COLL_2_shard7_0_1_replica_n765", + "shard":"shard7_0_1", + "collection":"COLL_2", + "node_name":"N_9o_solr", + "type":"NRT", + "base_url":"http://N_9o/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28908501869E11, + "INDEX.sizeInGB":120.0553978504613}}]}}}, + { + "node":"N_2_solr", + "isLive":true, + "cores":6.0, + "freedisk":4266.604637145996, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard5_1_0":[{"core_node1137":{ + "core":"COLL_2_shard5_1_0_replica_n1136", + "shard":"shard5_1_0", + "collection":"COLL_2", + "node_name":"N_2_solr", + "type":"NRT", + "base_url":"http://N_2/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":7.6877250282E10, + "INDEX.sizeInGB":71.59751866199076}}], + "shard5_1_1":[{"core_node1139":{ + "core":"COLL_2_shard5_1_1_replica_n1138", + "shard":"shard5_1_1", + "collection":"COLL_2", + "node_name":"N_2_solr", + "type":"NRT", + "base_url":"http://N_2/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29952609098E11, + "INDEX.sizeInGB":121.02779848314822}}], + "shard7_0_1":[{"core_node776":{ + "core":"COLL_2_shard7_0_1_replica_n1", + "shard":"shard7_0_1", + "collection":"COLL_2", + "node_name":"N_2_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.2890128588E11, + "base_url":"http://N_2/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":120.04867743700743}}], + "shard9_0_1":[{"core_node478":{ + 
"core":"COLL_2_shard9_0_1_replica_n2", + "shard":"shard9_0_1", + "collection":"COLL_2", + "node_name":"N_2_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.29212951693E11, + "base_url":"http://N_2/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":120.33893884439021}}], + "shard12_0_1":[{"core_node1255":{ + "core":"COLL_2_shard12_0_1_replica_n1254", + "shard":"shard12_0_1", + "collection":"COLL_2", + "node_name":"N_2_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.30384315739E11, + "base_url":"http://N_2/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":121.42985662352294}}], + "shard12_0_0":[{"core_node1249":{ + "core":"COLL_2_shard12_0_0_replica_n1248", + "shard":"shard12_0_0", + "collection":"COLL_2", + "node_name":"N_2_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.29421522442E11, + "base_url":"http://N_2/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":120.53318549133837}}]}}}, + { + "node":"N_2u_solr", + "isLive":true, + "cores":6.0, + "freedisk":4266.648368835449, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard17_1_0":[{"core_node1225":{ + "core":"COLL_2_shard17_1_0_replica_n1224", + "shard":"shard17_1_0", + "collection":"COLL_2", + "node_name":"N_2u_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.29066474889E11, + "base_url":"http://N_2u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":120.20252169016749}}], + "shard17_0_1":[{"core_node1115":{ + "core":"COLL_2_shard17_0_1_replica_n1114", + "shard":"shard17_0_1", + "collection":"COLL_2", + "node_name":"N_2u_solr", + "type":"NRT", + "base_url":"http://N_2u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30049647193E11, + "INDEX.sizeInGB":121.1181722516194}}], + "shard17_0_0":[{"core_node1735":{ + 
"core":"COLL_2_shard17_0_0_replica_n1734", + "shard":"shard17_0_0", + "collection":"COLL_2", + "node_name":"N_2u_solr", + "type":"NRT", + "base_url":"http://N_2u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31102615765E11, + "INDEX.sizeInGB":122.09882565308362}}], + "shard3_1_1":[{"core_node461":{ + "core":"COLL_2_shard3_1_1_replica_n1", + "shard":"shard3_1_1", + "collection":"COLL_2", + "node_name":"N_2u_solr", + "type":"NRT", + "base_url":"http://N_2u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29953637358E11, + "INDEX.sizeInGB":121.02875612489879}}], + "shard17_1_1":[{"core_node1231":{ + "core":"COLL_2_shard17_1_1_replica_n1230", + "shard":"shard17_1_1", + "collection":"COLL_2", + "node_name":"N_2u_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.287734207E11, + "base_url":"http://N_2u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":119.92959370836616}}], + "shard12_1_0":[{"core_node660":{ + "core":"COLL_2_shard12_1_0_replica_n2", + "shard":"shard12_1_0", + "collection":"COLL_2", + "node_name":"N_2u_solr", + "type":"NRT", + "base_url":"http://N_2u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27387972534E11, + "INDEX.sizeInGB":118.63929455541074}}]}}}, + { + "node":"N_m_solr", + "isLive":true, + "cores":6.0, + "freedisk":4267.171646118164, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard6_1_1":[{"core_node1171":{ + "core":"COLL_2_shard6_1_1_replica_n1170", + "shard":"shard6_1_1", + "collection":"COLL_2", + "node_name":"N_m_solr", + "type":"NRT", + "base_url":"http://N_m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31256081422E11, + "INDEX.sizeInGB":122.24175168387592}}], + "shard17_1_0":[{"core_node1227":{ + "core":"COLL_2_shard17_1_0_replica_n1226", + "shard":"shard17_1_0", + "collection":"COLL_2", + 
"node_name":"N_m_solr", + "type":"NRT", + "base_url":"http://N_m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29049722959E11, + "INDEX.sizeInGB":120.18692023959011}}], + "shard6_0_0":[{"core_node1208":{ + "core":"COLL_2_shard6_0_0_replica_n1207", + "shard":"shard6_0_0", + "collection":"COLL_2", + "node_name":"N_m_solr", + "type":"NRT", + "base_url":"http://N_m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28936808614E11, + "INDEX.sizeInGB":120.08176056109369}}], + "shard6_0_1":[{"core_node1214":{ + "core":"COLL_2_shard6_0_1_replica_n1213", + "shard":"shard6_0_1", + "collection":"COLL_2", + "node_name":"N_m_solr", + "type":"NRT", + "base_url":"http://N_m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28745543493E11, + "INDEX.sizeInGB":119.90363103616983}}], + "shard9_0_1":[{"core_node477":{ + "core":"COLL_2_shard9_0_1_replica_n1", + "shard":"shard9_0_1", + "collection":"COLL_2", + "node_name":"N_m_solr", + "type":"NRT", + "base_url":"http://N_m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29063920601E11, + "INDEX.sizeInGB":120.20014282409102}}], + "shard17_1_1":[{"core_node1229":{ + "core":"COLL_2_shard17_1_1_replica_n1228", + "shard":"shard17_1_1", + "collection":"COLL_2", + "node_name":"N_m_solr", + "type":"NRT", + "base_url":"http://N_m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28816978409E11, + "INDEX.sizeInGB":119.97015998605639}}]}}}, + { + "node":"N_t_solr", + "isLive":true, + "cores":6.0, + "freedisk":4266.856658935547, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard11_0_1":[{"core_node1195":{ + "core":"COLL_2_shard11_0_1_replica_n1184", + "shard":"shard11_0_1", + "collection":"COLL_2", + "node_name":"N_t_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.27980394382E11, + 
"base_url":"http://N_t/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":119.19103039614856}}], + "shard11_1_0":[{"core_node1791":{ + "core":"COLL_2_shard11_1_0_replica_n1790", + "shard":"shard11_1_0", + "collection":"COLL_2", + "node_name":"N_t_solr", + "type":"NRT", + "base_url":"http://N_t/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.32416023485E11, + "INDEX.sizeInGB":123.32203191239387}}], + "shard11_0_0":[{"core_node1185":{ + "core":"COLL_2_shard11_0_0_replica_n1183", + "shard":"shard11_0_0", + "collection":"COLL_2", + "node_name":"N_t_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.2777477116E11, + "base_url":"http://N_t/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":118.99952884763479}}], + "shard10_1_1":[{"core_node1743":{ + "core":"COLL_2_shard10_1_1_replica_n1742", + "shard":"shard10_1_1", + "collection":"COLL_2", + "node_name":"N_t_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.30757016285E11, + "base_url":"http://N_t/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":121.77696105558425}}], + "shard10_0_1":[{"core_node905":{ + "core":"COLL_2_shard10_0_1_replica_n904", + "shard":"shard10_0_1", + "collection":"COLL_2", + "node_name":"N_t_solr", + "type":"NRT", + "base_url":"http://N_t/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28142990156E11, + "INDEX.sizeInGB":119.34245951101184}}], + "shard10_0_0":[{"core_node1733":{ + "core":"COLL_2_shard10_0_0_replica_n1732", + "shard":"shard10_0_0", + "collection":"COLL_2", + "node_name":"N_t_solr", + "type":"NRT", + "base_url":"http://N_t/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2914349283E11, + "INDEX.sizeInGB":120.27425023727119}}]}}}, + { + "node":"N_7_solr", + "isLive":true, + "cores":6.0, + "freedisk":4268.472709655762, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + 
"totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard13_1_1":[{"core_node808":{ + "core":"COLL_2_shard13_1_1_replica_n806", + "shard":"shard13_1_1", + "collection":"COLL_2", + "node_name":"N_7_solr", + "type":"NRT", + "base_url":"http://N_7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2961448776E11, + "INDEX.sizeInGB":120.71289844810963}}], + "shard15_0_1":[{"core_node610":{ + "core":"COLL_2_shard15_0_1_replica_n608", + "shard":"shard15_0_1", + "collection":"COLL_2", + "node_name":"N_7_solr", + "type":"NRT", + "base_url":"http://N_7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2722802278E11, + "INDEX.sizeInGB":118.49032973870635}}], + "shard15_0_0":[{"core_node609":{ + "core":"COLL_2_shard15_0_0_replica_n607", + "shard":"shard15_0_0", + "collection":"COLL_2", + "node_name":"N_7_solr", + "type":"NRT", + "base_url":"http://N_7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27258670055E11, + "INDEX.sizeInGB":118.5188722377643}}], + "shard13_0_1":[{"core_node1767":{ + "core":"COLL_2_shard13_0_1_replica_n1766", + "shard":"shard13_0_1", + "collection":"COLL_2", + "node_name":"N_7_solr", + "type":"NRT", + "base_url":"http://N_7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30339106107E11, + "INDEX.sizeInGB":121.38775187265128}}], + "shard13_1_0":[{"core_node1689":{ + "core":"COLL_2_shard13_1_0_replica_n1688", + "shard":"shard13_1_0", + "collection":"COLL_2", + "node_name":"N_7_solr", + "type":"NRT", + "base_url":"http://N_7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29592823396E11, + "INDEX.sizeInGB":120.69272193685174}}], + "shard13_0_0":[{"core_node1713":{ + "core":"COLL_2_shard13_0_0_replica_n1712", + "shard":"shard13_0_0", + "collection":"COLL_2", + "node_name":"N_7_solr", + "type":"NRT", + "base_url":"http://N_7/solr", + "state":"active", + "force_set_state":"false", + 
"INDEX.sizeInBytes":1.30437704659E11, + "INDEX.sizeInGB":121.47957892995328}}]}}}, + { + "node":"N_6c_solr", + "isLive":true, + "cores":6.0, + "freedisk":4269.135753631592, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard17_0_1":[{"core_node848":{ + "core":"COLL_2_shard17_0_1_replica_n843", + "shard":"shard17_0_1", + "collection":"COLL_2", + "node_name":"N_6c_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.30730929322E11, + "base_url":"http://N_6c/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":121.7526656780392}}], + "shard17_0_0":[{"core_node844":{ + "core":"COLL_2_shard17_0_0_replica_n842", + "shard":"shard17_0_0", + "collection":"COLL_2", + "node_name":"N_6c_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.30743109221E11, + "base_url":"http://N_6c/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":121.76400909293443}}], + "shard4_0_0":[{"core_node445":{ + "core":"COLL_2_shard4_0_0_replica_n443", + "shard":"shard4_0_0", + "collection":"COLL_2", + "node_name":"N_6c_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.28741762257E11, + "base_url":"http://N_6c/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":119.90010948572308}}], + "shard4_1_0":[{"core_node457":{ + "core":"COLL_2_shard4_1_0_replica_n455", + "shard":"shard4_1_0", + "collection":"COLL_2", + "node_name":"N_6c_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.27664473589E11, + "base_url":"http://N_6c/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":118.89680622983724}}], + "shard4_0_1":[{"core_node446":{ + "core":"COLL_2_shard4_0_1_replica_n444", + "shard":"shard4_0_1", + "collection":"COLL_2", + "node_name":"N_6c_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.28032413116E11, + "base_url":"http://N_6c/solr", + "state":"active", + 
"force_set_state":"false", + "INDEX.sizeInGB":119.23947661742568}}], + "shard4_1_1":[{"core_node458":{ + "core":"COLL_2_shard4_1_1_replica_n456", + "shard":"shard4_1_1", + "collection":"COLL_2", + "node_name":"N_6c_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.27865802727E11, + "base_url":"http://N_6c/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":119.08430860098451}}]}}}, + { + "node":"N_6i_solr", + "isLive":true, + "cores":6.0, + "freedisk":4269.712917327881, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard10_1_1":[{"core_node840":{ + "core":"COLL_2_shard10_1_1_replica_n830", + "shard":"shard10_1_1", + "collection":"COLL_2", + "node_name":"N_6i_solr", + "type":"NRT", + "base_url":"http://N_6i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30273229534E11, + "INDEX.sizeInGB":121.32639953307807}}], + "shard10_1_0":[{"core_node831":{ + "core":"COLL_2_shard10_1_0_replica_n829", + "shard":"shard10_1_0", + "collection":"COLL_2", + "node_name":"N_6i_solr", + "type":"NRT", + "base_url":"http://N_6i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27564995026E11, + "INDEX.sizeInGB":118.80415959842503}}], + "shard10_0_1":[{"core_node1739":{ + "core":"COLL_2_shard10_0_1_replica_n1738", + "shard":"shard10_0_1", + "collection":"COLL_2", + "node_name":"N_6i_solr", + "type":"NRT", + "base_url":"http://N_6i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28024871739E11, + "INDEX.sizeInGB":119.2324531627819}}], + "shard2_1_0":[{"core_node1727":{ + "core":"COLL_2_shard2_1_0_replica_n1726", + "shard":"shard2_1_0", + "collection":"COLL_2", + "node_name":"N_6i_solr", + "type":"NRT", + "base_url":"http://N_6i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30025926492E11, + "INDEX.sizeInGB":121.0960806272924}}], + 
"shard10_0_0":[{"core_node897":{ + "core":"COLL_2_shard10_0_0_replica_n896", + "shard":"shard10_0_0", + "collection":"COLL_2", + "node_name":"N_6i_solr", + "type":"NRT", + "base_url":"http://N_6i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29103730913E11, + "INDEX.sizeInGB":120.2372190663591}}], + "shard2_1_1":[{"core_node979":{ + "core":"COLL_2_shard2_1_1_replica_n978", + "shard":"shard2_1_1", + "collection":"COLL_2", + "node_name":"N_6i_solr", + "type":"NRT", + "base_url":"http://N_6i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2815510735E11, + "INDEX.sizeInGB":119.35374452732503}}]}}}, + { + "node":"N_3_solr", + "isLive":true, + "cores":6.0, + "freedisk":4272.45711517334, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard16_1_1":[{"core_node997":{ + "core":"COLL_2_shard16_1_1_replica_n996", + "shard":"shard16_1_1", + "collection":"COLL_2", + "node_name":"N_3_solr", + "type":"NRT", + "base_url":"http://N_3/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.26611980672E11, + "INDEX.sizeInGB":117.91659581661224}}], + "shard16_1_0":[{"core_node991":{ + "core":"COLL_2_shard16_1_0_replica_n990", + "shard":"shard16_1_0", + "collection":"COLL_2", + "node_name":"N_3_solr", + "type":"NRT", + "base_url":"http://N_3/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28724323652E11, + "INDEX.sizeInGB":119.88386851921678}}], + "shard1_1_1":[{"core_node474":{ + "core":"COLL_2_shard1_1_1_replica_n2", + "shard":"shard1_1_1", + "collection":"COLL_2", + "node_name":"N_3_solr", + "type":"NRT", + "base_url":"http://N_3/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29556889925E11, + "INDEX.sizeInGB":120.65925628412515}}], + "shard4_0_0":[{"core_node1737":{ + "core":"COLL_2_shard4_0_0_replica_n1736", + "shard":"shard4_0_0", + "collection":"COLL_2", + 
"node_name":"N_3_solr", + "type":"NRT", + "base_url":"http://N_3/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28645187639E11, + "INDEX.sizeInGB":119.81016736384481}}], + "shard4_1_0":[{"core_node523":{ + "core":"COLL_2_shard4_1_0_replica_n1", + "shard":"shard4_1_0", + "collection":"COLL_2", + "node_name":"N_3_solr", + "type":"NRT", + "base_url":"http://N_3/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27649471364E11, + "INDEX.sizeInGB":118.88283431902528}}], + "shard9_0_0":[{"core_node1815":{ + "core":"COLL_2_shard9_0_0_replica_n1814", + "shard":"shard9_0_0", + "collection":"COLL_2", + "node_name":"N_3_solr", + "type":"NRT", + "base_url":"http://N_3/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29037175651E11, + "INDEX.sizeInGB":120.17523464839906}}]}}}, + { + "node":"N_1d_solr", + "isLive":true, + "cores":6.0, + "freedisk":4273.009799957275, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard3_1_0":[{"core_node425":{ + "core":"COLL_2_shard3_1_0_replica_n423", + "shard":"shard3_1_0", + "collection":"COLL_2", + "node_name":"N_1d_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.2828759808E11, + "base_url":"http://N_1d/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":119.47713613510132}}], + "shard3_1_1":[{"core_node426":{ + "core":"COLL_2_shard3_1_1_replica_n424", + "shard":"shard3_1_1", + "collection":"COLL_2", + "node_name":"N_1d_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.29948029547E11, + "base_url":"http://N_1d/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":121.02353344392031}}], + "shard15_0_0":[{"core_node732":{ + "core":"COLL_2_shard15_0_0_replica_n2", + "shard":"shard15_0_0", + "collection":"COLL_2", + "node_name":"N_1d_solr", + "type":"NRT", + "leader":"true", + 
"INDEX.sizeInBytes":1.27262832088E11, + "base_url":"http://N_1d/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":118.5227484330535}}], + "shard12_1_0":[{"core_node1789":{ + "core":"COLL_2_shard12_1_0_replica_n1788", + "shard":"shard12_1_0", + "collection":"COLL_2", + "node_name":"N_1d_solr", + "type":"NRT", + "base_url":"http://N_1d/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27487519935E11, + "INDEX.sizeInGB":118.73200529720634}}], + "shard14_1_1":[{"core_node1741":{ + "core":"COLL_2_shard14_1_1_replica_n1740", + "shard":"shard14_1_1", + "collection":"COLL_2", + "node_name":"N_1d_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.29231781669E11, + "base_url":"http://N_1d/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":120.35647562611848}}], + "shard14_1_0":[{"core_node1129":{ + "core":"COLL_2_shard14_1_0_replica_n1128", + "shard":"shard14_1_0", + "collection":"COLL_2", + "node_name":"N_1d_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.27407685053E11, + "base_url":"http://N_1d/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":118.65765326935798}}]}}}, + { + "node":"N_1_solr", + "isLive":true, + "cores":6.0, + "freedisk":4274.765396118164, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard16_1_1":[{"core_node995":{ + "core":"COLL_2_shard16_1_1_replica_n994", + "shard":"shard16_1_1", + "collection":"COLL_2", + "node_name":"N_1_solr", + "type":"NRT", + "base_url":"http://N_1/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.26672765511E11, + "INDEX.sizeInGB":117.97320610936731}}], + "shard16_0_1":[{"core_node989":{ + "core":"COLL_2_shard16_0_1_replica_n988", + "shard":"shard16_0_1", + "collection":"COLL_2", + "node_name":"N_1_solr", + "type":"NRT", + "base_url":"http://N_1/solr", + "state":"active", + 
"force_set_state":"false", + "INDEX.sizeInBytes":1.3069803609E11, + "INDEX.sizeInGB":121.72203146852553}}], + "shard16_1_0":[{"core_node993":{ + "core":"COLL_2_shard16_1_0_replica_n992", + "shard":"shard16_1_0", + "collection":"COLL_2", + "node_name":"N_1_solr", + "type":"NRT", + "base_url":"http://N_1/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28812502313E11, + "INDEX.sizeInGB":119.96599129680544}}], + "shard16_0_0":[{"core_node983":{ + "core":"COLL_2_shard16_0_0_replica_n982", + "shard":"shard16_0_0", + "collection":"COLL_2", + "node_name":"N_1_solr", + "type":"NRT", + "base_url":"http://N_1/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.26766519189E11, + "INDEX.sizeInGB":118.06052102614194}}], + "shard18_0_0":[{"core_node875":{ + "core":"COLL_2_shard18_0_0_replica_n2", + "shard":"shard18_0_0", + "collection":"COLL_2", + "node_name":"N_1_solr", + "type":"NRT", + "base_url":"http://N_1/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28033512867E11, + "INDEX.sizeInGB":119.24050084035844}}], + "shard12_1_1":[{"core_node586":{ + "core":"COLL_2_shard12_1_1_replica_n584", + "shard":"shard12_1_1", + "collection":"COLL_2", + "node_name":"N_1_solr", + "type":"NRT", + "base_url":"http://N_1/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2671712403E11, + "INDEX.sizeInGB":118.01451819948852}}]}}}, + { + "node":"N_aw_solr", + "isLive":true, + "cores":6.0, + "freedisk":4276.759601593018, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard18_1_1":[{"core_node1821":{ + "core":"COLL_2_shard18_1_1_replica_n1820", + "shard":"shard18_1_1", + "collection":"COLL_2", + "node_name":"N_aw_solr", + "type":"NRT", + "base_url":"http://N_aw/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28188518759E11, + "INDEX.sizeInGB":119.38486132677644}}], + 
"shard4_1_1":[{"core_node525":{ + "core":"COLL_2_shard4_1_1_replica_n1", + "shard":"shard4_1_1", + "collection":"COLL_2", + "node_name":"N_aw_solr", + "type":"NRT", + "base_url":"http://N_aw/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27899653279E11, + "INDEX.sizeInGB":119.11583438422531}}], + "shard3_1_0":[{"core_node460":{ + "core":"COLL_2_shard3_1_0_replica_n2", + "shard":"shard3_1_0", + "collection":"COLL_2", + "node_name":"N_aw_solr", + "type":"NRT", + "base_url":"http://N_aw/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28273400877E11, + "INDEX.sizeInGB":119.46391395945102}}], + "shard15_0_1":[{"core_node1817":{ + "core":"COLL_2_shard15_0_1_replica_n1816", + "shard":"shard15_0_1", + "collection":"COLL_2", + "node_name":"N_aw_solr", + "type":"NRT", + "base_url":"http://N_aw/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27129784031E11, + "INDEX.sizeInGB":118.39883777406067}}], + "shard12_1_1":[{"core_node661":{ + "core":"COLL_2_shard12_1_1_replica_n1", + "shard":"shard12_1_1", + "collection":"COLL_2", + "node_name":"N_aw_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.26701654869E11, + "base_url":"http://N_aw/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":118.00011142063886}}], + "shard12_1_0":[{"core_node659":{ + "core":"COLL_2_shard12_1_0_replica_n1", + "shard":"shard12_1_0", + "collection":"COLL_2", + "node_name":"N_aw_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.27434400341E11, + "base_url":"http://N_aw/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":118.68253382015973}}]}}}, + { + "node":"N_1h_solr", + "isLive":true, + "cores":6.0, + "freedisk":4297.329685211182, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard1_0_0":[{"core_node1729":{ + "core":"COLL_2_shard1_0_0_replica_n1728", + 
"shard":"shard1_0_0", + "collection":"COLL_2", + "node_name":"N_1h_solr", + "type":"NRT", + "base_url":"http://N_1h/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":5.7176945428E10, + "INDEX.sizeInGB":53.25018002465367}}], + "shard7_1_0":[{"core_node1145":{ + "core":"COLL_2_shard7_1_0_replica_n1144", + "shard":"shard7_1_0", + "collection":"COLL_2", + "node_name":"N_1h_solr", + "type":"NRT", + "base_url":"http://N_1h/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2949609012E11, + "INDEX.sizeInGB":120.60263205319643}}], + "shard7_1_1":[{"core_node1701":{ + "core":"COLL_2_shard7_1_1_replica_n1700", + "shard":"shard7_1_1", + "collection":"COLL_2", + "node_name":"N_1h_solr", + "type":"NRT", + "base_url":"http://N_1h/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28489170345E11, + "INDEX.sizeInGB":119.66486493591219}}], + "shard3_0_1":[{"core_node510":{ + "core":"COLL_2_shard3_0_1_replica_n508", + "shard":"shard3_0_1", + "collection":"COLL_2", + "node_name":"N_1h_solr", + "type":"NRT", + "base_url":"http://N_1h/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31866901019E11, + "INDEX.sizeInGB":122.81062176357955}}], + "shard12_0_1":[{"core_node1761":{ + "core":"COLL_2_shard12_0_1_replica_n1760", + "shard":"shard12_0_1", + "collection":"COLL_2", + "node_name":"N_1h_solr", + "type":"NRT", + "base_url":"http://N_1h/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30342308934E11, + "INDEX.sizeInGB":121.39073473773897}}], + "shard12_0_0":[{"core_node1697":{ + "core":"COLL_2_shard12_0_0_replica_n1696", + "shard":"shard12_0_0", + "collection":"COLL_2", + "node_name":"N_1h_solr", + "type":"NRT", + "base_url":"http://N_1h/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29369271388E11, + "INDEX.sizeInGB":120.48452290520072}}]}}}, + { + "node":"N_29_solr", + "isLive":true, + "cores":6.0, + 
"freedisk":4303.548599243164, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard8_0_0":[{"core_node1691":{ + "core":"COLL_2_shard8_0_0_replica_n1690", + "shard":"shard8_0_0", + "collection":"COLL_2", + "node_name":"N_29_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.30176337999E11, + "base_url":"http://N_29/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":121.23616225924343}}], + "shard8_0_1":[{"core_node1787":{ + "core":"COLL_2_shard8_0_1_replica_n1786", + "shard":"shard8_0_1", + "collection":"COLL_2", + "node_name":"N_29_solr", + "type":"NRT", + "base_url":"http://N_29/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.32692723859E11, + "INDEX.sizeInGB":123.57972921710461}}], + "shard7_1_0":[{"core_node1143":{ + "core":"COLL_2_shard7_1_0_replica_n1142", + "shard":"shard7_1_0", + "collection":"COLL_2", + "node_name":"N_29_solr", + "type":"NRT", + "base_url":"http://N_29/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2946739865E11, + "INDEX.sizeInGB":120.57591103948653}}], + "shard7_0_1":[{"core_node777":{ + "core":"COLL_2_shard7_0_1_replica_n2", + "shard":"shard7_0_1", + "collection":"COLL_2", + "node_name":"N_29_solr", + "type":"NRT", + "base_url":"http://N_29/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":8.6794048237E10, + "INDEX.sizeInGB":80.83325646538287}}], + "shard7_1_1":[{"core_node1759":{ + "core":"COLL_2_shard7_1_1_replica_n1758", + "shard":"shard7_1_1", + "collection":"COLL_2", + "node_name":"N_29_solr", + "type":"NRT", + "base_url":"http://N_29/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28546712309E11, + "INDEX.sizeInGB":119.7184550659731}}], + "shard6_1_0":[{"core_node1793":{ + "core":"COLL_2_shard6_1_0_replica_n1792", + "shard":"shard6_1_0", + "collection":"COLL_2", + "node_name":"N_29_solr", + "type":"NRT", 
+ "base_url":"http://N_29/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29365181039E11, + "INDEX.sizeInGB":120.48071347083896}}]}}}, + { + "node":"N_e_solr", + "isLive":true, + "cores":6.0, + "freedisk":4334.874732971191, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard1_0_1":[{"core_node1719":{ + "core":"COLL_2_shard1_0_1_replica_n1718", + "shard":"shard1_0_1", + "collection":"COLL_2", + "node_name":"N_e_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":5.9506746089E10, + "base_url":"http://N_e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":55.41997597459704}}], + "shard18_0_0":[{"core_node1819":{ + "core":"COLL_2_shard18_0_0_replica_n1818", + "shard":"shard18_0_0", + "collection":"COLL_2", + "node_name":"N_e_solr", + "type":"NRT", + "base_url":"http://N_e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28218931509E11, + "INDEX.sizeInGB":119.41318540740758}}], + "shard13_1_1":[{"core_node925":{ + "core":"COLL_2_shard13_1_1_replica_n924", + "shard":"shard13_1_1", + "collection":"COLL_2", + "node_name":"N_e_solr", + "type":"NRT", + "base_url":"http://N_e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29598508564E11, + "INDEX.sizeInGB":120.69801666215062}}], + "shard18_1_0":[{"core_node672":{ + "core":"COLL_2_shard18_1_0_replica_n2", + "shard":"shard18_1_0", + "collection":"COLL_2", + "node_name":"N_e_solr", + "type":"NRT", + "base_url":"http://N_e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29108586002E11, + "INDEX.sizeInGB":120.24174072034657}}], + "shard15_0_0":[{"core_node731":{ + "core":"COLL_2_shard15_0_0_replica_n1", + "shard":"shard15_0_0", + "collection":"COLL_2", + "node_name":"N_e_solr", + "type":"NRT", + "base_url":"http://N_e/solr", + "state":"active", + "force_set_state":"false", + 
"INDEX.sizeInBytes":1.27235871561E11, + "INDEX.sizeInGB":118.49763948563486}}], + "shard13_1_0":[{"core_node923":{ + "core":"COLL_2_shard13_1_0_replica_n922", + "shard":"shard13_1_0", + "collection":"COLL_2", + "node_name":"N_e_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.29514183189E11, + "base_url":"http://N_e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":120.6194825368002}}]}}}, + { + "node":"N_2w_solr", + "isLive":true, + "cores":6.0, + "freedisk":4336.208312988281, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard1_0_1":[{"core_node1677":{ + "core":"COLL_2_shard1_0_1_replica_n1676", + "shard":"shard1_0_1", + "collection":"COLL_2", + "node_name":"N_2w_solr", + "type":"NRT", + "base_url":"http://N_2w/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":5.9557275352E10, + "INDEX.sizeInGB":55.46703501790762}}], + "shard1_1_1":[{"core_node1807":{ + "core":"COLL_2_shard1_1_1_replica_n1806", + "shard":"shard1_1_1", + "collection":"COLL_2", + "node_name":"N_2w_solr", + "type":"NRT", + "base_url":"http://N_2w/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2954748046E11, + "INDEX.sizeInGB":120.6504930369556}}], + "shard4_1_0":[{"core_node1775":{ + "core":"COLL_2_shard4_1_0_replica_n1774", + "shard":"shard4_1_0", + "collection":"COLL_2", + "node_name":"N_2w_solr", + "type":"NRT", + "base_url":"http://N_2w/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27659935903E11, + "INDEX.sizeInGB":118.89258018042892}}], + "shard18_1_1":[{"core_node673":{ + "core":"COLL_2_shard18_1_1_replica_n1", + "shard":"shard18_1_1", + "collection":"COLL_2", + "node_name":"N_2w_solr", + "type":"NRT", + "base_url":"http://N_2w/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28226679933E11, + "INDEX.sizeInGB":119.42040168959647}}], + 
"shard4_1_1":[{"core_node1805":{ + "core":"COLL_2_shard4_1_1_replica_n1804", + "shard":"shard4_1_1", + "collection":"COLL_2", + "node_name":"N_2w_solr", + "type":"NRT", + "base_url":"http://N_2w/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27878088796E11, + "INDEX.sizeInGB":119.0957508943975}}], + "shard18_1_0":[{"core_node671":{ + "core":"COLL_2_shard18_1_0_replica_n1", + "shard":"shard18_1_0", + "collection":"COLL_2", + "node_name":"N_2w_solr", + "type":"NRT", + "base_url":"http://N_2w/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28884297502E11, + "INDEX.sizeInGB":120.03285577706993}}]}}}, + { + "node":"N_5_solr", + "isLive":true, + "cores":6.0, + "freedisk":4397.149795532227, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "replicas":{"COLL_2":{ + "shard1_1_0":[{"core_node1721":{ + "core":"COLL_2_shard1_1_0_replica_n1720", + "shard":"shard1_1_0", + "collection":"COLL_2", + "node_name":"N_5_solr", + "type":"NRT", + "base_url":"http://N_5/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29009851855E11, + "INDEX.sizeInGB":120.14978738036007}}], + "shard1_0_1":[{"core_node1669":{ + "core":"COLL_2_shard1_0_1_replica_n1668", + "shard":"shard1_0_1", + "collection":"COLL_2", + "node_name":"N_5_solr", + "type":"NRT", + "base_url":"http://N_5/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":5.9574276743E10, + "INDEX.sizeInGB":55.482868797145784}}], + "shard1_1_1":[{"core_node418":{ + "core":"COLL_2_shard1_1_1_replica_n416", + "shard":"shard1_1_1", + "collection":"COLL_2", + "node_name":"N_5_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":1.29698716918E11, + "base_url":"http://N_5/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":120.79134296439588}}], + "shard2_0_0":[{"core_node911":{ + "core":"COLL_2_shard2_0_0_replica_n910", + "shard":"shard2_0_0", + 
"collection":"COLL_2", + "node_name":"N_5_solr", + "type":"NRT", + "base_url":"http://N_5/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29504451209E11, + "INDEX.sizeInGB":120.6104189241305}}], + "shard2_0_1":[{"core_node917":{ + "core":"COLL_2_shard2_0_1_replica_n916", + "shard":"shard2_0_1", + "collection":"COLL_2", + "node_name":"N_5_solr", + "type":"NRT", + "base_url":"http://N_5/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31334463143E11, + "INDEX.sizeInGB":122.31475035008043}}], + "shard1_0_0":[{"core_node1725":{ + "core":"COLL_2_shard1_0_0_replica_n1724", + "shard":"shard1_0_0", + "collection":"COLL_2", + "node_name":"N_5_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":5.7183711221E10, + "base_url":"http://N_5/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":53.25648116040975}}]}}}, + { + "node":"N_do_solr", + "isLive":true, + "cores":5.0, + "freedisk":407.25314712524414, + "sysprop.pool":"pool-02", + "sysprop.az":"us-east-1c", + "totaldisk":999.51171875, + "replicas":{ + "COLL_1":{ + "shard3_0_0":[{"core_node112":{ + "core":"COLL_1_shard3_0_0_replica_n111", + "shard":"shard3_0_0", + "collection":"COLL_1", + "node_name":"N_do_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":4.4957115524E10, + "base_url":"http://N_do/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":41.86957657709718}}], + "shard3_1_0":[{"core_node116":{ + "core":"COLL_1_shard3_1_0_replica_n115", + "shard":"shard3_1_0", + "collection":"COLL_1", + "node_name":"N_do_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":4.3732753925E10, + "base_url":"http://N_do/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":40.72930098045617}}], + "shard3_0_1":[{"core_node114":{ + "core":"COLL_1_shard3_0_1_replica_n113", + "shard":"shard3_0_1", + "collection":"COLL_1", + "node_name":"N_do_solr", + "type":"NRT", + 
"leader":"true", + "INDEX.sizeInBytes":4.577095697E10, + "base_url":"http://N_do/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":42.62752548791468}}], + "shard3_1_1":[{"core_node118":{ + "core":"COLL_1_shard3_1_1_replica_n117", + "shard":"shard3_1_1", + "collection":"COLL_1", + "node_name":"N_do_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":4.8532509927E10, + "base_url":"http://N_do/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":45.19942209776491}}]}, + "COLL_0":{"shard3":[{"core_node15":{ + "core":"COLL_0_shard3_replica_n12", + "shard":"shard3", + "collection":"COLL_0", + "node_name":"N_do_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":3.1297025422E10, + "base_url":"http://N_do/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":29.147626293823123}}]}}}, + { + "node":"N_3a_solr", + "isLive":true, + "cores":5.0, + "freedisk":407.706729888916, + "sysprop.pool":"pool-02", + "sysprop.az":"us-east-1b", + "totaldisk":999.51171875, + "replicas":{ + "COLL_1":{ + "shard3_0_0":[{"core_node73":{ + "core":"COLL_1_shard3_0_0_replica_n71", + "shard":"shard3_0_0", + "collection":"COLL_1", + "node_name":"N_3a_solr", + "type":"NRT", + "base_url":"http://N_3a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.5160600486E10, + "INDEX.sizeInGB":42.05908671580255}}], + "shard3_1_0":[{"core_node77":{ + "core":"COLL_1_shard3_1_0_replica_n75", + "shard":"shard3_1_0", + "collection":"COLL_1", + "node_name":"N_3a_solr", + "type":"NRT", + "base_url":"http://N_3a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.5090380622E10, + "INDEX.sizeInGB":41.99368937127292}}], + "shard3_0_1":[{"core_node74":{ + "core":"COLL_1_shard3_0_1_replica_n72", + "shard":"shard3_0_1", + "collection":"COLL_1", + "node_name":"N_3a_solr", + "type":"NRT", + "base_url":"http://N_3a/solr", + "state":"active", + "force_set_state":"false", + 
"INDEX.sizeInBytes":4.5879426317E10, + "INDEX.sizeInGB":42.72854543942958}}], + "shard3_1_1":[{"core_node78":{ + "core":"COLL_1_shard3_1_1_replica_n76", + "shard":"shard3_1_1", + "collection":"COLL_1", + "node_name":"N_3a_solr", + "type":"NRT", + "base_url":"http://N_3a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.6849085882E10, + "INDEX.sizeInGB":43.631611282005906}}]}, + "COLL_0":{"shard3":[{"core_node17":{ + "core":"COLL_0_shard3_replica_n14", + "shard":"shard3", + "collection":"COLL_0", + "node_name":"N_3a_solr", + "type":"NRT", + "base_url":"http://N_3a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":3.0819950704E10, + "INDEX.sizeInGB":28.70331583917141}}]}}}, + { + "node":"N_v_solr", + "isLive":true, + "cores":5.0, + "freedisk":412.18456649780273, + "sysprop.pool":"pool-02", + "sysprop.az":"us-east-1a", + "totaldisk":999.51171875, + "replicas":{ + "COLL_1":{ + "shard3_0_0":[{"core_node120":{ + "core":"COLL_1_shard3_0_0_replica_n119", + "shard":"shard3_0_0", + "collection":"COLL_1", + "node_name":"N_v_solr", + "type":"NRT", + "base_url":"http://N_v/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.3809517838E10, + "INDEX.sizeInGB":40.80079294554889}}], + "shard3_1_0":[{"core_node124":{ + "core":"COLL_1_shard3_1_0_replica_n123", + "shard":"shard3_1_0", + "collection":"COLL_1", + "node_name":"N_v_solr", + "type":"NRT", + "base_url":"http://N_v/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.5638162031E10, + "INDEX.sizeInGB":42.503850563429296}}], + "shard3_0_1":[{"core_node122":{ + "core":"COLL_1_shard3_0_1_replica_n121", + "shard":"shard3_0_1", + "collection":"COLL_1", + "node_name":"N_v_solr", + "type":"NRT", + "base_url":"http://N_v/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.6310602091E10, + "INDEX.sizeInGB":43.13010917138308}}], + "shard3_1_1":[{"core_node126":{ + 
"core":"COLL_1_shard3_1_1_replica_n125", + "shard":"shard3_1_1", + "collection":"COLL_1", + "node_name":"N_v_solr", + "type":"NRT", + "base_url":"http://N_v/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.4257494507E10, + "INDEX.sizeInGB":41.21800373028964}}]}, + "COLL_0":{"shard3":[{"core_node18":{ + "core":"COLL_0_shard3_replica_n16", + "shard":"shard3", + "collection":"COLL_0", + "node_name":"N_v_solr", + "type":"NRT", + "base_url":"http://N_v/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":2.8932093807E10, + "INDEX.sizeInGB":26.94511209335178}}]}}}, + { + "node":"N_13_solr", + "isLive":true, + "cores":5.0, + "freedisk":718.1634063720703, + "sysprop.pool":"pool-02", + "sysprop.az":"us-east-1c", + "totaldisk":999.51171875, + "replicas":{ + "COLL_1":{ + "shard1_1_0":[{"core_node61":{ + "core":"COLL_1_shard1_1_0_replica_n59", + "shard":"shard1_1_0", + "collection":"COLL_1", + "node_name":"N_13_solr", + "type":"NRT", + "base_url":"http://N_13/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.3783419579E10, + "INDEX.sizeInGB":40.77648704778403}}], + "shard1_0_1":[{"core_node58":{ + "core":"COLL_1_shard1_0_1_replica_n56", + "shard":"shard1_0_1", + "collection":"COLL_1", + "node_name":"N_13_solr", + "type":"NRT", + "base_url":"http://N_13/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.4932001726E10, + "INDEX.sizeInGB":41.846187530085444}}], + "shard1_1_1":[{"core_node62":{ + "core":"COLL_1_shard1_1_1_replica_n60", + "shard":"shard1_1_1", + "collection":"COLL_1", + "node_name":"N_13_solr", + "type":"NRT", + "base_url":"http://N_13/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.811959042E10, + "INDEX.sizeInGB":44.814860839396715}}], + "shard1_0_0":[{"core_node57":{ + "core":"COLL_1_shard1_0_0_replica_n55", + "shard":"shard1_0_0", + "collection":"COLL_1", + "node_name":"N_13_solr", + "type":"NRT", + 
"base_url":"http://N_13/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.5921892273E10, + "INDEX.sizeInGB":42.76809494290501}}]}, + "COLL_0":{"shard2":[{"core_node13":{ + "core":"COLL_0_shard2_replica_n10", + "shard":"shard2", + "collection":"COLL_0", + "node_name":"N_13_solr", + "type":"NRT", + "base_url":"http://N_13/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":3.4248182159E10, + "INDEX.sizeInGB":31.896105184219778}}]}}}, + { + "node":"N_3to_solr", + "isLive":true, + "cores":5.0, + "freedisk":794.5433731079102, + "sysprop.pool":"pool-02", + "sysprop.az":"us-east-1a", + "totaldisk":999.51171875, + "replicas":{ + "COLL_1":{ + "shard1_1_0":[{"core_node84":{ + "core":"COLL_1_shard1_1_0_replica_n83", + "shard":"shard1_1_0", + "collection":"COLL_1", + "node_name":"N_3to_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":4.3892348528E10, + "base_url":"http://N_3to/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":40.87793503701687}}], + "shard1_0_1":[{"core_node82":{ + "core":"COLL_1_shard1_0_1_replica_n81", + "shard":"shard1_0_1", + "collection":"COLL_1", + "node_name":"N_3to_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":4.4936912617E10, + "base_url":"http://N_3to/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":41.85076115373522}}], + "shard1_1_1":[{"core_node86":{ + "core":"COLL_1_shard1_1_1_replica_n85", + "shard":"shard1_1_1", + "collection":"COLL_1", + "node_name":"N_3to_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":5.1015133973E10, + "base_url":"http://N_3to/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":47.511545916087925}}], + "shard1_0_0":[{"core_node80":{ + "core":"COLL_1_shard1_0_0_replica_n79", + "shard":"shard1_0_0", + "collection":"COLL_1", + "node_name":"N_3to_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":4.644843302E10, + 
"base_url":"http://N_3to/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":43.258474227041006}}]}, + "COLL_0":{"shard2":[{"core_node11":{ + "core":"COLL_0_shard2_replica_n8", + "shard":"shard2", + "collection":"COLL_0", + "node_name":"N_3to_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":3.0722710385E10, + "base_url":"http://N_3to/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":28.6127537349239}}]}}}, + { + "node":"N_16_solr", + "isLive":true, + "cores":5.0, + "freedisk":795.7872657775879, + "sysprop.pool":"pool-02", + "sysprop.az":"us-east-1b", + "totaldisk":999.51171875, + "replicas":{ + "COLL_1":{ + "shard2_0_0":[{"core_node100":{ + "core":"COLL_1_shard2_0_0_replica_n99", + "shard":"shard2_0_0", + "collection":"COLL_1", + "node_name":"N_16_solr", + "type":"NRT", + "base_url":"http://N_16/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.8764329025E10, + "INDEX.sizeInGB":45.41532045695931}}], + "shard2_0_1":[{"core_node102":{ + "core":"COLL_1_shard2_0_1_replica_n101", + "shard":"shard2_0_1", + "collection":"COLL_1", + "node_name":"N_16_solr", + "type":"NRT", + "base_url":"http://N_16/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.3740343099E10, + "INDEX.sizeInGB":40.73636894952506}}], + "shard2_1_0":[{"core_node96":{ + "core":"COLL_1_shard2_1_0_replica_n95", + "shard":"shard2_1_0", + "collection":"COLL_1", + "node_name":"N_16_solr", + "type":"NRT", + "base_url":"http://N_16/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.5585236311E10, + "INDEX.sizeInGB":42.45455964561552}}], + "shard2_1_1":[{"core_node98":{ + "core":"COLL_1_shard2_1_1_replica_n97", + "shard":"shard2_1_1", + "collection":"COLL_1", + "node_name":"N_16_solr", + "type":"NRT", + "base_url":"http://N_16/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.527594328E10, + "INDEX.sizeInGB":42.16650806367397}}]}, + 
"COLL_0":{"shard1":[{"core_node5":{ + "core":"COLL_0_shard1_replica_n2", + "shard":"shard1", + "collection":"COLL_0", + "node_name":"N_16_solr", + "type":"NRT", + "base_url":"http://N_16/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":3.3775978753E10, + "INDEX.sizeInGB":31.45633149240166}}]}}}, + { + "node":"N_d4_solr", + "isLive":true, + "cores":5.0, + "freedisk":797.2159843444824, + "sysprop.pool":"pool-02", + "sysprop.az":"us-east-1c", + "totaldisk":999.51171875, + "replicas":{ + "COLL_1":{ + "shard2_0_0":[{"core_node69":{ + "core":"COLL_1_shard2_0_0_replica_n67", + "shard":"shard2_0_0", + "collection":"COLL_1", + "node_name":"N_d4_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":4.497304707E10, + "base_url":"http://N_d4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":41.8844139855355}}], + "shard2_0_1":[{"core_node70":{ + "core":"COLL_1_shard2_0_1_replica_n68", + "shard":"shard2_0_1", + "collection":"COLL_1", + "node_name":"N_d4_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":4.5692831033E10, + "base_url":"http://N_d4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":42.554765039123595}}], + "shard2_1_0":[{"core_node65":{ + "core":"COLL_1_shard2_1_0_replica_n63", + "shard":"shard2_1_0", + "collection":"COLL_1", + "node_name":"N_d4_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":4.5935880044E10, + "base_url":"http://N_d4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":42.78112206980586}}], + "shard2_1_1":[{"core_node66":{ + "core":"COLL_1_shard2_1_1_replica_n64", + "shard":"shard2_1_1", + "collection":"COLL_1", + "node_name":"N_d4_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":4.5166045429E10, + "base_url":"http://N_d4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":42.064157714135945}}]}, + "COLL_0":{"shard1":[{"core_node3":{ + "core":"COLL_0_shard1_replica_n1", + 
"shard":"shard1", + "collection":"COLL_0", + "node_name":"N_d4_solr", + "type":"NRT", + "leader":"true", + "INDEX.sizeInBytes":3.401835331E10, + "base_url":"http://N_d4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInGB":31.682060388848186}}]}}}, + { + "node":"N_b9_solr", + "isLive":true, + "cores":5.0, + "freedisk":801.2417984008789, + "sysprop.pool":"pool-02", + "sysprop.az":"us-east-1b", + "totaldisk":999.51171875, + "replicas":{ + "COLL_1":{ + "shard1_1_0":[{"core_node92":{ + "core":"COLL_1_shard1_1_0_replica_n91", + "shard":"shard1_1_0", + "collection":"COLL_1", + "node_name":"N_b9_solr", + "type":"NRT", + "base_url":"http://N_b9/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.5724314347E10, + "INDEX.sizeInGB":42.5840861601755}}], + "shard1_0_1":[{"core_node90":{ + "core":"COLL_1_shard1_0_1_replica_n89", + "shard":"shard1_0_1", + "collection":"COLL_1", + "node_name":"N_b9_solr", + "type":"NRT", + "base_url":"http://N_b9/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.6030616744E10, + "INDEX.sizeInGB":42.869352497160435}}], + "shard1_1_1":[{"core_node94":{ + "core":"COLL_1_shard1_1_1_replica_n93", + "shard":"shard1_1_1", + "collection":"COLL_1", + "node_name":"N_b9_solr", + "type":"NRT", + "base_url":"http://N_b9/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.574559386E10, + "INDEX.sizeInGB":42.603904251009226}}], + "shard1_0_0":[{"core_node88":{ + "core":"COLL_1_shard1_0_0_replica_n87", + "shard":"shard1_0_0", + "collection":"COLL_1", + "node_name":"N_b9_solr", + "type":"NRT", + "base_url":"http://N_b9/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.5100613575E10, + "INDEX.sizeInGB":42.0032195514068}}]}, + "COLL_0":{"shard2":[{"core_node9":{ + "core":"COLL_0_shard2_replica_n6", + "shard":"shard2", + "collection":"COLL_0", + "node_name":"N_b9_solr", + "type":"NRT", + "base_url":"http://N_b9/solr", + 
"state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":2.8865621899E10, + "INDEX.sizeInGB":26.883205304853618}}]}}}, + { + "node":"N_74_solr", + "isLive":true, + "cores":5.0, + "freedisk":802.5921897888184, + "sysprop.pool":"pool-02", + "sysprop.az":"us-east-1a", + "totaldisk":999.51171875, + "replicas":{ + "COLL_1":{ + "shard2_0_0":[{"core_node108":{ + "core":"COLL_1_shard2_0_0_replica_n107", + "shard":"shard2_0_0", + "collection":"COLL_1", + "node_name":"N_74_solr", + "type":"NRT", + "base_url":"http://N_74/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.3767024396E10, + "INDEX.sizeInGB":40.76121784374118}}], + "shard2_0_1":[{"core_node110":{ + "core":"COLL_1_shard2_0_1_replica_n109", + "shard":"shard2_0_1", + "collection":"COLL_1", + "node_name":"N_74_solr", + "type":"NRT", + "base_url":"http://N_74/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.8622428842E10, + "INDEX.sizeInGB":45.28316561318934}}], + "shard2_1_0":[{"core_node104":{ + "core":"COLL_1_shard2_1_0_replica_n103", + "shard":"shard2_1_0", + "collection":"COLL_1", + "node_name":"N_74_solr", + "type":"NRT", + "base_url":"http://N_74/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.4599223614E10, + "INDEX.sizeInGB":41.536263762041926}}], + "shard2_1_1":[{"core_node106":{ + "core":"COLL_1_shard2_1_1_replica_n105", + "shard":"shard2_1_1", + "collection":"COLL_1", + "node_name":"N_74_solr", + "type":"NRT", + "base_url":"http://N_74/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.3768191618E10, + "INDEX.sizeInGB":40.762304903939366}}]}, + "COLL_0":{"shard1":[{"core_node7":{ + "core":"COLL_0_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_0", + "node_name":"N_74_solr", + "type":"NRT", + "base_url":"http://N_74/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":2.9252853492E10, + "INDEX.sizeInGB":27.24384282901883}}]}}}], + 
"liveNodes":[ + "N_7e_solr", + "N_dj_solr", + "N_b9_solr", + "N_m_solr", + "N_1i_solr", + "N_17_solr", + "N_1_solr", + "N_g_solr", + "N_e_solr", + "N_4_solr", + "N_a_solr", + "N_16_solr", + "N_2w_solr", + "N_13_solr", + "N_2_solr", + "N_1m_solr", + "N_5_solr", + "N_do_solr", + "N_3a_solr", + "N_6i_solr", + "N_cs_solr", + "N_1f_solr", + "N_65p_solr", + "N_1c_solr", + "N_1d_solr", + "N_d4_solr", + "N_2u_solr", + "N_3to_solr", + "N_v_solr", + "N_3a7_solr", + "N_74_solr", + "N_t_solr", + "N_9o_solr", + "N_11_solr", + "N_0_solr", + "N_8_solr", + "N_7_solr", + "N_303_solr", + "N_6_solr", + "N_29_solr", + "N_3_solr", + "N_1h_solr", + "N_aw_solr", + "N_6c_solr", + "N_z_solr", + "N_4f_solr", + "N_4g_solr", + "N_u_solr"], + "violations":[], + "config":{ + "cluster-preferences":[ + { + "minimize":"cores", + "precision":1}, + { + "maximize":"freedisk", + "precision":10}], + "cluster-policy":[ + { + "replica":"#ALL", + "collection":"COLL_2", + "sysprop.pool":"pool-01"}, + { + "replica":"#ALL", + "collection":"COLL_1", + "sysprop.pool":"pool-02"}, + { + "replica":"#ALL", + "collection":"COLL_0", + "sysprop.pool":"pool-02"}, + { + "replica":"<2", + "shard":"#EACH", + "node":"#ANY"}, + { + "replica":"#EQUAL", + "shard":"#EACH", + "sysprop.az":"#EACH"}]}}} \ No newline at end of file diff --git a/solr/core/src/test-files/solr/simSnapshot/clusterState.json b/solr/core/src/test-files/solr/simSnapshot/clusterState.json new file mode 100644 index 000000000000..004d202cc3db --- /dev/null +++ b/solr/core/src/test-files/solr/simSnapshot/clusterState.json @@ -0,0 +1,2854 @@ +{ + "clusterProperties":{}, + "liveNodes":[ + "N_7e_solr", + "N_dj_solr", + "N_b9_solr", + "N_m_solr", + "N_1i_solr", + "N_17_solr", + "N_1_solr", + "N_g_solr", + "N_e_solr", + "N_4_solr", + "N_a_solr", + "N_16_solr", + "N_2w_solr", + "N_13_solr", + "N_2_solr", + "N_1m_solr", + "N_5_solr", + "N_do_solr", + "N_3a_solr", + "N_6i_solr", + "N_cs_solr", + "N_1f_solr", + "N_65p_solr", + "N_1c_solr", + "N_1d_solr", + 
"N_d4_solr", + "N_2u_solr", + "N_3to_solr", + "N_v_solr", + "N_3a7_solr", + "N_74_solr", + "N_t_solr", + "N_9o_solr", + "N_11_solr", + "N_0_solr", + "N_8_solr", + "N_7_solr", + "N_303_solr", + "N_6_solr", + "N_29_solr", + "N_3_solr", + "N_1h_solr", + "N_aw_solr", + "N_6c_solr", + "N_z_solr", + "N_4f_solr", + "N_4g_solr", + "N_u_solr"], + "clusterState":{ + "COLL_1t":{ + "pullReplicas":"0", + "replicationFactor":"3", + "shards":{"shard1":{ + "range":"80000000-7fffffff", + "state":"active", + "replicas":{ + "core_node3":{ + "core":"COLL_1t_shard1_replica_n1", + "base_url":"http://N_7e/solr", + "node_name":"N_7e_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node5":{ + "core":"COLL_1t_shard1_replica_n2", + "base_url":"http://N_0/solr", + "node_name":"N_0_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node6":{ + "core":"COLL_1t_shard1_replica_n4", + "base_url":"http://N_4/solr", + "node_name":"N_4_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}}}, + "router":{"name":"compositeId"}, + "maxShardsPerNode":"1", + "autoAddReplicas":"true", + "nrtReplicas":"3", + "tlogReplicas":"0"}, + "COLL_x":{ + "pullReplicas":"0", + "replicationFactor":"3", + "shards":{"shard1":{ + "range":"80000000-7fffffff", + "state":"active", + "replicas":{ + "core_node3":{ + "core":"COLL_x_shard1_replica_n1", + "base_url":"http://N_7e/solr", + "node_name":"N_7e_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node6":{ + "core":"COLL_x_shard1_replica_n4", + "base_url":"http://N_4/solr", + "node_name":"N_4_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node10":{ + "core":"COLL_x_shard1_replica_n9", + "base_url":"http://N_0/solr", + "node_name":"N_0_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}}}}, + "router":{"name":"compositeId"}, + "maxShardsPerNode":"1", + 
"autoAddReplicas":"true", + "nrtReplicas":"3", + "tlogReplicas":"0"}, + "COLL_2k":{ + "pullReplicas":"0", + "replicationFactor":"3", + "shards":{"shard1":{ + "range":"80000000-7fffffff", + "state":"active", + "replicas":{ + "core_node3":{ + "core":"COLL_2k_shard1_replica_n1", + "base_url":"http://N_7e/solr", + "node_name":"N_7e_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node6":{ + "core":"COLL_2k_shard1_replica_n4", + "base_url":"http://N_4/solr", + "node_name":"N_4_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node10":{ + "core":"COLL_2k_shard1_replica_n9", + "base_url":"http://N_0/solr", + "node_name":"N_0_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}}}}, + "router":{"name":"compositeId"}, + "maxShardsPerNode":"1", + "autoAddReplicas":"true", + "nrtReplicas":"3", + "tlogReplicas":"0"}, + "COLL_1r":{ + "pullReplicas":"0", + "replicationFactor":"3", + "shards":{"shard1":{ + "range":"80000000-7fffffff", + "state":"active", + "replicas":{ + "core_node3":{ + "core":"COLL_1r_shard1_replica_n1", + "base_url":"http://N_7e/solr", + "node_name":"N_7e_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node5":{ + "core":"COLL_1r_shard1_replica_n2", + "base_url":"http://N_0/solr", + "node_name":"N_0_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node6":{ + "core":"COLL_1r_shard1_replica_n4", + "base_url":"http://N_4/solr", + "node_name":"N_4_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}}}, + "router":{"name":"compositeId"}, + "maxShardsPerNode":"1", + "autoAddReplicas":"true", + "nrtReplicas":"3", + "tlogReplicas":"0"}, + "COLL_8":{ + "pullReplicas":"0", + "replicationFactor":"3", + "shards":{"shard1":{ + "range":"80000000-7fffffff", + "state":"active", + "replicas":{ + "core_node3":{ + "core":"COLL_8_shard1_replica_n1", + 
"base_url":"http://N_7e/solr", + "node_name":"N_7e_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node5":{ + "core":"COLL_8_shard1_replica_n2", + "base_url":"http://N_0/solr", + "node_name":"N_0_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node6":{ + "core":"COLL_8_shard1_replica_n4", + "base_url":"http://N_4/solr", + "node_name":"N_4_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}}}, + "router":{"name":"compositeId"}, + "maxShardsPerNode":"1", + "autoAddReplicas":"true", + "nrtReplicas":"3", + "tlogReplicas":"0"}, + "COLL_1":{ + "pullReplicas":"0", + "replicationFactor":"3", + "shards":{ + "shard1_0_0":{ + "range":"80000000-9554ffff", + "state":"active", + "replicas":{ + "core_node57":{ + "core":"COLL_1_shard1_0_0_replica_n55", + "base_url":"http://N_13/solr", + "node_name":"N_13_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node80":{ + "core":"COLL_1_shard1_0_0_replica_n79", + "base_url":"http://N_3to/solr", + "node_name":"N_3to_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node88":{ + "core":"COLL_1_shard1_0_0_replica_n87", + "base_url":"http://N_b9/solr", + "node_name":"N_b9_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1562040263462532068"}, + "shard1_0_1":{ + "range":"95550000-aaa9ffff", + "state":"active", + "replicas":{ + "core_node58":{ + "core":"COLL_1_shard1_0_1_replica_n56", + "base_url":"http://N_13/solr", + "node_name":"N_13_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node82":{ + "core":"COLL_1_shard1_0_1_replica_n81", + "base_url":"http://N_3to/solr", + "node_name":"N_3to_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node90":{ + "core":"COLL_1_shard1_0_1_replica_n89", + "base_url":"http://N_b9/solr", + 
"node_name":"N_b9_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1562040263462514239"}, + "shard1_1_0":{ + "range":"aaaa0000-bffeffff", + "state":"active", + "replicas":{ + "core_node61":{ + "core":"COLL_1_shard1_1_0_replica_n59", + "base_url":"http://N_13/solr", + "node_name":"N_13_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node84":{ + "core":"COLL_1_shard1_1_0_replica_n83", + "base_url":"http://N_3to/solr", + "node_name":"N_3to_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node92":{ + "core":"COLL_1_shard1_1_0_replica_n91", + "base_url":"http://N_b9/solr", + "node_name":"N_b9_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1562040278815865699"}, + "shard1_1_1":{ + "range":"bfff0000-d554ffff", + "state":"active", + "replicas":{ + "core_node62":{ + "core":"COLL_1_shard1_1_1_replica_n60", + "base_url":"http://N_13/solr", + "node_name":"N_13_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node86":{ + "core":"COLL_1_shard1_1_1_replica_n85", + "base_url":"http://N_3to/solr", + "node_name":"N_3to_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node94":{ + "core":"COLL_1_shard1_1_1_replica_n93", + "base_url":"http://N_b9/solr", + "node_name":"N_b9_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1562040278815883523"}, + "shard2_1_0":{ + "range":"ffff0000-1553ffff", + "state":"active", + "replicas":{ + "core_node65":{ + "core":"COLL_1_shard2_1_0_replica_n63", + "base_url":"http://N_d4/solr", + "node_name":"N_d4_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node96":{ + "core":"COLL_1_shard2_1_0_replica_n95", + "base_url":"http://N_16/solr", + "node_name":"N_16_solr", + "state":"active", + "type":"NRT", + 
"force_set_state":"false"}, + "core_node104":{ + "core":"COLL_1_shard2_1_0_replica_n103", + "base_url":"http://N_74/solr", + "node_name":"N_74_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1562040347757791179"}, + "shard2_1_1":{ + "range":"15540000-2aa9ffff", + "state":"active", + "replicas":{ + "core_node66":{ + "core":"COLL_1_shard2_1_1_replica_n64", + "base_url":"http://N_d4/solr", + "node_name":"N_d4_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node98":{ + "core":"COLL_1_shard2_1_1_replica_n97", + "base_url":"http://N_16/solr", + "node_name":"N_16_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node106":{ + "core":"COLL_1_shard2_1_1_replica_n105", + "base_url":"http://N_74/solr", + "node_name":"N_74_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1562040347757805057"}, + "shard2_0_0":{ + "range":"d5550000-eaa9ffff", + "state":"active", + "replicas":{ + "core_node69":{ + "core":"COLL_1_shard2_0_0_replica_n67", + "base_url":"http://N_d4/solr", + "node_name":"N_d4_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node100":{ + "core":"COLL_1_shard2_0_0_replica_n99", + "base_url":"http://N_16/solr", + "node_name":"N_16_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node108":{ + "core":"COLL_1_shard2_0_0_replica_n107", + "base_url":"http://N_74/solr", + "node_name":"N_74_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1562040365925094823"}, + "shard2_0_1":{ + "range":"eaaa0000-fffeffff", + "state":"active", + "replicas":{ + "core_node70":{ + "core":"COLL_1_shard2_0_1_replica_n68", + "base_url":"http://N_d4/solr", + "node_name":"N_d4_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node102":{ + 
"core":"COLL_1_shard2_0_1_replica_n101", + "base_url":"http://N_16/solr", + "node_name":"N_16_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node110":{ + "core":"COLL_1_shard2_0_1_replica_n109", + "base_url":"http://N_74/solr", + "node_name":"N_74_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1562040365925105897"}, + "shard3_0_0":{ + "range":"2aaa0000-3ffeffff", + "state":"active", + "replicas":{ + "core_node73":{ + "core":"COLL_1_shard3_0_0_replica_n71", + "base_url":"http://N_3a/solr", + "node_name":"N_3a_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node112":{ + "core":"COLL_1_shard3_0_0_replica_n111", + "base_url":"http://N_do/solr", + "node_name":"N_do_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node120":{ + "core":"COLL_1_shard3_0_0_replica_n119", + "base_url":"http://N_v/solr", + "node_name":"N_v_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1562040222670106007"}, + "shard3_0_1":{ + "range":"3fff0000-5554ffff", + "state":"active", + "replicas":{ + "core_node74":{ + "core":"COLL_1_shard3_0_1_replica_n72", + "base_url":"http://N_3a/solr", + "node_name":"N_3a_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node114":{ + "core":"COLL_1_shard3_0_1_replica_n113", + "base_url":"http://N_do/solr", + "node_name":"N_do_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node122":{ + "core":"COLL_1_shard3_0_1_replica_n121", + "base_url":"http://N_v/solr", + "node_name":"N_v_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1562040222670118507"}, + "shard3_1_0":{ + "range":"55550000-6aa9ffff", + "state":"active", + "replicas":{ + "core_node77":{ + "core":"COLL_1_shard3_1_0_replica_n75", + "base_url":"http://N_3a/solr", + 
"node_name":"N_3a_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node116":{ + "core":"COLL_1_shard3_1_0_replica_n115", + "base_url":"http://N_do/solr", + "node_name":"N_do_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node124":{ + "core":"COLL_1_shard3_1_0_replica_n123", + "base_url":"http://N_v/solr", + "node_name":"N_v_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1562040233681530342"}, + "shard3_1_1":{ + "range":"6aaa0000-7fffffff", + "state":"active", + "replicas":{ + "core_node78":{ + "core":"COLL_1_shard3_1_1_replica_n76", + "base_url":"http://N_3a/solr", + "node_name":"N_3a_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node118":{ + "core":"COLL_1_shard3_1_1_replica_n117", + "base_url":"http://N_do/solr", + "node_name":"N_do_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node126":{ + "core":"COLL_1_shard3_1_1_replica_n125", + "base_url":"http://N_v/solr", + "node_name":"N_v_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1562040233681548279"}}, + "router":{"name":"compositeId"}, + "maxShardsPerNode":"1", + "autoAddReplicas":"true", + "nrtReplicas":"3", + "tlogReplicas":"0"}, + "version":0, + "COLL_4":{ + "pullReplicas":"0", + "replicationFactor":"3", + "shards":{"shard1":{ + "range":"80000000-7fffffff", + "state":"active", + "replicas":{ + "core_node3":{ + "core":"COLL_4_shard1_replica_n1", + "base_url":"http://N_7e/solr", + "node_name":"N_7e_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node5":{ + "core":"COLL_4_shard1_replica_n2", + "base_url":"http://N_0/solr", + "node_name":"N_0_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node6":{ + "core":"COLL_4_shard1_replica_n4", + "base_url":"http://N_4/solr", + 
"node_name":"N_4_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}}}, + "router":{"name":"compositeId"}, + "maxShardsPerNode":"1", + "autoAddReplicas":"true", + "nrtReplicas":"3", + "tlogReplicas":"0"}, + "COLL_2":{ + "pullReplicas":"0", + "replicationFactor":"2", + "shards":{ + "shard1_0_0":{ + "range":"80000000-838dffff", + "state":"active", + "replicas":{ + "core_node1717":{ + "core":"COLL_2_shard1_0_0_replica_n1716", + "base_url":"http://N_z/solr", + "node_name":"N_z_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1725":{ + "core":"COLL_2_shard1_0_0_replica_n1724", + "base_url":"http://N_5/solr", + "node_name":"N_5_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1729":{ + "core":"COLL_2_shard1_0_0_replica_n1728", + "base_url":"http://N_1h/solr", + "node_name":"N_1h_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565148935777897324"}, + "shard1_0_1":{ + "range":"838e0000-871bffff", + "state":"active", + "replicas":{ + "core_node1669":{ + "core":"COLL_2_shard1_0_1_replica_n1668", + "base_url":"http://N_5/solr", + "node_name":"N_5_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1677":{ + "core":"COLL_2_shard1_0_1_replica_n1676", + "base_url":"http://N_2w/solr", + "node_name":"N_2w_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1719":{ + "core":"COLL_2_shard1_0_1_replica_n1718", + "base_url":"http://N_e/solr", + "node_name":"N_e_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}}, + "stateTimestamp":"1565148935777884238"}, + "shard1_1_0":{ + "range":"871c0000-8aa9ffff", + "state":"active", + "replicas":{ + "core_node471":{ + "core":"COLL_2_shard1_1_0_replica_n1", + "base_url":"http://N_dj/solr", + "node_name":"N_dj_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + 
"leader":"true"}, + "core_node1679":{ + "core":"COLL_2_shard1_1_0_replica_n1678", + "base_url":"http://N_1m/solr", + "node_name":"N_1m_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1721":{ + "core":"COLL_2_shard1_1_0_replica_n1720", + "base_url":"http://N_5/solr", + "node_name":"N_5_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565151683385910884"}, + "shard1_1_1":{ + "range":"8aaa0000-8e37ffff", + "state":"active", + "replicas":{ + "core_node418":{ + "core":"COLL_2_shard1_1_1_replica_n416", + "base_url":"http://N_5/solr", + "node_name":"N_5_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node474":{ + "core":"COLL_2_shard1_1_1_replica_n2", + "base_url":"http://N_3/solr", + "node_name":"N_3_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1807":{ + "core":"COLL_2_shard1_1_1_replica_n1806", + "base_url":"http://N_2w/solr", + "node_name":"N_2w_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565151683385930901"}, + "shard3_1_0":{ + "range":"a38d0000-a71affff", + "state":"active", + "replicas":{ + "core_node425":{ + "core":"COLL_2_shard3_1_0_replica_n423", + "base_url":"http://N_1d/solr", + "node_name":"N_1d_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node459":{ + "core":"COLL_2_shard3_1_0_replica_n1", + "base_url":"http://N_6/solr", + "node_name":"N_6_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node460":{ + "core":"COLL_2_shard3_1_0_replica_n2", + "base_url":"http://N_aw/solr", + "node_name":"N_aw_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565145109110689908"}, + "shard3_1_1":{ + "range":"a71b0000-aaa9ffff", + "state":"active", + "replicas":{ + "core_node426":{ + "core":"COLL_2_shard3_1_1_replica_n424", + 
"base_url":"http://N_1d/solr", + "node_name":"N_1d_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node461":{ + "core":"COLL_2_shard3_1_1_replica_n1", + "base_url":"http://N_2u/solr", + "node_name":"N_2u_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node462":{ + "core":"COLL_2_shard3_1_1_replica_n2", + "base_url":"http://N_3a7/solr", + "node_name":"N_3a7_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565145109110736393"}, + "shard9_0_0":{ + "range":"f1c70000-f554ffff", + "state":"active", + "replicas":{ + "core_node1683":{ + "core":"COLL_2_shard9_0_0_replica_n1682", + "base_url":"http://N_g/solr", + "node_name":"N_g_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "property.preferredleader":"true"}, + "core_node1799":{ + "core":"COLL_2_shard9_0_0_replica_n1798", + "base_url":"http://N_6/solr", + "node_name":"N_6_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1815":{ + "core":"COLL_2_shard9_0_0_replica_n1814", + "base_url":"http://N_3/solr", + "node_name":"N_3_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565151698484117350"}, + "shard9_0_1":{ + "range":"f5550000-f8e2ffff", + "state":"active", + "replicas":{ + "core_node438":{ + "core":"COLL_2_shard9_0_1_replica_n436", + "base_url":"http://N_11/solr", + "node_name":"N_11_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node477":{ + "core":"COLL_2_shard9_0_1_replica_n1", + "base_url":"http://N_m/solr", + "node_name":"N_m_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node478":{ + "core":"COLL_2_shard9_0_1_replica_n2", + "base_url":"http://N_2/solr", + "node_name":"N_2_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}}, + 
"stateTimestamp":"1565151698484109022"}, + "shard4_0_0":{ + "range":"aaaa0000-ae37ffff", + "state":"active", + "replicas":{ + "core_node445":{ + "core":"COLL_2_shard4_0_0_replica_n443", + "base_url":"http://N_6c/solr", + "node_name":"N_6c_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node520":{ + "core":"COLL_2_shard4_0_0_replica_n2", + "base_url":"http://N_6/solr", + "node_name":"N_6_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1737":{ + "core":"COLL_2_shard4_0_0_replica_n1736", + "base_url":"http://N_3/solr", + "node_name":"N_3_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565158279227348339"}, + "shard4_0_1":{ + "range":"ae380000-b1c5ffff", + "state":"active", + "replicas":{ + "core_node446":{ + "core":"COLL_2_shard4_0_1_replica_n444", + "base_url":"http://N_6c/solr", + "node_name":"N_6c_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1773":{ + "core":"COLL_2_shard4_0_1_replica_n1772", + "base_url":"http://N_303/solr", + "node_name":"N_303_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1803":{ + "core":"COLL_2_shard4_0_1_replica_n1802", + "base_url":"http://N_6/solr", + "node_name":"N_6_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565158279227361478"}, + "shard4_1_0":{ + "range":"b1c60000-b553ffff", + "state":"active", + "replicas":{ + "core_node457":{ + "core":"COLL_2_shard4_1_0_replica_n455", + "base_url":"http://N_6c/solr", + "node_name":"N_6c_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node523":{ + "core":"COLL_2_shard4_1_0_replica_n1", + "base_url":"http://N_3/solr", + "node_name":"N_3_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1775":{ + "core":"COLL_2_shard4_1_0_replica_n1774", 
+ "base_url":"http://N_2w/solr", + "node_name":"N_2w_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565161247490072659"}, + "shard4_1_1":{ + "range":"b5540000-b8e2ffff", + "state":"active", + "replicas":{ + "core_node458":{ + "core":"COLL_2_shard4_1_1_replica_n456", + "base_url":"http://N_6c/solr", + "node_name":"N_6c_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node525":{ + "core":"COLL_2_shard4_1_1_replica_n1", + "base_url":"http://N_aw/solr", + "node_name":"N_aw_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1805":{ + "core":"COLL_2_shard4_1_1_replica_n1804", + "base_url":"http://N_2w/solr", + "node_name":"N_2w_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565161247490078977"}, + "shard3_0_0":{ + "range":"9c710000-9ffeffff", + "state":"active", + "replicas":{ + "core_node543":{ + "core":"COLL_2_shard3_0_0_replica_n1", + "base_url":"http://N_65p/solr", + "node_name":"N_65p_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node544":{ + "core":"COLL_2_shard3_0_0_replica_n2", + "base_url":"http://N_303/solr", + "node_name":"N_303_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1809":{ + "core":"COLL_2_shard3_0_0_replica_n1808", + "base_url":"http://N_a/solr", + "node_name":"N_a_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565183961448368559"}, + "shard3_0_1":{ + "range":"9fff0000-a38cffff", + "state":"active", + "replicas":{ + "core_node510":{ + "core":"COLL_2_shard3_0_1_replica_n508", + "base_url":"http://N_1h/solr", + "node_name":"N_1h_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node545":{ + "core":"COLL_2_shard3_0_1_replica_n1", + "base_url":"http://N_65p/solr", + "node_name":"N_65p_solr", + 
"state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node546":{ + "core":"COLL_2_shard3_0_1_replica_n2", + "base_url":"http://N_4f/solr", + "node_name":"N_4f_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}}, + "stateTimestamp":"1565183961448383802"}, + "shard12_1_0":{ + "range":"238d0000-271affff", + "state":"active", + "replicas":{ + "core_node659":{ + "core":"COLL_2_shard12_1_0_replica_n1", + "base_url":"http://N_aw/solr", + "node_name":"N_aw_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node660":{ + "core":"COLL_2_shard12_1_0_replica_n2", + "base_url":"http://N_2u/solr", + "node_name":"N_2u_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1789":{ + "core":"COLL_2_shard12_1_0_replica_n1788", + "base_url":"http://N_1d/solr", + "node_name":"N_1d_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565207157854150844"}, + "shard12_1_1":{ + "range":"271b0000-2aa9ffff", + "state":"active", + "replicas":{ + "core_node586":{ + "core":"COLL_2_shard12_1_1_replica_n584", + "base_url":"http://N_1/solr", + "node_name":"N_1_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node661":{ + "core":"COLL_2_shard12_1_1_replica_n1", + "base_url":"http://N_aw/solr", + "node_name":"N_aw_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node662":{ + "core":"COLL_2_shard12_1_1_replica_n2", + "base_url":"http://N_11/solr", + "node_name":"N_11_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565207157854141908"}, + "shard15_0_0":{ + "range":"471c0000-4aa9ffff", + "state":"active", + "replicas":{ + "core_node609":{ + "core":"COLL_2_shard15_0_0_replica_n607", + "base_url":"http://N_7/solr", + "node_name":"N_7_solr", + "state":"active", + "type":"NRT", + 
"force_set_state":"false"}, + "core_node731":{ + "core":"COLL_2_shard15_0_0_replica_n1", + "base_url":"http://N_e/solr", + "node_name":"N_e_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node732":{ + "core":"COLL_2_shard15_0_0_replica_n2", + "base_url":"http://N_1d/solr", + "node_name":"N_1d_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}}, + "stateTimestamp":"1565218606036002434"}, + "shard15_0_1":{ + "range":"4aaa0000-4e37ffff", + "state":"active", + "replicas":{ + "core_node610":{ + "core":"COLL_2_shard15_0_1_replica_n608", + "base_url":"http://N_7/solr", + "node_name":"N_7_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node734":{ + "core":"COLL_2_shard15_0_1_replica_n2", + "base_url":"http://N_u/solr", + "node_name":"N_u_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1817":{ + "core":"COLL_2_shard15_0_1_replica_n1816", + "base_url":"http://N_aw/solr", + "node_name":"N_aw_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565218606035996309"}, + "shard18_1_0":{ + "range":"78e30000-7c70ffff", + "state":"active", + "replicas":{ + "core_node625":{ + "core":"COLL_2_shard18_1_0_replica_n623", + "base_url":"http://N_4g/solr", + "node_name":"N_4g_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node671":{ + "core":"COLL_2_shard18_1_0_replica_n1", + "base_url":"http://N_2w/solr", + "node_name":"N_2w_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node672":{ + "core":"COLL_2_shard18_1_0_replica_n2", + "base_url":"http://N_e/solr", + "node_name":"N_e_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565207836146608524"}, + "shard18_1_1":{ + "range":"7c710000-7fffffff", + "state":"active", + "replicas":{ + "core_node626":{ + 
"core":"COLL_2_shard18_1_1_replica_n624", + "base_url":"http://N_4g/solr", + "node_name":"N_4g_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node673":{ + "core":"COLL_2_shard18_1_1_replica_n1", + "base_url":"http://N_2w/solr", + "node_name":"N_2w_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1821":{ + "core":"COLL_2_shard18_1_1_replica_n1820", + "base_url":"http://N_aw/solr", + "node_name":"N_aw_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565207836146601465"}, + "shard11_1_0":{ + "range":"15540000-18e1ffff", + "state":"active", + "replicas":{ + "core_node779":{ + "core":"COLL_2_shard11_1_0_replica_n778", + "base_url":"http://N_1f/solr", + "node_name":"N_1f_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node781":{ + "core":"COLL_2_shard11_1_0_replica_n780", + "base_url":"http://N_9o/solr", + "node_name":"N_9o_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1791":{ + "core":"COLL_2_shard11_1_0_replica_n1790", + "base_url":"http://N_t/solr", + "node_name":"N_t_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565245269658561257"}, + "shard7_0_0":{ + "range":"d5550000-d8e2ffff", + "state":"active", + "replicas":{ + "core_node766":{ + "core":"COLL_2_shard7_0_0_replica_n764", + "base_url":"http://N_9o/solr", + "node_name":"N_9o_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node774":{ + "core":"COLL_2_shard7_0_0_replica_n1", + "base_url":"http://N_65p/solr", + "node_name":"N_65p_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node775":{ + "core":"COLL_2_shard7_0_0_replica_n2", + "base_url":"http://N_3a7/solr", + "node_name":"N_3a7_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}}, + 
"stateTimestamp":"1565248636417965858"}, + "shard11_1_1":{ + "range":"18e20000-1c70ffff", + "state":"active", + "replicas":{ + "core_node768":{ + "core":"COLL_2_shard11_1_1_replica_n762", + "base_url":"http://N_17/solr", + "node_name":"N_17_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node783":{ + "core":"COLL_2_shard11_1_1_replica_n782", + "base_url":"http://N_1f/solr", + "node_name":"N_1f_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node785":{ + "core":"COLL_2_shard11_1_1_replica_n784", + "base_url":"http://N_9o/solr", + "node_name":"N_9o_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565245269658580580"}, + "shard7_0_1":{ + "range":"d8e30000-dc70ffff", + "state":"active", + "replicas":{ + "core_node769":{ + "core":"COLL_2_shard7_0_1_replica_n765", + "base_url":"http://N_9o/solr", + "node_name":"N_9o_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node776":{ + "core":"COLL_2_shard7_0_1_replica_n1", + "base_url":"http://N_2/solr", + "node_name":"N_2_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node777":{ + "core":"COLL_2_shard7_0_1_replica_n2", + "base_url":"http://N_29/solr", + "node_name":"N_29_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565248636417971231"}, + "shard18_0_0":{ + "range":"71c70000-7554ffff", + "state":"active", + "replicas":{ + "core_node874":{ + "core":"COLL_2_shard18_0_0_replica_n1", + "base_url":"http://N_1c/solr", + "node_name":"N_1c_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node875":{ + "core":"COLL_2_shard18_0_0_replica_n2", + "base_url":"http://N_1/solr", + "node_name":"N_1_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1819":{ + "core":"COLL_2_shard18_0_0_replica_n1818", 
+ "base_url":"http://N_e/solr", + "node_name":"N_e_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565255619352465985"}, + "shard18_0_1":{ + "range":"75550000-78e2ffff", + "state":"active", + "replicas":{ + "core_node773":{ + "core":"COLL_2_shard18_0_1_replica_n771", + "base_url":"http://N_dj/solr", + "node_name":"N_dj_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node876":{ + "core":"COLL_2_shard18_0_1_replica_n1", + "base_url":"http://N_1c/solr", + "node_name":"N_1c_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node877":{ + "core":"COLL_2_shard18_0_1_replica_n2", + "base_url":"http://N_17/solr", + "node_name":"N_17_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565255619352471980"}, + "shard2_0_0":{ + "range":"8e380000-91c5ffff", + "state":"active", + "replicas":{ + "core_node796":{ + "core":"COLL_2_shard2_0_0_replica_n794", + "base_url":"http://N_8/solr", + "node_name":"N_8_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node911":{ + "core":"COLL_2_shard2_0_0_replica_n910", + "base_url":"http://N_5/solr", + "node_name":"N_5_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1823":{ + "core":"COLL_2_shard2_0_0_replica_n1822", + "base_url":"http://N_3a7/solr", + "node_name":"N_3a7_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565270983762975731"}, + "shard2_1_0":{ + "range":"95540000-98e1ffff", + "state":"active", + "replicas":{ + "core_node975":{ + "core":"COLL_2_shard2_1_0_replica_n974", + "base_url":"http://N_4f/solr", + "node_name":"N_4f_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1681":{ + "core":"COLL_2_shard2_1_0_replica_n1680", + "base_url":"http://N_g/solr", + "node_name":"N_g_solr", 
+ "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1727":{ + "core":"COLL_2_shard2_1_0_replica_n1726", + "base_url":"http://N_6i/solr", + "node_name":"N_6i_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565271264820336969"}, + "shard2_0_1":{ + "range":"91c60000-9553ffff", + "state":"active", + "replicas":{ + "core_node800":{ + "core":"COLL_2_shard2_0_1_replica_n795", + "base_url":"http://N_8/solr", + "node_name":"N_8_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node915":{ + "core":"COLL_2_shard2_0_1_replica_n914", + "base_url":"http://N_4f/solr", + "node_name":"N_4f_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node917":{ + "core":"COLL_2_shard2_0_1_replica_n916", + "base_url":"http://N_5/solr", + "node_name":"N_5_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565270983762987501"}, + "shard2_1_1":{ + "range":"98e20000-9c70ffff", + "state":"active", + "replicas":{ + "core_node979":{ + "core":"COLL_2_shard2_1_1_replica_n978", + "base_url":"http://N_6i/solr", + "node_name":"N_6i_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1685":{ + "core":"COLL_2_shard2_1_1_replica_n1684", + "base_url":"http://N_4f/solr", + "node_name":"N_4f_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1813":{ + "core":"COLL_2_shard2_1_1_replica_n1812", + "base_url":"http://N_4g/solr", + "node_name":"N_4g_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565271264820364576"}, + "shard13_1_0":{ + "range":"31c60000-3553ffff", + "state":"active", + "replicas":{ + "core_node923":{ + "core":"COLL_2_shard13_1_0_replica_n922", + "base_url":"http://N_e/solr", + "node_name":"N_e_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + 
"leader":"true"}, + "core_node1689":{ + "core":"COLL_2_shard13_1_0_replica_n1688", + "base_url":"http://N_7/solr", + "node_name":"N_7_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1763":{ + "core":"COLL_2_shard13_1_0_replica_n1762", + "base_url":"http://N_u/solr", + "node_name":"N_u_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565268739309072068"}, + "shard13_1_1":{ + "range":"35540000-38e2ffff", + "state":"active", + "replicas":{ + "core_node808":{ + "core":"COLL_2_shard13_1_1_replica_n806", + "base_url":"http://N_7/solr", + "node_name":"N_7_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node921":{ + "core":"COLL_2_shard13_1_1_replica_n920", + "base_url":"http://N_u/solr", + "node_name":"N_u_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node925":{ + "core":"COLL_2_shard13_1_1_replica_n924", + "base_url":"http://N_e/solr", + "node_name":"N_e_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565268739309062648"}, + "shard8_0_0":{ + "range":"e38e0000-e71bffff", + "state":"active", + "replicas":{ + "core_node887":{ + "core":"COLL_2_shard8_0_0_replica_n886", + "base_url":"http://N_1m/solr", + "node_name":"N_1m_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1691":{ + "core":"COLL_2_shard8_0_0_replica_n1690", + "base_url":"http://N_29/solr", + "node_name":"N_29_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1731":{ + "core":"COLL_2_shard8_0_0_replica_n1730", + "base_url":"http://N_z/solr", + "node_name":"N_z_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565264342830784906"}, + "shard10_0_0":{ + "range":"0-38dffff", + "state":"active", + "replicas":{ + "core_node827":{ + "core":"COLL_2_shard10_0_0_replica_n825", 
+ "base_url":"http://N_cs/solr", + "node_name":"N_cs_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node897":{ + "core":"COLL_2_shard10_0_0_replica_n896", + "base_url":"http://N_6i/solr", + "node_name":"N_6i_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1733":{ + "core":"COLL_2_shard10_0_0_replica_n1732", + "base_url":"http://N_t/solr", + "node_name":"N_t_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565267042378799051"}, + "shard10_0_1":{ + "range":"38e0000-71bffff", + "state":"active", + "replicas":{ + "core_node828":{ + "core":"COLL_2_shard10_0_1_replica_n826", + "base_url":"http://N_cs/solr", + "node_name":"N_cs_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node905":{ + "core":"COLL_2_shard10_0_1_replica_n904", + "base_url":"http://N_t/solr", + "node_name":"N_t_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1739":{ + "core":"COLL_2_shard10_0_1_replica_n1738", + "base_url":"http://N_6i/solr", + "node_name":"N_6i_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565267042378772588"}, + "shard10_1_0":{ + "range":"71c0000-aa9ffff", + "state":"active", + "replicas":{ + "core_node831":{ + "core":"COLL_2_shard10_1_0_replica_n829", + "base_url":"http://N_6i/solr", + "node_name":"N_6i_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1693":{ + "core":"COLL_2_shard10_1_0_replica_n1692", + "base_url":"http://N_1i/solr", + "node_name":"N_1i_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1797":{ + "core":"COLL_2_shard10_1_0_replica_n1796", + "base_url":"http://N_65p/solr", + "node_name":"N_65p_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565264436499940709"}, + 
"shard8_0_1":{ + "range":"e71c0000-eaa9ffff", + "state":"active", + "replicas":{ + "core_node893":{ + "core":"COLL_2_shard8_0_1_replica_n892", + "base_url":"http://N_1m/solr", + "node_name":"N_1m_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1695":{ + "core":"COLL_2_shard8_0_1_replica_n1694", + "base_url":"http://N_z/solr", + "node_name":"N_z_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1787":{ + "core":"COLL_2_shard8_0_1_replica_n1786", + "base_url":"http://N_29/solr", + "node_name":"N_29_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565264342830792972"}, + "shard14_1_0":{ + "range":"3fff0000-438cffff", + "state":"active", + "replicas":{ + "core_node835":{ + "core":"COLL_2_shard14_1_0_replica_n833", + "base_url":"http://N_a/solr", + "node_name":"N_a_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1127":{ + "core":"COLL_2_shard14_1_0_replica_n1126", + "base_url":"http://N_z/solr", + "node_name":"N_z_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1129":{ + "core":"COLL_2_shard14_1_0_replica_n1128", + "base_url":"http://N_1d/solr", + "node_name":"N_1d_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}}, + "stateTimestamp":"1565264301050142265"}, + "shard14_1_1":{ + "range":"438d0000-471bffff", + "state":"active", + "replicas":{ + "core_node836":{ + "core":"COLL_2_shard14_1_1_replica_n834", + "base_url":"http://N_a/solr", + "node_name":"N_a_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1741":{ + "core":"COLL_2_shard14_1_1_replica_n1740", + "base_url":"http://N_1d/solr", + "node_name":"N_1d_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1825":{ + "core":"COLL_2_shard14_1_1_replica_n1824", + 
"base_url":"http://N_3a7/solr", + "node_name":"N_3a7_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565264301050133844"}, + "shard14_0_0":{ + "range":"38e30000-3c70ffff", + "state":"active", + "replicas":{ + "core_node839":{ + "core":"COLL_2_shard14_0_0_replica_n837", + "base_url":"http://N_3a7/solr", + "node_name":"N_3a7_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1119":{ + "core":"COLL_2_shard14_0_0_replica_n1118", + "base_url":"http://N_a/solr", + "node_name":"N_a_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1121":{ + "core":"COLL_2_shard14_0_0_replica_n1120", + "base_url":"http://N_17/solr", + "node_name":"N_17_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565265507181833703"}, + "shard10_1_1":{ + "range":"aaa0000-e37ffff", + "state":"active", + "replicas":{ + "core_node840":{ + "core":"COLL_2_shard10_1_1_replica_n830", + "base_url":"http://N_6i/solr", + "node_name":"N_6i_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1743":{ + "core":"COLL_2_shard10_1_1_replica_n1742", + "base_url":"http://N_t/solr", + "node_name":"N_t_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1779":{ + "core":"COLL_2_shard10_1_1_replica_n1778", + "base_url":"http://N_1i/solr", + "node_name":"N_1i_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565264436499929417"}, + "shard14_0_1":{ + "range":"3c710000-3ffeffff", + "state":"active", + "replicas":{ + "core_node841":{ + "core":"COLL_2_shard14_0_1_replica_n838", + "base_url":"http://N_3a7/solr", + "node_name":"N_3a7_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1123":{ + "core":"COLL_2_shard14_0_1_replica_n1122", + "base_url":"http://N_17/solr", + 
"node_name":"N_17_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1125":{ + "core":"COLL_2_shard14_0_1_replica_n1124", + "base_url":"http://N_a/solr", + "node_name":"N_a_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565265507181840877"}, + "shard17_0_0":{ + "range":"638e0000-671bffff", + "state":"active", + "replicas":{ + "core_node844":{ + "core":"COLL_2_shard17_0_0_replica_n842", + "base_url":"http://N_6c/solr", + "node_name":"N_6c_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1735":{ + "core":"COLL_2_shard17_0_0_replica_n1734", + "base_url":"http://N_2u/solr", + "node_name":"N_2u_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1781":{ + "core":"COLL_2_shard17_0_0_replica_n1780", + "base_url":"http://N_1i/solr", + "node_name":"N_1i_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565266637880800687"}, + "shard17_0_1":{ + "range":"671c0000-6aa9ffff", + "state":"active", + "replicas":{ + "core_node848":{ + "core":"COLL_2_shard17_0_1_replica_n843", + "base_url":"http://N_6c/solr", + "node_name":"N_6c_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1115":{ + "core":"COLL_2_shard17_0_1_replica_n1114", + "base_url":"http://N_2u/solr", + "node_name":"N_2u_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1117":{ + "core":"COLL_2_shard17_0_1_replica_n1116", + "base_url":"http://N_1i/solr", + "node_name":"N_1i_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565266637880794287"}, + "shard16_1_0":{ + "range":"5c710000-5ffeffff", + "state":"active", + "replicas":{ + "core_node852":{ + "core":"COLL_2_shard16_1_0_replica_n850", + "base_url":"http://N_8/solr", + "node_name":"N_8_solr", + "state":"active", + 
"type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node991":{ + "core":"COLL_2_shard16_1_0_replica_n990", + "base_url":"http://N_3/solr", + "node_name":"N_3_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node993":{ + "core":"COLL_2_shard16_1_0_replica_n992", + "base_url":"http://N_1/solr", + "node_name":"N_1_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565275919174780002"}, + "shard16_1_1":{ + "range":"5fff0000-638dffff", + "state":"active", + "replicas":{ + "core_node853":{ + "core":"COLL_2_shard16_1_1_replica_n851", + "base_url":"http://N_8/solr", + "node_name":"N_8_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node995":{ + "core":"COLL_2_shard16_1_1_replica_n994", + "base_url":"http://N_1/solr", + "node_name":"N_1_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node997":{ + "core":"COLL_2_shard16_1_1_replica_n996", + "base_url":"http://N_3/solr", + "node_name":"N_3_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565275919174771365"}, + "shard16_0_0":{ + "range":"55550000-58e2ffff", + "state":"active", + "replicas":{ + "core_node856":{ + "core":"COLL_2_shard16_0_0_replica_n854", + "base_url":"http://N_8/solr", + "node_name":"N_8_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node983":{ + "core":"COLL_2_shard16_0_0_replica_n982", + "base_url":"http://N_1/solr", + "node_name":"N_1_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1785":{ + "core":"COLL_2_shard16_0_0_replica_n1784", + "base_url":"http://N_303/solr", + "node_name":"N_303_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565275747479472229"}, + "shard16_0_1":{ + "range":"58e30000-5c70ffff", + "state":"active", + "replicas":{ + 
"core_node857":{ + "core":"COLL_2_shard16_0_1_replica_n855", + "base_url":"http://N_8/solr", + "node_name":"N_8_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node987":{ + "core":"COLL_2_shard16_0_1_replica_n986", + "base_url":"http://N_303/solr", + "node_name":"N_303_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node989":{ + "core":"COLL_2_shard16_0_1_replica_n988", + "base_url":"http://N_1/solr", + "node_name":"N_1_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565275747479466413"}, + "shard5_1_0":{ + "range":"bfff0000-c38cffff", + "state":"active", + "replicas":{ + "core_node1135":{ + "core":"COLL_2_shard5_1_0_replica_n1134", + "base_url":"http://N_1c/solr", + "node_name":"N_1c_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1137":{ + "core":"COLL_2_shard5_1_0_replica_n1136", + "base_url":"http://N_2/solr", + "node_name":"N_2_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1783":{ + "core":"COLL_2_shard5_1_0_replica_n1782", + "base_url":"http://N_g/solr", + "node_name":"N_g_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565275178824240086"}, + "shard5_1_1":{ + "range":"c38d0000-c71bffff", + "state":"active", + "replicas":{ + "core_node861":{ + "core":"COLL_2_shard5_1_1_replica_n859", + "base_url":"http://N_g/solr", + "node_name":"N_g_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1139":{ + "core":"COLL_2_shard5_1_1_replica_n1138", + "base_url":"http://N_2/solr", + "node_name":"N_2_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1141":{ + "core":"COLL_2_shard5_1_1_replica_n1140", + "base_url":"http://N_1c/solr", + "node_name":"N_1c_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + 
"leader":"true"}}, + "stateTimestamp":"1565275178824249126"}, + "shard5_0_0":{ + "range":"b8e30000-bc70ffff", + "state":"active", + "replicas":{ + "core_node999":{ + "core":"COLL_2_shard5_0_0_replica_n998", + "base_url":"http://N_1c/solr", + "node_name":"N_1c_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1001":{ + "core":"COLL_2_shard5_0_0_replica_n1000", + "base_url":"http://N_1f/solr", + "node_name":"N_1f_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1769":{ + "core":"COLL_2_shard5_0_0_replica_n1768", + "base_url":"http://N_g/solr", + "node_name":"N_g_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565275212524831473"}, + "shard5_0_1":{ + "range":"bc710000-bffeffff", + "state":"active", + "replicas":{ + "core_node1003":{ + "core":"COLL_2_shard5_0_1_replica_n1002", + "base_url":"http://N_1f/solr", + "node_name":"N_1f_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1703":{ + "core":"COLL_2_shard5_0_1_replica_n1702", + "base_url":"http://N_1c/solr", + "node_name":"N_1c_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1771":{ + "core":"COLL_2_shard5_0_1_replica_n1770", + "base_url":"http://N_g/solr", + "node_name":"N_g_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565275212524825994"}, + "shard7_1_0":{ + "range":"dc710000-dffeffff", + "state":"active", + "replicas":{ + "core_node928":{ + "core":"COLL_2_shard7_1_0_replica_n926", + "base_url":"http://N_dj/solr", + "node_name":"N_dj_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1143":{ + "core":"COLL_2_shard7_1_0_replica_n1142", + "base_url":"http://N_29/solr", + "node_name":"N_29_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1145":{ + 
"core":"COLL_2_shard7_1_0_replica_n1144", + "base_url":"http://N_1h/solr", + "node_name":"N_1h_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565302558938511755"}, + "shard9_1_0":{ + "range":"f8e30000-fc70ffff", + "state":"active", + "replicas":{ + "core_node931":{ + "core":"COLL_2_shard9_1_0_replica_n929", + "base_url":"http://N_4g/solr", + "node_name":"N_4g_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1151":{ + "core":"COLL_2_shard9_1_0_replica_n1150", + "base_url":"http://N_303/solr", + "node_name":"N_303_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1153":{ + "core":"COLL_2_shard9_1_0_replica_n1152", + "base_url":"http://N_11/solr", + "node_name":"N_11_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565302364380675715"}, + "shard6_1_0":{ + "range":"ce380000-d1c5ffff", + "state":"active", + "replicas":{ + "core_node937":{ + "core":"COLL_2_shard6_1_0_replica_n935", + "base_url":"http://N_cs/solr", + "node_name":"N_cs_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1167":{ + "core":"COLL_2_shard6_1_0_replica_n1166", + "base_url":"http://N_1m/solr", + "node_name":"N_1m_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1793":{ + "core":"COLL_2_shard6_1_0_replica_n1792", + "base_url":"http://N_29/solr", + "node_name":"N_29_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565308352274112068"}, + "shard7_1_1":{ + "range":"dfff0000-e38dffff", + "state":"active", + "replicas":{ + "core_node941":{ + "core":"COLL_2_shard7_1_1_replica_n927", + "base_url":"http://N_dj/solr", + "node_name":"N_dj_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1701":{ + "core":"COLL_2_shard7_1_1_replica_n1700", + 
"base_url":"http://N_1h/solr", + "node_name":"N_1h_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1759":{ + "core":"COLL_2_shard7_1_1_replica_n1758", + "base_url":"http://N_29/solr", + "node_name":"N_29_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565302558938518446"}, + "shard9_1_1":{ + "range":"fc710000-ffffffff", + "state":"active", + "replicas":{ + "core_node944":{ + "core":"COLL_2_shard9_1_1_replica_n930", + "base_url":"http://N_4g/solr", + "node_name":"N_4g_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1155":{ + "core":"COLL_2_shard9_1_1_replica_n1154", + "base_url":"http://N_11/solr", + "node_name":"N_11_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1163":{ + "core":"COLL_2_shard9_1_1_replica_n1162", + "base_url":"http://N_303/solr", + "node_name":"N_303_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565302364380668948"}, + "shard6_1_1":{ + "range":"d1c60000-d554ffff", + "state":"active", + "replicas":{ + "core_node1171":{ + "core":"COLL_2_shard6_1_1_replica_n1170", + "base_url":"http://N_m/solr", + "node_name":"N_m_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1705":{ + "core":"COLL_2_shard6_1_1_replica_n1704", + "base_url":"http://N_cs/solr", + "node_name":"N_cs_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1745":{ + "core":"COLL_2_shard6_1_1_replica_n1744", + "base_url":"http://N_1m/solr", + "node_name":"N_1m_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565308352274105667"}, + "shard15_1_0":{ + "range":"4e380000-51c5ffff", + "state":"active", + "replicas":{ + "core_node955":{ + "core":"COLL_2_shard15_1_0_replica_n953", + "base_url":"http://N_cs/solr", + "node_name":"N_cs_solr", + 
"state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1173":{ + "core":"COLL_2_shard15_1_0_replica_n1172", + "base_url":"http://N_65p/solr", + "node_name":"N_65p_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1175":{ + "core":"COLL_2_shard15_1_0_replica_n1174", + "base_url":"http://N_a/solr", + "node_name":"N_a_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565308280442325340"}, + "shard15_1_1":{ + "range":"51c60000-5554ffff", + "state":"active", + "replicas":{ + "core_node956":{ + "core":"COLL_2_shard15_1_1_replica_n954", + "base_url":"http://N_cs/solr", + "node_name":"N_cs_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1709":{ + "core":"COLL_2_shard15_1_1_replica_n1708", + "base_url":"http://N_6/solr", + "node_name":"N_6_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1747":{ + "core":"COLL_2_shard15_1_1_replica_n1746", + "base_url":"http://N_65p/solr", + "node_name":"N_65p_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565308280442332742"}, + "shard6_0_0":{ + "range":"c71c0000-caa9ffff", + "state":"active", + "replicas":{ + "core_node1182":{ + "core":"COLL_2_shard6_0_0_replica_n1180", + "base_url":"http://N_4f/solr", + "node_name":"N_4f_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1208":{ + "core":"COLL_2_shard6_0_0_replica_n1207", + "base_url":"http://N_m/solr", + "node_name":"N_m_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1210":{ + "core":"COLL_2_shard6_0_0_replica_n1209", + "base_url":"http://N_11/solr", + "node_name":"N_11_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565327402584689595"}, + "shard11_0_0":{ + "range":"e380000-11c5ffff", + 
"state":"active", + "replicas":{ + "core_node1185":{ + "core":"COLL_2_shard11_0_0_replica_n1183", + "base_url":"http://N_t/solr", + "node_name":"N_t_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1217":{ + "core":"COLL_2_shard11_0_0_replica_n1216", + "base_url":"http://N_1f/solr", + "node_name":"N_1f_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1219":{ + "core":"COLL_2_shard11_0_0_replica_n1218", + "base_url":"http://N_9o/solr", + "node_name":"N_9o_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565326848103929784"}, + "shard6_0_1":{ + "range":"caaa0000-ce37ffff", + "state":"active", + "replicas":{ + "core_node1189":{ + "core":"COLL_2_shard6_0_1_replica_n1181", + "base_url":"http://N_4f/solr", + "node_name":"N_4f_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1212":{ + "core":"COLL_2_shard6_0_1_replica_n1211", + "base_url":"http://N_11/solr", + "node_name":"N_11_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1214":{ + "core":"COLL_2_shard6_0_1_replica_n1213", + "base_url":"http://N_m/solr", + "node_name":"N_m_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565327402584696403"}, + "shard11_0_1":{ + "range":"11c60000-1553ffff", + "state":"active", + "replicas":{ + "core_node1195":{ + "core":"COLL_2_shard11_0_1_replica_n1184", + "base_url":"http://N_t/solr", + "node_name":"N_t_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1221":{ + "core":"COLL_2_shard11_0_1_replica_n1220", + "base_url":"http://N_9o/solr", + "node_name":"N_9o_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1223":{ + "core":"COLL_2_shard11_0_1_replica_n1222", + "base_url":"http://N_1f/solr", + "node_name":"N_1f_solr", + 
"state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565326848103918157"}, + "shard17_1_0":{ + "range":"6aaa0000-6e37ffff", + "state":"active", + "replicas":{ + "core_node1200":{ + "core":"COLL_2_shard17_1_0_replica_n1198", + "base_url":"http://N_1i/solr", + "node_name":"N_1i_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1225":{ + "core":"COLL_2_shard17_1_0_replica_n1224", + "base_url":"http://N_2u/solr", + "node_name":"N_2u_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1227":{ + "core":"COLL_2_shard17_1_0_replica_n1226", + "base_url":"http://N_m/solr", + "node_name":"N_m_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565327203156720131"}, + "shard17_1_1":{ + "range":"6e380000-71c6ffff", + "state":"active", + "replicas":{ + "core_node1203":{ + "core":"COLL_2_shard17_1_1_replica_n1199", + "base_url":"http://N_1i/solr", + "node_name":"N_1i_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1229":{ + "core":"COLL_2_shard17_1_1_replica_n1228", + "base_url":"http://N_m/solr", + "node_name":"N_m_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1231":{ + "core":"COLL_2_shard17_1_1_replica_n1230", + "base_url":"http://N_2u/solr", + "node_name":"N_2u_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}}, + "stateTimestamp":"1565327203156741532"}, + "shard12_0_0":{ + "range":"1c710000-1ffeffff", + "state":"active", + "replicas":{ + "core_node1249":{ + "core":"COLL_2_shard12_0_0_replica_n1248", + "base_url":"http://N_2/solr", + "node_name":"N_2_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1697":{ + "core":"COLL_2_shard12_0_0_replica_n1696", + "base_url":"http://N_1h/solr", + "node_name":"N_1h_solr", + "state":"active", + "type":"NRT", + 
"force_set_state":"false"}, + "core_node1751":{ + "core":"COLL_2_shard12_0_0_replica_n1750", + "base_url":"http://N_17/solr", + "node_name":"N_17_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565348748808724074"}, + "shard12_0_1":{ + "range":"1fff0000-238cffff", + "state":"active", + "replicas":{ + "core_node1255":{ + "core":"COLL_2_shard12_0_1_replica_n1254", + "base_url":"http://N_2/solr", + "node_name":"N_2_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1699":{ + "core":"COLL_2_shard12_0_1_replica_n1698", + "base_url":"http://N_17/solr", + "node_name":"N_17_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1761":{ + "core":"COLL_2_shard12_0_1_replica_n1760", + "base_url":"http://N_1h/solr", + "node_name":"N_1h_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565348748808712354"}, + "shard8_1_0":{ + "range":"eaaa0000-ee37ffff", + "state":"active", + "replicas":{ + "core_node1707":{ + "core":"COLL_2_shard8_1_0_replica_n1706", + "base_url":"http://N_z/solr", + "node_name":"N_z_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1765":{ + "core":"COLL_2_shard8_1_0_replica_n1764", + "base_url":"http://N_u/solr", + "node_name":"N_u_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1811":{ + "core":"COLL_2_shard8_1_0_replica_n1810", + "base_url":"http://N_6/solr", + "node_name":"N_6_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565367832373747746"}, + "shard8_1_1":{ + "range":"ee380000-f1c6ffff", + "state":"active", + "replicas":{ + "core_node1711":{ + "core":"COLL_2_shard8_1_1_replica_n1710", + "base_url":"http://N_1m/solr", + "node_name":"N_1m_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1755":{ + 
"core":"COLL_2_shard8_1_1_replica_n1754", + "base_url":"http://N_z/solr", + "node_name":"N_z_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1795":{ + "core":"COLL_2_shard8_1_1_replica_n1794", + "base_url":"http://N_4g/solr", + "node_name":"N_4g_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565367832373770614"}, + "shard13_0_0":{ + "range":"2aaa0000-2e37ffff", + "state":"active", + "replicas":{ + "core_node1257":{ + "core":"COLL_2_shard13_0_0_replica_n1256", + "base_url":"http://N_u/solr", + "node_name":"N_u_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1713":{ + "core":"COLL_2_shard13_0_0_replica_n1712", + "base_url":"http://N_7/solr", + "node_name":"N_7_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1749":{ + "core":"COLL_2_shard13_0_0_replica_n1748", + "base_url":"http://N_dj/solr", + "node_name":"N_dj_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}}, + "stateTimestamp":"1565369006475868394"}, + "shard13_0_1":{ + "range":"2e380000-31c5ffff", + "state":"active", + "replicas":{ + "core_node1263":{ + "core":"COLL_2_shard13_0_1_replica_n1262", + "base_url":"http://N_u/solr", + "node_name":"N_u_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node1715":{ + "core":"COLL_2_shard13_0_1_replica_n1714", + "base_url":"http://N_dj/solr", + "node_name":"N_dj_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node1767":{ + "core":"COLL_2_shard13_0_1_replica_n1766", + "base_url":"http://N_7/solr", + "node_name":"N_7_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}, + "stateTimestamp":"1565369006475856752"}}, + "router":{"name":"compositeId"}, + "maxShardsPerNode":"1", + "autoAddReplicas":"true", + "nrtReplicas":"2", + "tlogReplicas":"0"}, + "COLL_q":{ + 
"pullReplicas":"0", + "replicationFactor":"3", + "shards":{"shard1":{ + "range":"80000000-7fffffff", + "state":"active", + "replicas":{ + "core_node3":{ + "core":"COLL_q_shard1_replica_n1", + "base_url":"http://N_7e/solr", + "node_name":"N_7e_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node6":{ + "core":"COLL_q_shard1_replica_n4", + "base_url":"http://N_4/solr", + "node_name":"N_4_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node10":{ + "core":"COLL_q_shard1_replica_n9", + "base_url":"http://N_0/solr", + "node_name":"N_0_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}}}}, + "router":{"name":"compositeId"}, + "maxShardsPerNode":"1", + "autoAddReplicas":"true", + "nrtReplicas":"3", + "tlogReplicas":"0"}, + "COLL_22":{ + "pullReplicas":"0", + "replicationFactor":"3", + "shards":{"shard1":{ + "range":"80000000-7fffffff", + "state":"active", + "replicas":{ + "core_node5":{ + "core":"COLL_22_shard1_replica_n2", + "base_url":"http://N_4/solr", + "node_name":"N_4_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node6":{ + "core":"COLL_22_shard1_replica_n4", + "base_url":"http://N_7e/solr", + "node_name":"N_7e_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node10":{ + "core":"COLL_22_shard1_replica_n9", + "base_url":"http://N_0/solr", + "node_name":"N_0_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}}}}, + "router":{"name":"compositeId"}, + "maxShardsPerNode":"1", + "autoAddReplicas":"true", + "nrtReplicas":"3", + "tlogReplicas":"0"}, + "COLL_1b":{ + "pullReplicas":"0", + "replicationFactor":"3", + "shards":{"shard1":{ + "range":"80000000-7fffffff", + "state":"active", + "replicas":{ + "core_node3":{ + "core":"COLL_1b_shard1_replica_n1", + "base_url":"http://N_7e/solr", + "node_name":"N_7e_solr", + "state":"active", + "type":"NRT", + 
"force_set_state":"false"}, + "core_node6":{ + "core":"COLL_1b_shard1_replica_n4", + "base_url":"http://N_4/solr", + "node_name":"N_4_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node10":{ + "core":"COLL_1b_shard1_replica_n9", + "base_url":"http://N_0/solr", + "node_name":"N_0_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}}}}, + "router":{"name":"compositeId"}, + "maxShardsPerNode":"1", + "autoAddReplicas":"true", + "nrtReplicas":"3", + "tlogReplicas":"0"}, + "COLL_5":{ + "pullReplicas":"0", + "replicationFactor":"1", + "shards":{"shard1":{ + "range":"80000000-7fffffff", + "state":"active", + "replicas":{"core_node2":{ + "core":"COLL_5_shard1_replica_n1", + "base_url":"http://N_7e/solr", + "node_name":"N_7e_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}}}}, + "router":{"name":"compositeId"}, + "maxShardsPerNode":"1", + "autoAddReplicas":"true", + "nrtReplicas":"1", + "tlogReplicas":"0"}, + "COLL_1x":{ + "pullReplicas":"0", + "replicationFactor":"3", + "shards":{"shard1":{ + "range":"80000000-7fffffff", + "state":"active", + "replicas":{ + "core_node3":{ + "core":"COLL_1x_shard1_replica_n1", + "base_url":"http://N_7e/solr", + "node_name":"N_7e_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node6":{ + "core":"COLL_1x_shard1_replica_n4", + "base_url":"http://N_4/solr", + "node_name":"N_4_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node10":{ + "core":"COLL_1x_shard1_replica_n9", + "base_url":"http://N_0/solr", + "node_name":"N_0_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}}}}, + "router":{"name":"compositeId"}, + "maxShardsPerNode":"1", + "autoAddReplicas":"true", + "nrtReplicas":"3", + "tlogReplicas":"0"}, + "COLL_l":{ + "pullReplicas":"0", + "replicationFactor":"3", + "shards":{"shard1":{ + "range":"80000000-7fffffff", + 
"state":"active", + "replicas":{ + "core_node3":{ + "core":"COLL_l_shard1_replica_n1", + "base_url":"http://N_7e/solr", + "node_name":"N_7e_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node6":{ + "core":"COLL_l_shard1_replica_n4", + "base_url":"http://N_4/solr", + "node_name":"N_4_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node10":{ + "core":"COLL_l_shard1_replica_n9", + "base_url":"http://N_0/solr", + "node_name":"N_0_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}}}}, + "router":{"name":"compositeId"}, + "maxShardsPerNode":"1", + "autoAddReplicas":"true", + "nrtReplicas":"3", + "tlogReplicas":"0"}, + "COLL_0":{ + "pullReplicas":"0", + "replicationFactor":"3", + "shards":{ + "shard1":{ + "range":"80000000-d554ffff", + "state":"active", + "replicas":{ + "core_node3":{ + "core":"COLL_0_shard1_replica_n1", + "base_url":"http://N_d4/solr", + "node_name":"N_d4_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node5":{ + "core":"COLL_0_shard1_replica_n2", + "base_url":"http://N_16/solr", + "node_name":"N_16_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node7":{ + "core":"COLL_0_shard1_replica_n4", + "base_url":"http://N_74/solr", + "node_name":"N_74_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}}, + "shard2":{ + "range":"d5550000-2aa9ffff", + "state":"active", + "replicas":{ + "core_node9":{ + "core":"COLL_0_shard2_replica_n6", + "base_url":"http://N_b9/solr", + "node_name":"N_b9_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node11":{ + "core":"COLL_0_shard2_replica_n8", + "base_url":"http://N_3to/solr", + "node_name":"N_3to_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node13":{ + "core":"COLL_0_shard2_replica_n10", + "base_url":"http://N_13/solr", + 
"node_name":"N_13_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}}, + "shard3":{ + "range":"2aaa0000-7fffffff", + "state":"active", + "replicas":{ + "core_node15":{ + "core":"COLL_0_shard3_replica_n12", + "base_url":"http://N_do/solr", + "node_name":"N_do_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}, + "core_node17":{ + "core":"COLL_0_shard3_replica_n14", + "base_url":"http://N_3a/solr", + "node_name":"N_3a_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node18":{ + "core":"COLL_0_shard3_replica_n16", + "base_url":"http://N_v/solr", + "node_name":"N_v_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}}}}, + "router":{"name":"compositeId"}, + "maxShardsPerNode":"1", + "autoAddReplicas":"true", + "nrtReplicas":"3", + "tlogReplicas":"0"}, + "COLL_6":{ + "pullReplicas":"0", + "replicationFactor":"3", + "shards":{"shard1":{ + "range":"80000000-7fffffff", + "state":"active", + "replicas":{ + "core_node3":{ + "core":"COLL_6_shard1_replica_n1", + "base_url":"http://N_7e/solr", + "node_name":"N_7e_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node5":{ + "core":"COLL_6_shard1_replica_n2", + "base_url":"http://N_4/solr", + "node_name":"N_4_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false"}, + "core_node6":{ + "core":"COLL_6_shard1_replica_n4", + "base_url":"http://N_0/solr", + "node_name":"N_0_solr", + "state":"active", + "type":"NRT", + "force_set_state":"false", + "leader":"true"}}}}, + "router":{"name":"compositeId"}, + "maxShardsPerNode":"1", + "autoAddReplicas":"true", + "nrtReplicas":"3", + "tlogReplicas":"0"}}} \ No newline at end of file diff --git a/solr/core/src/test-files/solr/simSnapshot/distribState.json b/solr/core/src/test-files/solr/simSnapshot/distribState.json new file mode 100644 index 000000000000..f59ab577e40d --- /dev/null +++ 
b/solr/core/src/test-files/solr/simSnapshot/distribState.json @@ -0,0 +1,206 @@ +{ + "/clusterstate.json":{ + "owner":"0", + "mode":"PERSISTENT", + "version":0}, + "/live_nodes":{ + "owner":"0", + "mode":"PERSISTENT", + "version":0}, + "/live_nodes/N_11_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_65p_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_8_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_74_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_1i_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_4g_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_2_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_a_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_1c_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_16_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_6c_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_v_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_3a_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_1d_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_1_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_u_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_7_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_t_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_6i_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_d4_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_17_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_1f_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + 
"version":0}, + "/live_nodes/N_do_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_3_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_303_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_29_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_9o_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_7e_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_1m_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_4_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_m_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_dj_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_e_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_13_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_g_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_2w_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_z_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_cs_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_3a7_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_aw_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_0_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_3to_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_4f_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_1h_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_2u_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_b9_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_6_solr":{ + "owner":"0", + 
"mode":"EPHEMERAL", + "version":0}, + "/live_nodes/N_5_solr":{ + "owner":"0", + "mode":"EPHEMERAL", + "version":0}, + "/autoscaling.json":{ + "owner":"0", + "mode":"PERSISTENT", + "data":"ewogICJjbHVzdGVyLXByZWZlcmVuY2VzIjpbCiAgICB7CiAgICAgICJtaW5pbWl6ZSI6ImNvcmVzIiwKICAgICAgInByZWNpc2lvbiI6MX0sCiAgICB7CiAgICAgICJtYXhpbWl6ZSI6ImZyZWVkaXNrIiwKICAgICAgInByZWNpc2lvbiI6MTB9XSwKICAiY2x1c3Rlci1wb2xpY3kiOlsKICAgIHsKICAgICAgInJlcGxpY2EiOiIjQUxMIiwKICAgICAgImNvbGxlY3Rpb24iOiJDT0xMXzIiLAogICAgICAic3lzcHJvcC5wb29sIjoicG9vbC0wMSJ9LAogICAgewogICAgICAicmVwbGljYSI6IiNBTEwiLAogICAgICAiY29sbGVjdGlvbiI6IkNPTExfMSIsCiAgICAgICJzeXNwcm9wLnBvb2wiOiJwb29sLTAyIn0sCiAgICB7CiAgICAgICJyZXBsaWNhIjoiI0FMTCIsCiAgICAgICJjb2xsZWN0aW9uIjoiQ09MTF8wIiwKICAgICAgInN5c3Byb3AucG9vbCI6InBvb2wtMDIifSwKICAgIHsKICAgICAgInJlcGxpY2EiOiI8MiIsCiAgICAgICJzaGFyZCI6IiNFQUNIIiwKICAgICAgIm5vZGUiOiIjQU5ZIn0sCiAgICB7CiAgICAgICJyZXBsaWNhIjoiI0VRVUFMIiwKICAgICAgInNoYXJkIjoiI0VBQ0giLAogICAgICAic3lzcHJvcC5heiI6IiNFQUNIIn1dLAogICJ0cmlnZ2VycyI6e30sCiAgImxpc3RlbmVycyI6e30sCiAgInByb3BlcnRpZXMiOnt9fQ==", + "version":0}} \ No newline at end of file diff --git a/solr/core/src/test-files/solr/simSnapshot/managerState.json b/solr/core/src/test-files/solr/simSnapshot/managerState.json new file mode 100644 index 000000000000..b96ebf4ca94a --- /dev/null +++ b/solr/core/src/test-files/solr/simSnapshot/managerState.json @@ -0,0 +1 @@ +{"timeSource":"SimTimeSource:50.0"} \ No newline at end of file diff --git a/solr/core/src/test-files/solr/simSnapshot/nodeState.json b/solr/core/src/test-files/solr/simSnapshot/nodeState.json new file mode 100644 index 000000000000..e923736badac --- /dev/null +++ b/solr/core/src/test-files/solr/simSnapshot/nodeState.json @@ -0,0 +1,3823 @@ +{ + "nodeValues":{ + "N_7e_solr":{ + "node":"N_7e_solr", + "isLive":true, + "cores":13, + "freedisk":873.6022491455078, + "sysprop.pool":"pool-03", + "sysprop.az":"us-east-1b", + "totaldisk":999.51171875}, + "N_0_solr":{ + "node":"N_0_solr", + "isLive":true, + 
"cores":12, + "freedisk":719.6562576293945, + "sysprop.pool":"pool-03", + "sysprop.az":"us-east-1a", + "totaldisk":999.51171875}, + "N_4_solr":{ + "node":"N_4_solr", + "isLive":true, + "cores":12, + "freedisk":875.4758682250977, + "sysprop.pool":"pool-03", + "sysprop.az":"us-east-1c", + "totaldisk":999.51171875}, + "N_g_solr":{ + "node":"N_g_solr", + "isLive":true, + "cores":6, + "freedisk":4007.3253440856934, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625}, + "N_17_solr":{ + "node":"N_17_solr", + "isLive":true, + "cores":6, + "freedisk":4093.756145477295, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625}, + "N_303_solr":{ + "node":"N_303_solr", + "isLive":true, + "cores":6, + "freedisk":4111.4668045043945, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625}, + "N_dj_solr":{ + "node":"N_dj_solr", + "isLive":true, + "cores":6, + "freedisk":4162.087951660156, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625}, + "N_1c_solr":{ + "node":"N_1c_solr", + "isLive":true, + "cores":6, + "freedisk":4181.229598999023, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625}, + "N_z_solr":{ + "node":"N_z_solr", + "isLive":true, + "cores":6, + "freedisk":4215.115695953369, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625}, + "N_6_solr":{ + "node":"N_6_solr", + "isLive":true, + "cores":6, + "freedisk":4252.47643661499, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625}, + "N_1m_solr":{ + "node":"N_1m_solr", + "isLive":true, + "cores":6, + "freedisk":4257.921604156494, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625}, + "N_4g_solr":{ + "node":"N_4g_solr", + "isLive":true, + "cores":6, + "freedisk":4259.9677734375, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + 
"totaldisk":4998.009765625}, + "N_65p_solr":{ + "node":"N_65p_solr", + "isLive":true, + "cores":6, + "freedisk":4260.997627258301, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625}, + "N_u_solr":{ + "node":"N_u_solr", + "isLive":true, + "cores":6, + "freedisk":4260.821304321289, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625}, + "N_1f_solr":{ + "node":"N_1f_solr", + "isLive":true, + "cores":6, + "freedisk":4260.807849884033, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625}, + "N_cs_solr":{ + "node":"N_cs_solr", + "isLive":true, + "cores":6, + "freedisk":4260.629165649414, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625}, + "N_8_solr":{ + "node":"N_8_solr", + "isLive":true, + "cores":6, + "freedisk":4262.037788391113, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625}, + "N_a_solr":{ + "node":"N_a_solr", + "isLive":true, + "cores":6, + "freedisk":4262.172649383545, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625}, + "N_3a7_solr":{ + "node":"N_3a7_solr", + "isLive":true, + "cores":6, + "freedisk":4263.317134857178, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625}, + "N_11_solr":{ + "node":"N_11_solr", + "isLive":true, + "cores":6, + "freedisk":4264.325901031494, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625}, + "N_4f_solr":{ + "node":"N_4f_solr", + "isLive":true, + "cores":6, + "freedisk":4264.210151672363, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625}, + "N_1i_solr":{ + "node":"N_1i_solr", + "isLive":true, + "cores":6, + "freedisk":4266.027156829834, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625}, + "N_9o_solr":{ + "node":"N_9o_solr", + "isLive":true, + "cores":6, + 
"freedisk":4265.881809234619, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625}, + "N_2_solr":{ + "node":"N_2_solr", + "isLive":true, + "cores":6, + "freedisk":4266.604637145996, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625}, + "N_t_solr":{ + "node":"N_t_solr", + "isLive":true, + "cores":6, + "freedisk":4266.856658935547, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625}, + "N_2u_solr":{ + "node":"N_2u_solr", + "isLive":true, + "cores":6, + "freedisk":4266.648368835449, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625}, + "N_m_solr":{ + "node":"N_m_solr", + "isLive":true, + "cores":6, + "freedisk":4267.171646118164, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625}, + "N_7_solr":{ + "node":"N_7_solr", + "isLive":true, + "cores":6, + "freedisk":4268.472709655762, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625}, + "N_6c_solr":{ + "node":"N_6c_solr", + "isLive":true, + "cores":6, + "freedisk":4269.135753631592, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625}, + "N_6i_solr":{ + "node":"N_6i_solr", + "isLive":true, + "cores":6, + "freedisk":4269.712917327881, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625}, + "N_3_solr":{ + "node":"N_3_solr", + "isLive":true, + "cores":6, + "freedisk":4272.45711517334, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625}, + "N_1d_solr":{ + "node":"N_1d_solr", + "isLive":true, + "cores":6, + "freedisk":4273.009799957275, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625}, + "N_1_solr":{ + "node":"N_1_solr", + "isLive":true, + "cores":6, + "freedisk":4274.765396118164, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625}, + 
"N_aw_solr":{ + "node":"N_aw_solr", + "isLive":true, + "cores":6, + "freedisk":4276.759601593018, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625}, + "N_1h_solr":{ + "node":"N_1h_solr", + "isLive":true, + "cores":6, + "freedisk":4297.329685211182, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625}, + "N_29_solr":{ + "node":"N_29_solr", + "isLive":true, + "cores":6, + "freedisk":4303.548599243164, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625}, + "N_e_solr":{ + "node":"N_e_solr", + "isLive":true, + "cores":6, + "freedisk":4334.874732971191, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625}, + "N_2w_solr":{ + "node":"N_2w_solr", + "isLive":true, + "cores":6, + "freedisk":4336.208312988281, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625}, + "N_5_solr":{ + "node":"N_5_solr", + "isLive":true, + "cores":6, + "freedisk":4397.149795532227, + "sysprop.pool":"pool-01", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625}, + "N_do_solr":{ + "node":"N_do_solr", + "isLive":true, + "cores":5, + "freedisk":407.25314712524414, + "sysprop.pool":"pool-02", + "sysprop.az":"us-east-1c", + "totaldisk":999.51171875}, + "N_3a_solr":{ + "node":"N_3a_solr", + "isLive":true, + "cores":5, + "freedisk":407.706729888916, + "sysprop.pool":"pool-02", + "sysprop.az":"us-east-1b", + "totaldisk":999.51171875}, + "N_v_solr":{ + "node":"N_v_solr", + "isLive":true, + "cores":5, + "freedisk":412.18456649780273, + "sysprop.pool":"pool-02", + "sysprop.az":"us-east-1a", + "totaldisk":999.51171875}, + "N_13_solr":{ + "node":"N_13_solr", + "isLive":true, + "cores":5, + "freedisk":718.1634063720703, + "sysprop.pool":"pool-02", + "sysprop.az":"us-east-1c", + "totaldisk":999.51171875}, + "N_3to_solr":{ + "node":"N_3to_solr", + "isLive":true, + "cores":5, + "freedisk":794.5433731079102, + "sysprop.pool":"pool-02", + 
"sysprop.az":"us-east-1a", + "totaldisk":999.51171875}, + "N_16_solr":{ + "node":"N_16_solr", + "isLive":true, + "cores":5, + "freedisk":795.7872657775879, + "sysprop.pool":"pool-02", + "sysprop.az":"us-east-1b", + "totaldisk":999.51171875}, + "N_d4_solr":{ + "node":"N_d4_solr", + "isLive":true, + "cores":5, + "freedisk":797.2159843444824, + "sysprop.pool":"pool-02", + "sysprop.az":"us-east-1c", + "totaldisk":999.51171875}, + "N_b9_solr":{ + "node":"N_b9_solr", + "isLive":true, + "cores":5, + "freedisk":801.2417984008789, + "sysprop.pool":"pool-02", + "sysprop.az":"us-east-1b", + "totaldisk":999.51171875}, + "N_74_solr":{ + "node":"N_74_solr", + "isLive":true, + "cores":5, + "freedisk":802.5921897888184, + "sysprop.pool":"pool-02", + "sysprop.az":"us-east-1a", + "totaldisk":999.51171875}}, + "replicaInfos":{ + "N_7e_solr":{ + "COLL_22":{"shard1":[{"core_node6":{ + "core":"COLL_22_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_22", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":2.4348956483E10, + "INDEX.sizeInGB":22.676732840947807}}]}, + "COLL_q":{"shard1":[{"core_node3":{ + "core":"COLL_q_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_q", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.9242789E8, + "INDEX.sizeInGB":0.45860921032726765}}]}, + "COLL_1b":{"shard1":[{"core_node3":{ + "core":"COLL_1b_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_1b", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7}}]}, + "COLL_1t":{"shard1":[{"core_node3":{ + "core":"COLL_1t_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_1t", + "node_name":"N_7e_solr", + "type":"NRT", + 
"base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":8.5774407615E10, + "INDEX.sizeInGB":79.8836421361193}}]}, + "COLL_x":{"shard1":[{"core_node3":{ + "core":"COLL_x_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_x", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":3.18270873E8, + "INDEX.sizeInGB":0.296412848867476}}]}, + "COLL_2k":{"shard1":[{"core_node3":{ + "core":"COLL_2k_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_2k", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7}}]}, + "COLL_1r":{"shard1":[{"core_node3":{ + "core":"COLL_1r_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_1r", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.12015174E8, + "INDEX.sizeInGB":0.38371903263032436}}]}, + "COLL_8":{"shard1":[{"core_node3":{ + "core":"COLL_8_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_8", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":356048.0, + "INDEX.sizeInGB":3.315955400466919E-4}}]}, + "COLL_5":{"shard1":[{"core_node2":{ + "core":"COLL_5_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_5", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":5.854396964E9, + "INDEX.sizeInGB":5.452332053333521}}]}, + "COLL_l":{"shard1":[{"core_node3":{ + "core":"COLL_l_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_l", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + 
"state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7}}]}, + "COLL_1x":{"shard1":[{"core_node3":{ + "core":"COLL_1x_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_1x", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4248411.0, + "INDEX.sizeInGB":0.00395664107054472}}]}, + "COLL_4":{"shard1":[{"core_node3":{ + "core":"COLL_4_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_4", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":2.58881858E8, + "INDEX.sizeInGB":0.2411025185137987}}]}, + "COLL_6":{"shard1":[{"core_node3":{ + "core":"COLL_6_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_6", + "node_name":"N_7e_solr", + "type":"NRT", + "base_url":"http://N_7e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.6446420654E10, + "INDEX.sizeInGB":15.316922826692462}}]}}, + "N_0_solr":{ + "COLL_22":{"shard1":[{"core_node10":{ + "core":"COLL_22_shard1_replica_n9", + "shard":"shard1", + "collection":"COLL_22", + "node_name":"N_0_solr", + "type":"NRT", + "base_url":"http://N_0/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":2.4351639993E10, + "INDEX.sizeInGB":22.679232054390013}}]}, + "COLL_q":{"shard1":[{"core_node10":{ + "core":"COLL_q_shard1_replica_n9", + "shard":"shard1", + "collection":"COLL_q", + "node_name":"N_0_solr", + "type":"NRT", + "base_url":"http://N_0/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.9242789E8, + "INDEX.sizeInGB":0.45860921032726765}}]}, + "COLL_1b":{"shard1":[{"core_node10":{ + "core":"COLL_1b_shard1_replica_n9", + "shard":"shard1", + "collection":"COLL_1b", + "node_name":"N_0_solr", + "type":"NRT", + 
"base_url":"http://N_0/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7}}]}, + "COLL_1t":{"shard1":[{"core_node5":{ + "core":"COLL_1t_shard1_replica_n2", + "shard":"shard1", + "collection":"COLL_1t", + "node_name":"N_0_solr", + "type":"NRT", + "base_url":"http://N_0/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":8.7485800719E10, + "INDEX.sizeInGB":81.47750116791576}}]}, + "COLL_x":{"shard1":[{"core_node10":{ + "core":"COLL_x_shard1_replica_n9", + "shard":"shard1", + "collection":"COLL_x", + "node_name":"N_0_solr", + "type":"NRT", + "base_url":"http://N_0/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":3.0928583E8, + "INDEX.sizeInGB":0.2880448754876852}}]}, + "COLL_2k":{"shard1":[{"core_node10":{ + "core":"COLL_2k_shard1_replica_n9", + "shard":"shard1", + "collection":"COLL_2k", + "node_name":"N_0_solr", + "type":"NRT", + "base_url":"http://N_0/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7}}]}, + "COLL_1r":{"shard1":[{"core_node5":{ + "core":"COLL_1r_shard1_replica_n2", + "shard":"shard1", + "collection":"COLL_1r", + "node_name":"N_0_solr", + "type":"NRT", + "base_url":"http://N_0/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.25884524E8, + "INDEX.sizeInGB":0.39663587138056755}}]}, + "COLL_8":{"shard1":[{"core_node5":{ + "core":"COLL_8_shard1_replica_n2", + "shard":"shard1", + "collection":"COLL_8", + "node_name":"N_0_solr", + "type":"NRT", + "base_url":"http://N_0/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":399225.0, + "INDEX.sizeInGB":3.718072548508644E-4}}]}, + "COLL_l":{"shard1":[{"core_node10":{ + "core":"COLL_l_shard1_replica_n9", + "shard":"shard1", + 
"collection":"COLL_l", + "node_name":"N_0_solr", + "type":"NRT", + "base_url":"http://N_0/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7}}]}, + "COLL_1x":{"shard1":[{"core_node10":{ + "core":"COLL_1x_shard1_replica_n9", + "shard":"shard1", + "collection":"COLL_1x", + "node_name":"N_0_solr", + "type":"NRT", + "base_url":"http://N_0/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4264901.0, + "INDEX.sizeInGB":0.003971998579800129}}]}, + "COLL_4":{"shard1":[{"core_node5":{ + "core":"COLL_4_shard1_replica_n2", + "shard":"shard1", + "collection":"COLL_4", + "node_name":"N_0_solr", + "type":"NRT", + "base_url":"http://N_0/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":2.58797271E8, + "INDEX.sizeInGB":0.24102374073117971}}]}, + "COLL_6":{"shard1":[{"core_node6":{ + "core":"COLL_6_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_6", + "node_name":"N_0_solr", + "type":"NRT", + "base_url":"http://N_0/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.4921656871E10, + "INDEX.sizeInGB":41.83655313309282}}]}}, + "N_4_solr":{ + "COLL_22":{"shard1":[{"core_node5":{ + "core":"COLL_22_shard1_replica_n2", + "shard":"shard1", + "collection":"COLL_22", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":2.436290627E10, + "INDEX.sizeInGB":22.689724592491984}}]}, + "COLL_q":{"shard1":[{"core_node6":{ + "core":"COLL_q_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_q", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.9242789E8, + "INDEX.sizeInGB":0.45860921032726765}}]}, + "COLL_1b":{"shard1":[{"core_node6":{ + 
"core":"COLL_1b_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_1b", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7}}]}, + "COLL_1t":{"shard1":[{"core_node6":{ + "core":"COLL_1t_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_1t", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":8.5380419785E10, + "INDEX.sizeInGB":79.51671237591654}}]}, + "COLL_x":{"shard1":[{"core_node6":{ + "core":"COLL_x_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_x", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":3.03301808E8, + "INDEX.sizeInGB":0.28247182071208954}}]}, + "COLL_2k":{"shard1":[{"core_node6":{ + "core":"COLL_2k_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_2k", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7}}]}, + "COLL_1r":{"shard1":[{"core_node6":{ + "core":"COLL_1r_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_1r", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.46826689E8, + "INDEX.sizeInGB":0.4161397824063897}}]}, + "COLL_8":{"shard1":[{"core_node6":{ + "core":"COLL_8_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_8", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":356048.0, + "INDEX.sizeInGB":3.315955400466919E-4}}]}, + "COLL_l":{"shard1":[{"core_node6":{ + "core":"COLL_l_shard1_replica_n4", + "shard":"shard1", + 
"collection":"COLL_l", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7}}]}, + "COLL_1x":{"shard1":[{"core_node6":{ + "core":"COLL_1x_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_1x", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4255591.0, + "INDEX.sizeInGB":0.003963327966630459}}]}, + "COLL_4":{"shard1":[{"core_node6":{ + "core":"COLL_4_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_4", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":2.59832461E8, + "INDEX.sizeInGB":0.2419878365471959}}]}, + "COLL_6":{"shard1":[{"core_node5":{ + "core":"COLL_6_shard1_replica_n2", + "shard":"shard1", + "collection":"COLL_6", + "node_name":"N_4_solr", + "type":"NRT", + "base_url":"http://N_4/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":2.0738852096E10, + "INDEX.sizeInGB":19.314561128616333}}]}}, + "N_g_solr":{"COLL_2":{ + "shard2_1_0":[{"core_node1681":{ + "core":"COLL_2_shard2_1_0_replica_n1680", + "shard":"shard2_1_0", + "collection":"COLL_2", + "node_name":"N_g_solr", + "type":"NRT", + "base_url":"http://N_g/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.3012044407E11, + "INDEX.sizeInGB":121.18410698138177}}], + "shard5_0_1":[{"core_node1771":{ + "core":"COLL_2_shard5_0_1_replica_n1770", + "shard":"shard5_0_1", + "collection":"COLL_2", + "node_name":"N_g_solr", + "type":"NRT", + "base_url":"http://N_g/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31464210597E11, + "INDEX.sizeInGB":122.43558708298951}}], + "shard5_1_0":[{"core_node1783":{ + "core":"COLL_2_shard5_1_0_replica_n1782", + "shard":"shard5_1_0", + 
"collection":"COLL_2", + "node_name":"N_g_solr", + "type":"NRT", + "base_url":"http://N_g/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30012462556E11, + "INDEX.sizeInGB":121.08354135975242}}], + "shard5_1_1":[{"core_node861":{ + "core":"COLL_2_shard5_1_1_replica_n859", + "shard":"shard5_1_1", + "collection":"COLL_2", + "node_name":"N_g_solr", + "type":"NRT", + "base_url":"http://N_g/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29967769078E11, + "INDEX.sizeInGB":121.04191731475294}}], + "shard5_0_0":[{"core_node1769":{ + "core":"COLL_2_shard5_0_0_replica_n1768", + "shard":"shard5_0_0", + "collection":"COLL_2", + "node_name":"N_g_solr", + "type":"NRT", + "base_url":"http://N_g/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31922267714E11, + "INDEX.sizeInGB":122.8621860165149}}], + "shard9_0_0":[{"core_node1683":{ + "core":"COLL_2_shard9_0_0_replica_n1682", + "shard":"shard9_0_0", + "collection":"COLL_2", + "node_name":"N_g_solr", + "type":"NRT", + "property.preferredleader":"true", + "base_url":"http://N_g/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29248772716E11, + "INDEX.sizeInGB":120.37229977175593}}]}}, + "N_17_solr":{"COLL_2":{ + "shard11_1_1":[{"core_node768":{ + "core":"COLL_2_shard11_1_1_replica_n762", + "shard":"shard11_1_1", + "collection":"COLL_2", + "node_name":"N_17_solr", + "type":"NRT", + "base_url":"http://N_17/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30871431234E11, + "INDEX.sizeInGB":121.88351828046143}}], + "shard14_0_0":[{"core_node1121":{ + "core":"COLL_2_shard14_0_0_replica_n1120", + "shard":"shard14_0_0", + "collection":"COLL_2", + "node_name":"N_17_solr", + "type":"NRT", + "base_url":"http://N_17/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.3029908264E11, + "INDEX.sizeInGB":121.3504771143198}}], + 
"shard18_0_1":[{"core_node877":{ + "core":"COLL_2_shard18_0_1_replica_n2", + "shard":"shard18_0_1", + "collection":"COLL_2", + "node_name":"N_17_solr", + "type":"NRT", + "base_url":"http://N_17/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28174988934E11, + "INDEX.sizeInGB":119.37226069532335}}], + "shard12_0_1":[{"core_node1699":{ + "core":"COLL_2_shard12_0_1_replica_n1698", + "shard":"shard12_0_1", + "collection":"COLL_2", + "node_name":"N_17_solr", + "type":"NRT", + "base_url":"http://N_17/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30350286057E11, + "INDEX.sizeInGB":121.39816401246935}}], + "shard12_0_0":[{"core_node1751":{ + "core":"COLL_2_shard12_0_0_replica_n1750", + "shard":"shard12_0_0", + "collection":"COLL_2", + "node_name":"N_17_solr", + "type":"NRT", + "base_url":"http://N_17/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2936875619E11, + "INDEX.sizeInGB":120.48404308967292}}], + "shard14_0_1":[{"core_node1123":{ + "core":"COLL_2_shard14_0_1_replica_n1122", + "shard":"shard14_0_1", + "collection":"COLL_2", + "node_name":"N_17_solr", + "type":"NRT", + "base_url":"http://N_17/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31146492351E11, + "INDEX.sizeInGB":122.13968890812248}}]}}, + "N_303_solr":{"COLL_2":{ + "shard16_0_1":[{"core_node987":{ + "core":"COLL_2_shard16_0_1_replica_n986", + "shard":"shard16_0_1", + "collection":"COLL_2", + "node_name":"N_303_solr", + "type":"NRT", + "base_url":"http://N_303/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30738903625E11, + "INDEX.sizeInGB":121.76009232643992}}], + "shard16_0_0":[{"core_node1785":{ + "core":"COLL_2_shard16_0_0_replica_n1784", + "shard":"shard16_0_0", + "collection":"COLL_2", + "node_name":"N_303_solr", + "type":"NRT", + "base_url":"http://N_303/solr", + "state":"active", + "force_set_state":"false", + 
"INDEX.sizeInBytes":1.26747476604E11, + "INDEX.sizeInGB":118.04278623685241}}], + "shard3_0_0":[{"core_node544":{ + "core":"COLL_2_shard3_0_0_replica_n2", + "shard":"shard3_0_0", + "collection":"COLL_2", + "node_name":"N_303_solr", + "type":"NRT", + "base_url":"http://N_303/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29792212268E11, + "INDEX.sizeInGB":120.87841729447246}}], + "shard9_1_1":[{"core_node1163":{ + "core":"COLL_2_shard9_1_1_replica_n1162", + "shard":"shard9_1_1", + "collection":"COLL_2", + "node_name":"N_303_solr", + "type":"NRT", + "base_url":"http://N_303/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.36568824379E11, + "INDEX.sizeInGB":127.18962913285941}}], + "shard9_1_0":[{"core_node1151":{ + "core":"COLL_2_shard9_1_0_replica_n1150", + "shard":"shard9_1_0", + "collection":"COLL_2", + "node_name":"N_303_solr", + "type":"NRT", + "base_url":"http://N_303/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31117387108E11, + "INDEX.sizeInGB":122.11258253827691}}], + "shard4_0_1":[{"core_node1773":{ + "core":"COLL_2_shard4_0_1_replica_n1772", + "shard":"shard4_0_1", + "collection":"COLL_2", + "node_name":"N_303_solr", + "type":"NRT", + "base_url":"http://N_303/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28126128215E11, + "INDEX.sizeInGB":119.3267556047067}}]}}, + "N_dj_solr":{"COLL_2":{ + "shard1_1_0":[{"core_node471":{ + "core":"COLL_2_shard1_1_0_replica_n1", + "shard":"shard1_1_0", + "collection":"COLL_2", + "node_name":"N_dj_solr", + "type":"NRT", + "base_url":"http://N_dj/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29057719236E11, + "INDEX.sizeInGB":120.19436735287309}}], + "shard7_1_0":[{"core_node928":{ + "core":"COLL_2_shard7_1_0_replica_n926", + "shard":"shard7_1_0", + "collection":"COLL_2", + "node_name":"N_dj_solr", + "type":"NRT", + 
"base_url":"http://N_dj/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29963886019E11, + "INDEX.sizeInGB":121.03830093424767}}], + "shard7_1_1":[{"core_node941":{ + "core":"COLL_2_shard7_1_1_replica_n927", + "shard":"shard7_1_1", + "collection":"COLL_2", + "node_name":"N_dj_solr", + "type":"NRT", + "base_url":"http://N_dj/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28538540188E11, + "INDEX.sizeInGB":119.71084418520331}}], + "shard18_0_1":[{"core_node773":{ + "core":"COLL_2_shard18_0_1_replica_n771", + "shard":"shard18_0_1", + "collection":"COLL_2", + "node_name":"N_dj_solr", + "type":"NRT", + "base_url":"http://N_dj/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30821199599E11, + "INDEX.sizeInGB":121.83673642482609}}], + "shard13_0_1":[{"core_node1715":{ + "core":"COLL_2_shard13_0_1_replica_n1714", + "shard":"shard13_0_1", + "collection":"COLL_2", + "node_name":"N_dj_solr", + "type":"NRT", + "base_url":"http://N_dj/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30355121703E11, + "INDEX.sizeInGB":121.402667558752}}], + "shard13_0_0":[{"core_node1749":{ + "core":"COLL_2_shard13_0_0_replica_n1748", + "shard":"shard13_0_0", + "collection":"COLL_2", + "node_name":"N_dj_solr", + "type":"NRT", + "base_url":"http://N_dj/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30427736106E11, + "INDEX.sizeInGB":121.47029499150813}}]}}, + "N_1c_solr":{"COLL_2":{ + "shard5_0_1":[{"core_node1703":{ + "core":"COLL_2_shard5_0_1_replica_n1702", + "shard":"shard5_0_1", + "collection":"COLL_2", + "node_name":"N_1c_solr", + "type":"NRT", + "base_url":"http://N_1c/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31521149156E11, + "INDEX.sizeInGB":122.48861524835229}}], + 
"shard5_1_0":[{"core_node1135":{ + "core":"COLL_2_shard5_1_0_replica_n1134", + "shard":"shard5_1_0", + "collection":"COLL_2", + "node_name":"N_1c_solr", + "type":"NRT", + "base_url":"http://N_1c/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30030877168E11, + "INDEX.sizeInGB":121.1006913036108}}], + "shard18_0_0":[{"core_node874":{ + "core":"COLL_2_shard18_0_0_replica_n1", + "shard":"shard18_0_0", + "collection":"COLL_2", + "node_name":"N_1c_solr", + "type":"NRT", + "base_url":"http://N_1c/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28011422432E11, + "INDEX.sizeInGB":119.21992751955986}}], + "shard5_1_1":[{"core_node1141":{ + "core":"COLL_2_shard5_1_1_replica_n1140", + "shard":"shard5_1_1", + "collection":"COLL_2", + "node_name":"N_1c_solr", + "type":"NRT", + "base_url":"http://N_1c/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29917464329E11, + "INDEX.sizeInGB":120.99506736639887}}], + "shard5_0_0":[{"core_node999":{ + "core":"COLL_2_shard5_0_0_replica_n998", + "shard":"shard5_0_0", + "collection":"COLL_2", + "node_name":"N_1c_solr", + "type":"NRT", + "base_url":"http://N_1c/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31937405764E11, + "INDEX.sizeInGB":122.87628442421556}}], + "shard18_0_1":[{"core_node876":{ + "core":"COLL_2_shard18_0_1_replica_n1", + "shard":"shard18_0_1", + "collection":"COLL_2", + "node_name":"N_1c_solr", + "type":"NRT", + "base_url":"http://N_1c/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30729375574E11, + "INDEX.sizeInGB":121.75121863745153}}]}}, + "N_z_solr":{"COLL_2":{ + "shard1_0_0":[{"core_node1717":{ + "core":"COLL_2_shard1_0_0_replica_n1716", + "shard":"shard1_0_0", + "collection":"COLL_2", + "node_name":"N_z_solr", + "type":"NRT", + "base_url":"http://N_z/solr", + "state":"active", + 
"force_set_state":"false", + "INDEX.sizeInBytes":5.7185112146E10, + "INDEX.sizeInGB":53.25778587348759}}], + "shard8_1_0":[{"core_node1707":{ + "core":"COLL_2_shard8_1_0_replica_n1706", + "shard":"shard8_1_0", + "collection":"COLL_2", + "node_name":"N_z_solr", + "type":"NRT", + "base_url":"http://N_z/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.35679630668E11, + "INDEX.sizeInGB":126.361502956599}}], + "shard8_0_0":[{"core_node1731":{ + "core":"COLL_2_shard8_0_0_replica_n1730", + "shard":"shard8_0_0", + "collection":"COLL_2", + "node_name":"N_z_solr", + "type":"NRT", + "base_url":"http://N_z/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30170301246E11, + "INDEX.sizeInGB":121.23054009489715}}], + "shard8_0_1":[{"core_node1695":{ + "core":"COLL_2_shard8_0_1_replica_n1694", + "shard":"shard8_0_1", + "collection":"COLL_2", + "node_name":"N_z_solr", + "type":"NRT", + "base_url":"http://N_z/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.39918850407E11, + "INDEX.sizeInGB":130.30958399828523}}], + "shard8_1_1":[{"core_node1755":{ + "core":"COLL_2_shard8_1_1_replica_n1754", + "shard":"shard8_1_1", + "collection":"COLL_2", + "node_name":"N_z_solr", + "type":"NRT", + "base_url":"http://N_z/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.33314153125E11, + "INDEX.sizeInGB":124.15848032105714}}], + "shard14_1_0":[{"core_node1127":{ + "core":"COLL_2_shard14_1_0_replica_n1126", + "shard":"shard14_1_0", + "collection":"COLL_2", + "node_name":"N_z_solr", + "type":"NRT", + "base_url":"http://N_z/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27443177079E11, + "INDEX.sizeInGB":118.69070779439062}}]}}, + "N_6_solr":{"COLL_2":{ + "shard8_1_0":[{"core_node1811":{ + "core":"COLL_2_shard8_1_0_replica_n1810", + "shard":"shard8_1_0", + "collection":"COLL_2", + 
"node_name":"N_6_solr", + "type":"NRT", + "base_url":"http://N_6/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.35679249773E11, + "INDEX.sizeInGB":126.36114822048694}}], + "shard4_0_0":[{"core_node520":{ + "core":"COLL_2_shard4_0_0_replica_n2", + "shard":"shard4_0_0", + "collection":"COLL_2", + "node_name":"N_6_solr", + "type":"NRT", + "base_url":"http://N_6/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28680029361E11, + "INDEX.sizeInGB":119.84261624608189}}], + "shard4_0_1":[{"core_node1803":{ + "core":"COLL_2_shard4_0_1_replica_n1802", + "shard":"shard4_0_1", + "collection":"COLL_2", + "node_name":"N_6_solr", + "type":"NRT", + "base_url":"http://N_6/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28153346526E11, + "INDEX.sizeInGB":119.35210463218391}}], + "shard9_0_0":[{"core_node1799":{ + "core":"COLL_2_shard9_0_0_replica_n1798", + "shard":"shard9_0_0", + "collection":"COLL_2", + "node_name":"N_6_solr", + "type":"NRT", + "base_url":"http://N_6/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.35157081196E11, + "INDEX.sizeInGB":125.874840836972}}], + "shard3_1_0":[{"core_node459":{ + "core":"COLL_2_shard3_1_0_replica_n1", + "shard":"shard3_1_0", + "collection":"COLL_2", + "node_name":"N_6_solr", + "type":"NRT", + "base_url":"http://N_6/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.32652501535E11, + "INDEX.sizeInGB":123.54226925875992}}], + "shard15_1_1":[{"core_node1709":{ + "core":"COLL_2_shard15_1_1_replica_n1708", + "shard":"shard15_1_1", + "collection":"COLL_2", + "node_name":"N_6_solr", + "type":"NRT", + "base_url":"http://N_6/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30846984322E11, + "INDEX.sizeInGB":121.86075031943619}}]}}, + "N_1m_solr":{"COLL_2":{ + "shard6_1_1":[{"core_node1745":{ + "core":"COLL_2_shard6_1_1_replica_n1744", + 
"shard":"shard6_1_1", + "collection":"COLL_2", + "node_name":"N_1m_solr", + "type":"NRT", + "base_url":"http://N_1m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31273933482E11, + "INDEX.sizeInGB":122.25837771035731}}], + "shard1_1_0":[{"core_node1679":{ + "core":"COLL_2_shard1_1_0_replica_n1678", + "shard":"shard1_1_0", + "collection":"COLL_2", + "node_name":"N_1m_solr", + "type":"NRT", + "base_url":"http://N_1m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28970690262E11, + "INDEX.sizeInGB":120.11331530474126}}], + "shard8_0_0":[{"core_node887":{ + "core":"COLL_2_shard8_0_0_replica_n886", + "shard":"shard8_0_0", + "collection":"COLL_2", + "node_name":"N_1m_solr", + "type":"NRT", + "base_url":"http://N_1m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30145902623E11, + "INDEX.sizeInGB":121.20781710650772}}], + "shard8_0_1":[{"core_node893":{ + "core":"COLL_2_shard8_0_1_replica_n892", + "shard":"shard8_0_1", + "collection":"COLL_2", + "node_name":"N_1m_solr", + "type":"NRT", + "base_url":"http://N_1m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.32681734677E11, + "INDEX.sizeInGB":123.56949474383146}}], + "shard8_1_1":[{"core_node1711":{ + "core":"COLL_2_shard8_1_1_replica_n1710", + "shard":"shard8_1_1", + "collection":"COLL_2", + "node_name":"N_1m_solr", + "type":"NRT", + "base_url":"http://N_1m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.33374089494E11, + "INDEX.sizeInGB":124.21430041454732}}], + "shard6_1_0":[{"core_node1167":{ + "core":"COLL_2_shard6_1_0_replica_n1166", + "shard":"shard6_1_0", + "collection":"COLL_2", + "node_name":"N_1m_solr", + "type":"NRT", + "base_url":"http://N_1m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29376799009E11, + "INDEX.sizeInGB":120.49153354857117}}]}}, + "N_4g_solr":{"COLL_2":{ + "shard8_1_1":[{"core_node1795":{ + 
"core":"COLL_2_shard8_1_1_replica_n1794", + "shard":"shard8_1_1", + "collection":"COLL_2", + "node_name":"N_4g_solr", + "type":"NRT", + "base_url":"http://N_4g/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.33276674177E11, + "INDEX.sizeInGB":124.1235753307119}}], + "shard9_1_1":[{"core_node944":{ + "core":"COLL_2_shard9_1_1_replica_n930", + "shard":"shard9_1_1", + "collection":"COLL_2", + "node_name":"N_4g_solr", + "type":"NRT", + "base_url":"http://N_4g/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.33928213329E11, + "INDEX.sizeInGB":124.73036845121533}}], + "shard9_1_0":[{"core_node931":{ + "core":"COLL_2_shard9_1_0_replica_n929", + "shard":"shard9_1_0", + "collection":"COLL_2", + "node_name":"N_4g_solr", + "type":"NRT", + "base_url":"http://N_4g/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31111103315E11, + "INDEX.sizeInGB":122.1067303000018}}], + "shard18_1_1":[{"core_node626":{ + "core":"COLL_2_shard18_1_1_replica_n624", + "shard":"shard18_1_1", + "collection":"COLL_2", + "node_name":"N_4g_solr", + "type":"NRT", + "base_url":"http://N_4g/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28190099634E11, + "INDEX.sizeInGB":119.38633363135159}}], + "shard18_1_0":[{"core_node625":{ + "core":"COLL_2_shard18_1_0_replica_n623", + "shard":"shard18_1_0", + "collection":"COLL_2", + "node_name":"N_4g_solr", + "type":"NRT", + "base_url":"http://N_4g/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28955475131E11, + "INDEX.sizeInGB":120.09914510976523}}], + "shard2_1_1":[{"core_node1813":{ + "core":"COLL_2_shard2_1_1_replica_n1812", + "shard":"shard2_1_1", + "collection":"COLL_2", + "node_name":"N_4g_solr", + "type":"NRT", + "base_url":"http://N_4g/solr", + "state":"active", + "force_set_state":"false", + 
"INDEX.sizeInBytes":1.28164947427E11, + "INDEX.sizeInGB":119.36290881317109}}]}}, + "N_65p_solr":{"COLL_2":{ + "shard7_0_0":[{"core_node774":{ + "core":"COLL_2_shard7_0_0_replica_n1", + "shard":"shard7_0_0", + "collection":"COLL_2", + "node_name":"N_65p_solr", + "type":"NRT", + "base_url":"http://N_65p/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29027793373E11, + "INDEX.sizeInGB":120.16649672109634}}], + "shard10_1_0":[{"core_node1797":{ + "core":"COLL_2_shard10_1_0_replica_n1796", + "shard":"shard10_1_0", + "collection":"COLL_2", + "node_name":"N_65p_solr", + "type":"NRT", + "base_url":"http://N_65p/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27583656591E11, + "INDEX.sizeInGB":118.82153953518718}}], + "shard3_0_0":[{"core_node543":{ + "core":"COLL_2_shard3_0_0_replica_n1", + "shard":"shard3_0_0", + "collection":"COLL_2", + "node_name":"N_65p_solr", + "type":"NRT", + "base_url":"http://N_65p/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29871412511E11, + "INDEX.sizeInGB":120.95217826869339}}], + "shard3_0_1":[{"core_node545":{ + "core":"COLL_2_shard3_0_1_replica_n1", + "shard":"shard3_0_1", + "collection":"COLL_2", + "node_name":"N_65p_solr", + "type":"NRT", + "base_url":"http://N_65p/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31838835644E11, + "INDEX.sizeInGB":122.784483846277}}], + "shard15_1_0":[{"core_node1173":{ + "core":"COLL_2_shard15_1_0_replica_n1172", + "shard":"shard15_1_0", + "collection":"COLL_2", + "node_name":"N_65p_solr", + "type":"NRT", + "base_url":"http://N_65p/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.33316507698E11, + "INDEX.sizeInGB":124.16067318804562}}], + "shard15_1_1":[{"core_node1747":{ + "core":"COLL_2_shard15_1_1_replica_n1746", + "shard":"shard15_1_1", + "collection":"COLL_2", + "node_name":"N_65p_solr", + "type":"NRT", + "base_url":"http://N_65p/solr", + 
"state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30883359905E11, + "INDEX.sizeInGB":121.89462772104889}}]}}, + "N_u_solr":{"COLL_2":{ + "shard8_1_0":[{"core_node1765":{ + "core":"COLL_2_shard8_1_0_replica_n1764", + "shard":"shard8_1_0", + "collection":"COLL_2", + "node_name":"N_u_solr", + "type":"NRT", + "base_url":"http://N_u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.35571920799E11, + "INDEX.sizeInGB":126.26119032409042}}], + "shard13_1_1":[{"core_node921":{ + "core":"COLL_2_shard13_1_1_replica_n920", + "shard":"shard13_1_1", + "collection":"COLL_2", + "node_name":"N_u_solr", + "type":"NRT", + "base_url":"http://N_u/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29634542289E11, + "INDEX.sizeInGB":120.73157568369061}}], + "shard15_0_1":[{"core_node734":{ + "core":"COLL_2_shard15_0_1_replica_n2", + "shard":"shard15_0_1", + "collection":"COLL_2", + "node_name":"N_u_solr", + "type":"NRT", + "base_url":"http://N_u/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27250282639E11, + "INDEX.sizeInGB":118.51106084790081}}], + "shard13_0_1":[{"core_node1263":{ + "core":"COLL_2_shard13_0_1_replica_n1262", + "shard":"shard13_0_1", + "collection":"COLL_2", + "node_name":"N_u_solr", + "type":"NRT", + "base_url":"http://N_u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30321828131E11, + "INDEX.sizeInGB":121.37166050355881}}], + "shard13_1_0":[{"core_node1763":{ + "core":"COLL_2_shard13_1_0_replica_n1762", + "shard":"shard13_1_0", + "collection":"COLL_2", + "node_name":"N_u_solr", + "type":"NRT", + "base_url":"http://N_u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29567251239E11, + "INDEX.sizeInGB":120.66890600975603}}], + "shard13_0_0":[{"core_node1257":{ + "core":"COLL_2_shard13_0_0_replica_n1256", + "shard":"shard13_0_0", + "collection":"COLL_2", + 
"node_name":"N_u_solr", + "type":"NRT", + "base_url":"http://N_u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30381429251E11, + "INDEX.sizeInGB":121.42716837208718}}]}}, + "N_1f_solr":{"COLL_2":{ + "shard11_0_1":[{"core_node1223":{ + "core":"COLL_2_shard11_0_1_replica_n1222", + "shard":"shard11_0_1", + "collection":"COLL_2", + "node_name":"N_1f_solr", + "type":"NRT", + "base_url":"http://N_1f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27989218509E11, + "INDEX.sizeInGB":119.19924850482494}}], + "shard11_1_0":[{"core_node779":{ + "core":"COLL_2_shard11_1_0_replica_n778", + "shard":"shard11_1_0", + "collection":"COLL_2", + "node_name":"N_1f_solr", + "type":"NRT", + "base_url":"http://N_1f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.32552454912E11, + "INDEX.sizeInGB":123.44909358024597}}], + "shard11_0_0":[{"core_node1217":{ + "core":"COLL_2_shard11_0_0_replica_n1216", + "shard":"shard11_0_0", + "collection":"COLL_2", + "node_name":"N_1f_solr", + "type":"NRT", + "base_url":"http://N_1f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27720861488E11, + "INDEX.sizeInGB":118.94932155311108}}], + "shard11_1_1":[{"core_node783":{ + "core":"COLL_2_shard11_1_1_replica_n782", + "shard":"shard11_1_1", + "collection":"COLL_2", + "node_name":"N_1f_solr", + "type":"NRT", + "base_url":"http://N_1f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30995783614E11, + "INDEX.sizeInGB":121.99933045916259}}], + "shard5_0_1":[{"core_node1003":{ + "core":"COLL_2_shard5_0_1_replica_n1002", + "shard":"shard5_0_1", + "collection":"COLL_2", + "node_name":"N_1f_solr", + "type":"NRT", + "base_url":"http://N_1f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31534942129E11, + "INDEX.sizeInGB":122.50146095547825}}], + "shard5_0_0":[{"core_node1001":{ + "core":"COLL_2_shard5_0_0_replica_n1000", + 
"shard":"shard5_0_0", + "collection":"COLL_2", + "node_name":"N_1f_solr", + "type":"NRT", + "base_url":"http://N_1f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31960210955E11, + "INDEX.sizeInGB":122.89752341341227}}]}}, + "N_cs_solr":{"COLL_2":{ + "shard6_1_1":[{"core_node1705":{ + "core":"COLL_2_shard6_1_1_replica_n1704", + "shard":"shard6_1_1", + "collection":"COLL_2", + "node_name":"N_cs_solr", + "type":"NRT", + "base_url":"http://N_cs/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31274462707E11, + "INDEX.sizeInGB":122.25887058954686}}], + "shard10_0_1":[{"core_node828":{ + "core":"COLL_2_shard10_0_1_replica_n826", + "shard":"shard10_0_1", + "collection":"COLL_2", + "node_name":"N_cs_solr", + "type":"NRT", + "base_url":"http://N_cs/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28038688927E11, + "INDEX.sizeInGB":119.245321421884}}], + "shard6_1_0":[{"core_node937":{ + "core":"COLL_2_shard6_1_0_replica_n935", + "shard":"shard6_1_0", + "collection":"COLL_2", + "node_name":"N_cs_solr", + "type":"NRT", + "base_url":"http://N_cs/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29597529819E11, + "INDEX.sizeInGB":120.69710513483733}}], + "shard15_1_0":[{"core_node955":{ + "core":"COLL_2_shard15_1_0_replica_n953", + "shard":"shard15_1_0", + "collection":"COLL_2", + "node_name":"N_cs_solr", + "type":"NRT", + "base_url":"http://N_cs/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.33515745782E11, + "INDEX.sizeInGB":124.34622811339796}}], + "shard10_0_0":[{"core_node827":{ + "core":"COLL_2_shard10_0_0_replica_n825", + "shard":"shard10_0_0", + "collection":"COLL_2", + "node_name":"N_cs_solr", + "type":"NRT", + "base_url":"http://N_cs/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + 
"INDEX.sizeInBytes":1.29486149433E11, + "INDEX.sizeInGB":120.59337406698614}}], + "shard15_1_1":[{"core_node956":{ + "core":"COLL_2_shard15_1_1_replica_n954", + "shard":"shard15_1_1", + "collection":"COLL_2", + "node_name":"N_cs_solr", + "type":"NRT", + "base_url":"http://N_cs/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30865977458E11, + "INDEX.sizeInGB":121.87843905575573}}]}}, + "N_8_solr":{"COLL_2":{ + "shard16_1_1":[{"core_node853":{ + "core":"COLL_2_shard16_1_1_replica_n851", + "shard":"shard16_1_1", + "collection":"COLL_2", + "node_name":"N_8_solr", + "type":"NRT", + "base_url":"http://N_8/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.33685050832E11, + "INDEX.sizeInGB":124.50390572845936}}], + "shard16_0_1":[{"core_node857":{ + "core":"COLL_2_shard16_0_1_replica_n855", + "shard":"shard16_0_1", + "collection":"COLL_2", + "node_name":"N_8_solr", + "type":"NRT", + "base_url":"http://N_8/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30788718518E11, + "INDEX.sizeInGB":121.80648606084287}}], + "shard16_1_0":[{"core_node852":{ + "core":"COLL_2_shard16_1_0_replica_n850", + "shard":"shard16_1_0", + "collection":"COLL_2", + "node_name":"N_8_solr", + "type":"NRT", + "base_url":"http://N_8/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28801317856E11, + "INDEX.sizeInGB":119.95557495951653}}], + "shard16_0_0":[{"core_node856":{ + "core":"COLL_2_shard16_0_0_replica_n854", + "shard":"shard16_0_0", + "collection":"COLL_2", + "node_name":"N_8_solr", + "type":"NRT", + "base_url":"http://N_8/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2677230126E11, + "INDEX.sizeInGB":118.06590599939227}}], + "shard2_0_0":[{"core_node796":{ + "core":"COLL_2_shard2_0_0_replica_n794", + "shard":"shard2_0_0", + "collection":"COLL_2", 
+ "node_name":"N_8_solr", + "type":"NRT", + "base_url":"http://N_8/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29517293483E11, + "INDEX.sizeInGB":120.6223792238161}}], + "shard2_0_1":[{"core_node800":{ + "core":"COLL_2_shard2_0_1_replica_n795", + "shard":"shard2_0_1", + "collection":"COLL_2", + "node_name":"N_8_solr", + "type":"NRT", + "base_url":"http://N_8/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31328007233E11, + "INDEX.sizeInGB":122.30873781535774}}]}}, + "N_a_solr":{"COLL_2":{ + "shard3_0_0":[{"core_node1809":{ + "core":"COLL_2_shard3_0_0_replica_n1808", + "shard":"shard3_0_0", + "collection":"COLL_2", + "node_name":"N_a_solr", + "type":"NRT", + "base_url":"http://N_a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29798330608E11, + "INDEX.sizeInGB":120.88411544263363}}], + "shard14_0_0":[{"core_node1119":{ + "core":"COLL_2_shard14_0_0_replica_n1118", + "shard":"shard14_0_0", + "collection":"COLL_2", + "node_name":"N_a_solr", + "type":"NRT", + "base_url":"http://N_a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30313698451E11, + "INDEX.sizeInGB":121.36408914905041}}], + "shard15_1_0":[{"core_node1175":{ + "core":"COLL_2_shard15_1_0_replica_n1174", + "shard":"shard15_1_0", + "collection":"COLL_2", + "node_name":"N_a_solr", + "type":"NRT", + "base_url":"http://N_a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.33321224738E11, + "INDEX.sizeInGB":124.16506627388299}}], + "shard14_1_1":[{"core_node836":{ + "core":"COLL_2_shard14_1_1_replica_n834", + "shard":"shard14_1_1", + "collection":"COLL_2", + "node_name":"N_a_solr", + "type":"NRT", + "base_url":"http://N_a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29318568492E11, + "INDEX.sizeInGB":120.43730215355754}}], + "shard14_0_1":[{"core_node1125":{ + 
"core":"COLL_2_shard14_0_1_replica_n1124", + "shard":"shard14_0_1", + "collection":"COLL_2", + "node_name":"N_a_solr", + "type":"NRT", + "base_url":"http://N_a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31102045065E11, + "INDEX.sizeInGB":122.09829414729029}}], + "shard14_1_0":[{"core_node835":{ + "core":"COLL_2_shard14_1_0_replica_n833", + "shard":"shard14_1_0", + "collection":"COLL_2", + "node_name":"N_a_solr", + "type":"NRT", + "base_url":"http://N_a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27418065808E11, + "INDEX.sizeInGB":118.66732110083103}}]}}, + "N_3a7_solr":{"COLL_2":{ + "shard7_0_0":[{"core_node775":{ + "core":"COLL_2_shard7_0_0_replica_n2", + "shard":"shard7_0_0", + "collection":"COLL_2", + "node_name":"N_3a7_solr", + "type":"NRT", + "base_url":"http://N_3a7/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29074533898E11, + "INDEX.sizeInGB":120.21002722717822}}], + "shard2_0_0":[{"core_node1823":{ + "core":"COLL_2_shard2_0_0_replica_n1822", + "shard":"shard2_0_0", + "collection":"COLL_2", + "node_name":"N_3a7_solr", + "type":"NRT", + "base_url":"http://N_3a7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29476268104E11, + "INDEX.sizeInGB":120.58417136222124}}], + "shard14_0_0":[{"core_node839":{ + "core":"COLL_2_shard14_0_0_replica_n837", + "shard":"shard14_0_0", + "collection":"COLL_2", + "node_name":"N_3a7_solr", + "type":"NRT", + "base_url":"http://N_3a7/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30330451538E11, + "INDEX.sizeInGB":121.37969167716801}}], + "shard3_1_1":[{"core_node462":{ + "core":"COLL_2_shard3_1_1_replica_n2", + "shard":"shard3_1_1", + "collection":"COLL_2", + "node_name":"N_3a7_solr", + "type":"NRT", + "base_url":"http://N_3a7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2992912768E11, + 
"INDEX.sizeInGB":121.00592970848083}}], + "shard14_1_1":[{"core_node1825":{ + "core":"COLL_2_shard14_1_1_replica_n1824", + "shard":"shard14_1_1", + "collection":"COLL_2", + "node_name":"N_3a7_solr", + "type":"NRT", + "base_url":"http://N_3a7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.300425186E11, + "INDEX.sizeInGB":121.11153323203325}}], + "shard14_0_1":[{"core_node841":{ + "core":"COLL_2_shard14_0_1_replica_n838", + "shard":"shard14_0_1", + "collection":"COLL_2", + "node_name":"N_3a7_solr", + "type":"NRT", + "base_url":"http://N_3a7/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31168916273E11, + "INDEX.sizeInGB":122.1605728128925}}]}}, + "N_11_solr":{"COLL_2":{ + "shard6_0_0":[{"core_node1210":{ + "core":"COLL_2_shard6_0_0_replica_n1209", + "shard":"shard6_0_0", + "collection":"COLL_2", + "node_name":"N_11_solr", + "type":"NRT", + "base_url":"http://N_11/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28939953876E11, + "INDEX.sizeInGB":120.08468981459737}}], + "shard6_0_1":[{"core_node1212":{ + "core":"COLL_2_shard6_0_1_replica_n1211", + "shard":"shard6_0_1", + "collection":"COLL_2", + "node_name":"N_11_solr", + "type":"NRT", + "base_url":"http://N_11/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28744354495E11, + "INDEX.sizeInGB":119.90252369549125}}], + "shard9_1_1":[{"core_node1155":{ + "core":"COLL_2_shard9_1_1_replica_n1154", + "shard":"shard9_1_1", + "collection":"COLL_2", + "node_name":"N_11_solr", + "type":"NRT", + "base_url":"http://N_11/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.33894519282E11, + "INDEX.sizeInGB":124.69898842461407}}], + "shard9_1_0":[{"core_node1153":{ + "core":"COLL_2_shard9_1_0_replica_n1152", + "shard":"shard9_1_0", + "collection":"COLL_2", + "node_name":"N_11_solr", + "type":"NRT", + "base_url":"http://N_11/solr", + "state":"active", + 
"force_set_state":"false", + "INDEX.sizeInBytes":1.31406038908E11, + "INDEX.sizeInGB":122.3814104758203}}], + "shard9_0_1":[{"core_node438":{ + "core":"COLL_2_shard9_0_1_replica_n436", + "shard":"shard9_0_1", + "collection":"COLL_2", + "node_name":"N_11_solr", + "type":"NRT", + "base_url":"http://N_11/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29282915395E11, + "INDEX.sizeInGB":120.40409761946648}}], + "shard12_1_1":[{"core_node662":{ + "core":"COLL_2_shard12_1_1_replica_n2", + "shard":"shard12_1_1", + "collection":"COLL_2", + "node_name":"N_11_solr", + "type":"NRT", + "base_url":"http://N_11/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.26693447901E11, + "INDEX.sizeInGB":117.99246808607131}}]}}, + "N_4f_solr":{"COLL_2":{ + "shard2_0_1":[{"core_node915":{ + "core":"COLL_2_shard2_0_1_replica_n914", + "shard":"shard2_0_1", + "collection":"COLL_2", + "node_name":"N_4f_solr", + "type":"NRT", + "base_url":"http://N_4f/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31386626219E11, + "INDEX.sizeInGB":122.36333100032061}}], + "shard2_1_0":[{"core_node975":{ + "core":"COLL_2_shard2_1_0_replica_n974", + "shard":"shard2_1_0", + "collection":"COLL_2", + "node_name":"N_4f_solr", + "type":"NRT", + "base_url":"http://N_4f/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.3001251468E11, + "INDEX.sizeInGB":121.0835899040103}}], + "shard6_0_0":[{"core_node1182":{ + "core":"COLL_2_shard6_0_0_replica_n1180", + "shard":"shard6_0_0", + "collection":"COLL_2", + "node_name":"N_4f_solr", + "type":"NRT", + "base_url":"http://N_4f/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28922958966E11, + "INDEX.sizeInGB":120.06886207126081}}], + "shard6_0_1":[{"core_node1189":{ + "core":"COLL_2_shard6_0_1_replica_n1181", + "shard":"shard6_0_1", + "collection":"COLL_2", + "node_name":"N_4f_solr", + 
"type":"NRT", + "base_url":"http://N_4f/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28773562289E11, + "INDEX.sizeInGB":119.92972557339817}}], + "shard3_0_1":[{"core_node546":{ + "core":"COLL_2_shard3_0_1_replica_n2", + "shard":"shard3_0_1", + "collection":"COLL_2", + "node_name":"N_4f_solr", + "type":"NRT", + "base_url":"http://N_4f/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31838927317E11, + "INDEX.sizeInGB":122.78456922341138}}], + "shard2_1_1":[{"core_node1685":{ + "core":"COLL_2_shard2_1_1_replica_n1684", + "shard":"shard2_1_1", + "collection":"COLL_2", + "node_name":"N_4f_solr", + "type":"NRT", + "base_url":"http://N_4f/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2812596905E11, + "INDEX.sizeInGB":119.32660737074912}}]}}, + "N_1i_solr":{"COLL_2":{ + "shard17_1_0":[{"core_node1200":{ + "core":"COLL_2_shard17_1_0_replica_n1198", + "shard":"shard17_1_0", + "collection":"COLL_2", + "node_name":"N_1i_solr", + "type":"NRT", + "base_url":"http://N_1i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29069936299E11, + "INDEX.sizeInGB":120.20574537944049}}], + "shard17_0_1":[{"core_node1117":{ + "core":"COLL_2_shard17_0_1_replica_n1116", + "shard":"shard17_0_1", + "collection":"COLL_2", + "node_name":"N_1i_solr", + "type":"NRT", + "base_url":"http://N_1i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30694171889E11, + "INDEX.sizeInGB":121.71843265090138}}], + "shard10_1_1":[{"core_node1779":{ + "core":"COLL_2_shard10_1_1_replica_n1778", + "shard":"shard10_1_1", + "collection":"COLL_2", + "node_name":"N_1i_solr", + "type":"NRT", + "base_url":"http://N_1i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30255789623E11, + "INDEX.sizeInGB":121.31015735026449}}], + "shard17_0_0":[{"core_node1781":{ + 
"core":"COLL_2_shard17_0_0_replica_n1780", + "shard":"shard17_0_0", + "collection":"COLL_2", + "node_name":"N_1i_solr", + "type":"NRT", + "base_url":"http://N_1i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30702509646E11, + "INDEX.sizeInGB":121.72619779221714}}], + "shard10_1_0":[{"core_node1693":{ + "core":"COLL_2_shard10_1_0_replica_n1692", + "shard":"shard10_1_0", + "collection":"COLL_2", + "node_name":"N_1i_solr", + "type":"NRT", + "base_url":"http://N_1i/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27561685082E11, + "INDEX.sizeInGB":118.80107697285712}}], + "shard17_1_1":[{"core_node1203":{ + "core":"COLL_2_shard17_1_1_replica_n1199", + "shard":"shard17_1_1", + "collection":"COLL_2", + "node_name":"N_1i_solr", + "type":"NRT", + "base_url":"http://N_1i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28764084367E11, + "INDEX.sizeInGB":119.92089857067913}}]}}, + "N_9o_solr":{"COLL_2":{ + "shard11_0_1":[{"core_node1221":{ + "core":"COLL_2_shard11_0_1_replica_n1220", + "shard":"shard11_0_1", + "collection":"COLL_2", + "node_name":"N_9o_solr", + "type":"NRT", + "base_url":"http://N_9o/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28020049235E11, + "INDEX.sizeInGB":119.22796185594052}}], + "shard11_1_0":[{"core_node781":{ + "core":"COLL_2_shard11_1_0_replica_n780", + "shard":"shard11_1_0", + "collection":"COLL_2", + "node_name":"N_9o_solr", + "type":"NRT", + "base_url":"http://N_9o/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.32420261013E11, + "INDEX.sizeInGB":123.32597841788083}}], + "shard11_0_0":[{"core_node1219":{ + "core":"COLL_2_shard11_0_0_replica_n1218", + "shard":"shard11_0_0", + "collection":"COLL_2", + "node_name":"N_9o_solr", + "type":"NRT", + "base_url":"http://N_9o/solr", + "state":"active", + "force_set_state":"false", + 
"INDEX.sizeInBytes":1.28002391411E11, + "INDEX.sizeInGB":119.21151672583073}}], + "shard7_0_0":[{"core_node766":{ + "core":"COLL_2_shard7_0_0_replica_n764", + "shard":"shard7_0_0", + "collection":"COLL_2", + "node_name":"N_9o_solr", + "type":"NRT", + "base_url":"http://N_9o/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28994593549E11, + "INDEX.sizeInGB":120.13557697553188}}], + "shard11_1_1":[{"core_node785":{ + "core":"COLL_2_shard11_1_1_replica_n784", + "shard":"shard11_1_1", + "collection":"COLL_2", + "node_name":"N_9o_solr", + "type":"NRT", + "base_url":"http://N_9o/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30909357727E11, + "INDEX.sizeInGB":121.91884007956833}}], + "shard7_0_1":[{"core_node769":{ + "core":"COLL_2_shard7_0_1_replica_n765", + "shard":"shard7_0_1", + "collection":"COLL_2", + "node_name":"N_9o_solr", + "type":"NRT", + "base_url":"http://N_9o/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28908501869E11, + "INDEX.sizeInGB":120.0553978504613}}]}}, + "N_2_solr":{"COLL_2":{ + "shard5_1_0":[{"core_node1137":{ + "core":"COLL_2_shard5_1_0_replica_n1136", + "shard":"shard5_1_0", + "collection":"COLL_2", + "node_name":"N_2_solr", + "type":"NRT", + "base_url":"http://N_2/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":7.6877250282E10, + "INDEX.sizeInGB":71.59751866199076}}], + "shard5_1_1":[{"core_node1139":{ + "core":"COLL_2_shard5_1_1_replica_n1138", + "shard":"shard5_1_1", + "collection":"COLL_2", + "node_name":"N_2_solr", + "type":"NRT", + "base_url":"http://N_2/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29952609098E11, + "INDEX.sizeInGB":121.02779848314822}}], + "shard7_0_1":[{"core_node776":{ + "core":"COLL_2_shard7_0_1_replica_n1", + "shard":"shard7_0_1", + "collection":"COLL_2", + "node_name":"N_2_solr", + "type":"NRT", + "base_url":"http://N_2/solr", + "leader":"true", + 
"state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2890128588E11, + "INDEX.sizeInGB":120.04867743700743}}], + "shard9_0_1":[{"core_node478":{ + "core":"COLL_2_shard9_0_1_replica_n2", + "shard":"shard9_0_1", + "collection":"COLL_2", + "node_name":"N_2_solr", + "type":"NRT", + "base_url":"http://N_2/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29212951693E11, + "INDEX.sizeInGB":120.33893884439021}}], + "shard12_0_1":[{"core_node1255":{ + "core":"COLL_2_shard12_0_1_replica_n1254", + "shard":"shard12_0_1", + "collection":"COLL_2", + "node_name":"N_2_solr", + "type":"NRT", + "base_url":"http://N_2/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30384315739E11, + "INDEX.sizeInGB":121.42985662352294}}], + "shard12_0_0":[{"core_node1249":{ + "core":"COLL_2_shard12_0_0_replica_n1248", + "shard":"shard12_0_0", + "collection":"COLL_2", + "node_name":"N_2_solr", + "type":"NRT", + "base_url":"http://N_2/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29421522442E11, + "INDEX.sizeInGB":120.53318549133837}}]}}, + "N_t_solr":{"COLL_2":{ + "shard11_0_1":[{"core_node1195":{ + "core":"COLL_2_shard11_0_1_replica_n1184", + "shard":"shard11_0_1", + "collection":"COLL_2", + "node_name":"N_t_solr", + "type":"NRT", + "base_url":"http://N_t/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27980394382E11, + "INDEX.sizeInGB":119.19103039614856}}], + "shard11_1_0":[{"core_node1791":{ + "core":"COLL_2_shard11_1_0_replica_n1790", + "shard":"shard11_1_0", + "collection":"COLL_2", + "node_name":"N_t_solr", + "type":"NRT", + "base_url":"http://N_t/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.32416023485E11, + "INDEX.sizeInGB":123.32203191239387}}], + "shard11_0_0":[{"core_node1185":{ + "core":"COLL_2_shard11_0_0_replica_n1183", + 
"shard":"shard11_0_0", + "collection":"COLL_2", + "node_name":"N_t_solr", + "type":"NRT", + "base_url":"http://N_t/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2777477116E11, + "INDEX.sizeInGB":118.99952884763479}}], + "shard10_1_1":[{"core_node1743":{ + "core":"COLL_2_shard10_1_1_replica_n1742", + "shard":"shard10_1_1", + "collection":"COLL_2", + "node_name":"N_t_solr", + "type":"NRT", + "base_url":"http://N_t/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30757016285E11, + "INDEX.sizeInGB":121.77696105558425}}], + "shard10_0_1":[{"core_node905":{ + "core":"COLL_2_shard10_0_1_replica_n904", + "shard":"shard10_0_1", + "collection":"COLL_2", + "node_name":"N_t_solr", + "type":"NRT", + "base_url":"http://N_t/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28142990156E11, + "INDEX.sizeInGB":119.34245951101184}}], + "shard10_0_0":[{"core_node1733":{ + "core":"COLL_2_shard10_0_0_replica_n1732", + "shard":"shard10_0_0", + "collection":"COLL_2", + "node_name":"N_t_solr", + "type":"NRT", + "base_url":"http://N_t/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2914349283E11, + "INDEX.sizeInGB":120.27425023727119}}]}}, + "N_2u_solr":{"COLL_2":{ + "shard17_1_0":[{"core_node1225":{ + "core":"COLL_2_shard17_1_0_replica_n1224", + "shard":"shard17_1_0", + "collection":"COLL_2", + "node_name":"N_2u_solr", + "type":"NRT", + "base_url":"http://N_2u/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29066474889E11, + "INDEX.sizeInGB":120.20252169016749}}], + "shard17_0_1":[{"core_node1115":{ + "core":"COLL_2_shard17_0_1_replica_n1114", + "shard":"shard17_0_1", + "collection":"COLL_2", + "node_name":"N_2u_solr", + "type":"NRT", + "base_url":"http://N_2u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30049647193E11, + 
"INDEX.sizeInGB":121.1181722516194}}], + "shard17_0_0":[{"core_node1735":{ + "core":"COLL_2_shard17_0_0_replica_n1734", + "shard":"shard17_0_0", + "collection":"COLL_2", + "node_name":"N_2u_solr", + "type":"NRT", + "base_url":"http://N_2u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31102615765E11, + "INDEX.sizeInGB":122.09882565308362}}], + "shard3_1_1":[{"core_node461":{ + "core":"COLL_2_shard3_1_1_replica_n1", + "shard":"shard3_1_1", + "collection":"COLL_2", + "node_name":"N_2u_solr", + "type":"NRT", + "base_url":"http://N_2u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29953637358E11, + "INDEX.sizeInGB":121.02875612489879}}], + "shard17_1_1":[{"core_node1231":{ + "core":"COLL_2_shard17_1_1_replica_n1230", + "shard":"shard17_1_1", + "collection":"COLL_2", + "node_name":"N_2u_solr", + "type":"NRT", + "base_url":"http://N_2u/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.287734207E11, + "INDEX.sizeInGB":119.92959370836616}}], + "shard12_1_0":[{"core_node660":{ + "core":"COLL_2_shard12_1_0_replica_n2", + "shard":"shard12_1_0", + "collection":"COLL_2", + "node_name":"N_2u_solr", + "type":"NRT", + "base_url":"http://N_2u/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27387972534E11, + "INDEX.sizeInGB":118.63929455541074}}]}}, + "N_m_solr":{"COLL_2":{ + "shard6_1_1":[{"core_node1171":{ + "core":"COLL_2_shard6_1_1_replica_n1170", + "shard":"shard6_1_1", + "collection":"COLL_2", + "node_name":"N_m_solr", + "type":"NRT", + "base_url":"http://N_m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31256081422E11, + "INDEX.sizeInGB":122.24175168387592}}], + "shard17_1_0":[{"core_node1227":{ + "core":"COLL_2_shard17_1_0_replica_n1226", + "shard":"shard17_1_0", + "collection":"COLL_2", + "node_name":"N_m_solr", + "type":"NRT", + "base_url":"http://N_m/solr", + "state":"active", + 
"force_set_state":"false", + "INDEX.sizeInBytes":1.29049722959E11, + "INDEX.sizeInGB":120.18692023959011}}], + "shard6_0_0":[{"core_node1208":{ + "core":"COLL_2_shard6_0_0_replica_n1207", + "shard":"shard6_0_0", + "collection":"COLL_2", + "node_name":"N_m_solr", + "type":"NRT", + "base_url":"http://N_m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28936808614E11, + "INDEX.sizeInGB":120.08176056109369}}], + "shard6_0_1":[{"core_node1214":{ + "core":"COLL_2_shard6_0_1_replica_n1213", + "shard":"shard6_0_1", + "collection":"COLL_2", + "node_name":"N_m_solr", + "type":"NRT", + "base_url":"http://N_m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28745543493E11, + "INDEX.sizeInGB":119.90363103616983}}], + "shard9_0_1":[{"core_node477":{ + "core":"COLL_2_shard9_0_1_replica_n1", + "shard":"shard9_0_1", + "collection":"COLL_2", + "node_name":"N_m_solr", + "type":"NRT", + "base_url":"http://N_m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29063920601E11, + "INDEX.sizeInGB":120.20014282409102}}], + "shard17_1_1":[{"core_node1229":{ + "core":"COLL_2_shard17_1_1_replica_n1228", + "shard":"shard17_1_1", + "collection":"COLL_2", + "node_name":"N_m_solr", + "type":"NRT", + "base_url":"http://N_m/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28816978409E11, + "INDEX.sizeInGB":119.97015998605639}}]}}, + "N_7_solr":{"COLL_2":{ + "shard13_1_1":[{"core_node808":{ + "core":"COLL_2_shard13_1_1_replica_n806", + "shard":"shard13_1_1", + "collection":"COLL_2", + "node_name":"N_7_solr", + "type":"NRT", + "base_url":"http://N_7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2961448776E11, + "INDEX.sizeInGB":120.71289844810963}}], + "shard15_0_1":[{"core_node610":{ + "core":"COLL_2_shard15_0_1_replica_n608", + "shard":"shard15_0_1", + "collection":"COLL_2", + "node_name":"N_7_solr", + "type":"NRT", + 
"base_url":"http://N_7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2722802278E11, + "INDEX.sizeInGB":118.49032973870635}}], + "shard15_0_0":[{"core_node609":{ + "core":"COLL_2_shard15_0_0_replica_n607", + "shard":"shard15_0_0", + "collection":"COLL_2", + "node_name":"N_7_solr", + "type":"NRT", + "base_url":"http://N_7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27258670055E11, + "INDEX.sizeInGB":118.5188722377643}}], + "shard13_0_1":[{"core_node1767":{ + "core":"COLL_2_shard13_0_1_replica_n1766", + "shard":"shard13_0_1", + "collection":"COLL_2", + "node_name":"N_7_solr", + "type":"NRT", + "base_url":"http://N_7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30339106107E11, + "INDEX.sizeInGB":121.38775187265128}}], + "shard13_1_0":[{"core_node1689":{ + "core":"COLL_2_shard13_1_0_replica_n1688", + "shard":"shard13_1_0", + "collection":"COLL_2", + "node_name":"N_7_solr", + "type":"NRT", + "base_url":"http://N_7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29592823396E11, + "INDEX.sizeInGB":120.69272193685174}}], + "shard13_0_0":[{"core_node1713":{ + "core":"COLL_2_shard13_0_0_replica_n1712", + "shard":"shard13_0_0", + "collection":"COLL_2", + "node_name":"N_7_solr", + "type":"NRT", + "base_url":"http://N_7/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30437704659E11, + "INDEX.sizeInGB":121.47957892995328}}]}}, + "N_6c_solr":{"COLL_2":{ + "shard17_0_1":[{"core_node848":{ + "core":"COLL_2_shard17_0_1_replica_n843", + "shard":"shard17_0_1", + "collection":"COLL_2", + "node_name":"N_6c_solr", + "type":"NRT", + "base_url":"http://N_6c/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30730929322E11, + "INDEX.sizeInGB":121.7526656780392}}], + "shard17_0_0":[{"core_node844":{ + "core":"COLL_2_shard17_0_0_replica_n842", + "shard":"shard17_0_0", + 
"collection":"COLL_2", + "node_name":"N_6c_solr", + "type":"NRT", + "base_url":"http://N_6c/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30743109221E11, + "INDEX.sizeInGB":121.76400909293443}}], + "shard4_0_0":[{"core_node445":{ + "core":"COLL_2_shard4_0_0_replica_n443", + "shard":"shard4_0_0", + "collection":"COLL_2", + "node_name":"N_6c_solr", + "type":"NRT", + "base_url":"http://N_6c/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28741762257E11, + "INDEX.sizeInGB":119.90010948572308}}], + "shard4_1_0":[{"core_node457":{ + "core":"COLL_2_shard4_1_0_replica_n455", + "shard":"shard4_1_0", + "collection":"COLL_2", + "node_name":"N_6c_solr", + "type":"NRT", + "base_url":"http://N_6c/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27664473589E11, + "INDEX.sizeInGB":118.89680622983724}}], + "shard4_0_1":[{"core_node446":{ + "core":"COLL_2_shard4_0_1_replica_n444", + "shard":"shard4_0_1", + "collection":"COLL_2", + "node_name":"N_6c_solr", + "type":"NRT", + "base_url":"http://N_6c/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28032413116E11, + "INDEX.sizeInGB":119.23947661742568}}], + "shard4_1_1":[{"core_node458":{ + "core":"COLL_2_shard4_1_1_replica_n456", + "shard":"shard4_1_1", + "collection":"COLL_2", + "node_name":"N_6c_solr", + "type":"NRT", + "base_url":"http://N_6c/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27865802727E11, + "INDEX.sizeInGB":119.08430860098451}}]}}, + "N_6i_solr":{"COLL_2":{ + "shard10_1_1":[{"core_node840":{ + "core":"COLL_2_shard10_1_1_replica_n830", + "shard":"shard10_1_1", + "collection":"COLL_2", + "node_name":"N_6i_solr", + "type":"NRT", + "base_url":"http://N_6i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30273229534E11, + 
"INDEX.sizeInGB":121.32639953307807}}], + "shard10_1_0":[{"core_node831":{ + "core":"COLL_2_shard10_1_0_replica_n829", + "shard":"shard10_1_0", + "collection":"COLL_2", + "node_name":"N_6i_solr", + "type":"NRT", + "base_url":"http://N_6i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27564995026E11, + "INDEX.sizeInGB":118.80415959842503}}], + "shard10_0_1":[{"core_node1739":{ + "core":"COLL_2_shard10_0_1_replica_n1738", + "shard":"shard10_0_1", + "collection":"COLL_2", + "node_name":"N_6i_solr", + "type":"NRT", + "base_url":"http://N_6i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28024871739E11, + "INDEX.sizeInGB":119.2324531627819}}], + "shard2_1_0":[{"core_node1727":{ + "core":"COLL_2_shard2_1_0_replica_n1726", + "shard":"shard2_1_0", + "collection":"COLL_2", + "node_name":"N_6i_solr", + "type":"NRT", + "base_url":"http://N_6i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30025926492E11, + "INDEX.sizeInGB":121.0960806272924}}], + "shard10_0_0":[{"core_node897":{ + "core":"COLL_2_shard10_0_0_replica_n896", + "shard":"shard10_0_0", + "collection":"COLL_2", + "node_name":"N_6i_solr", + "type":"NRT", + "base_url":"http://N_6i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29103730913E11, + "INDEX.sizeInGB":120.2372190663591}}], + "shard2_1_1":[{"core_node979":{ + "core":"COLL_2_shard2_1_1_replica_n978", + "shard":"shard2_1_1", + "collection":"COLL_2", + "node_name":"N_6i_solr", + "type":"NRT", + "base_url":"http://N_6i/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2815510735E11, + "INDEX.sizeInGB":119.35374452732503}}]}}, + "N_3_solr":{"COLL_2":{ + "shard16_1_1":[{"core_node997":{ + "core":"COLL_2_shard16_1_1_replica_n996", + "shard":"shard16_1_1", + "collection":"COLL_2", + "node_name":"N_3_solr", + "type":"NRT", + "base_url":"http://N_3/solr", + "state":"active", + "force_set_state":"false", + 
"INDEX.sizeInBytes":1.26611980672E11, + "INDEX.sizeInGB":117.91659581661224}}], + "shard16_1_0":[{"core_node991":{ + "core":"COLL_2_shard16_1_0_replica_n990", + "shard":"shard16_1_0", + "collection":"COLL_2", + "node_name":"N_3_solr", + "type":"NRT", + "base_url":"http://N_3/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28724323652E11, + "INDEX.sizeInGB":119.88386851921678}}], + "shard1_1_1":[{"core_node474":{ + "core":"COLL_2_shard1_1_1_replica_n2", + "shard":"shard1_1_1", + "collection":"COLL_2", + "node_name":"N_3_solr", + "type":"NRT", + "base_url":"http://N_3/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29556889925E11, + "INDEX.sizeInGB":120.65925628412515}}], + "shard4_0_0":[{"core_node1737":{ + "core":"COLL_2_shard4_0_0_replica_n1736", + "shard":"shard4_0_0", + "collection":"COLL_2", + "node_name":"N_3_solr", + "type":"NRT", + "base_url":"http://N_3/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28645187639E11, + "INDEX.sizeInGB":119.81016736384481}}], + "shard4_1_0":[{"core_node523":{ + "core":"COLL_2_shard4_1_0_replica_n1", + "shard":"shard4_1_0", + "collection":"COLL_2", + "node_name":"N_3_solr", + "type":"NRT", + "base_url":"http://N_3/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27649471364E11, + "INDEX.sizeInGB":118.88283431902528}}], + "shard9_0_0":[{"core_node1815":{ + "core":"COLL_2_shard9_0_0_replica_n1814", + "shard":"shard9_0_0", + "collection":"COLL_2", + "node_name":"N_3_solr", + "type":"NRT", + "base_url":"http://N_3/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29037175651E11, + "INDEX.sizeInGB":120.17523464839906}}]}}, + "N_1d_solr":{"COLL_2":{ + "shard3_1_0":[{"core_node425":{ + "core":"COLL_2_shard3_1_0_replica_n423", + "shard":"shard3_1_0", + "collection":"COLL_2", + "node_name":"N_1d_solr", + "type":"NRT", + "base_url":"http://N_1d/solr", + "leader":"true", + 
"state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2828759808E11, + "INDEX.sizeInGB":119.47713613510132}}], + "shard3_1_1":[{"core_node426":{ + "core":"COLL_2_shard3_1_1_replica_n424", + "shard":"shard3_1_1", + "collection":"COLL_2", + "node_name":"N_1d_solr", + "type":"NRT", + "base_url":"http://N_1d/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29948029547E11, + "INDEX.sizeInGB":121.02353344392031}}], + "shard15_0_0":[{"core_node732":{ + "core":"COLL_2_shard15_0_0_replica_n2", + "shard":"shard15_0_0", + "collection":"COLL_2", + "node_name":"N_1d_solr", + "type":"NRT", + "base_url":"http://N_1d/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27262832088E11, + "INDEX.sizeInGB":118.5227484330535}}], + "shard12_1_0":[{"core_node1789":{ + "core":"COLL_2_shard12_1_0_replica_n1788", + "shard":"shard12_1_0", + "collection":"COLL_2", + "node_name":"N_1d_solr", + "type":"NRT", + "base_url":"http://N_1d/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27487519935E11, + "INDEX.sizeInGB":118.73200529720634}}], + "shard14_1_1":[{"core_node1741":{ + "core":"COLL_2_shard14_1_1_replica_n1740", + "shard":"shard14_1_1", + "collection":"COLL_2", + "node_name":"N_1d_solr", + "type":"NRT", + "base_url":"http://N_1d/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29231781669E11, + "INDEX.sizeInGB":120.35647562611848}}], + "shard14_1_0":[{"core_node1129":{ + "core":"COLL_2_shard14_1_0_replica_n1128", + "shard":"shard14_1_0", + "collection":"COLL_2", + "node_name":"N_1d_solr", + "type":"NRT", + "base_url":"http://N_1d/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27407685053E11, + "INDEX.sizeInGB":118.65765326935798}}]}}, + "N_1_solr":{"COLL_2":{ + "shard16_1_1":[{"core_node995":{ + "core":"COLL_2_shard16_1_1_replica_n994", + 
"shard":"shard16_1_1", + "collection":"COLL_2", + "node_name":"N_1_solr", + "type":"NRT", + "base_url":"http://N_1/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.26672765511E11, + "INDEX.sizeInGB":117.97320610936731}}], + "shard16_0_1":[{"core_node989":{ + "core":"COLL_2_shard16_0_1_replica_n988", + "shard":"shard16_0_1", + "collection":"COLL_2", + "node_name":"N_1_solr", + "type":"NRT", + "base_url":"http://N_1/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.3069803609E11, + "INDEX.sizeInGB":121.72203146852553}}], + "shard16_1_0":[{"core_node993":{ + "core":"COLL_2_shard16_1_0_replica_n992", + "shard":"shard16_1_0", + "collection":"COLL_2", + "node_name":"N_1_solr", + "type":"NRT", + "base_url":"http://N_1/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28812502313E11, + "INDEX.sizeInGB":119.96599129680544}}], + "shard16_0_0":[{"core_node983":{ + "core":"COLL_2_shard16_0_0_replica_n982", + "shard":"shard16_0_0", + "collection":"COLL_2", + "node_name":"N_1_solr", + "type":"NRT", + "base_url":"http://N_1/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.26766519189E11, + "INDEX.sizeInGB":118.06052102614194}}], + "shard18_0_0":[{"core_node875":{ + "core":"COLL_2_shard18_0_0_replica_n2", + "shard":"shard18_0_0", + "collection":"COLL_2", + "node_name":"N_1_solr", + "type":"NRT", + "base_url":"http://N_1/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28033512867E11, + "INDEX.sizeInGB":119.24050084035844}}], + "shard12_1_1":[{"core_node586":{ + "core":"COLL_2_shard12_1_1_replica_n584", + "shard":"shard12_1_1", + "collection":"COLL_2", + "node_name":"N_1_solr", + "type":"NRT", + "base_url":"http://N_1/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2671712403E11, + "INDEX.sizeInGB":118.01451819948852}}]}}, + "N_aw_solr":{"COLL_2":{ + "shard18_1_1":[{"core_node1821":{ + 
"core":"COLL_2_shard18_1_1_replica_n1820", + "shard":"shard18_1_1", + "collection":"COLL_2", + "node_name":"N_aw_solr", + "type":"NRT", + "base_url":"http://N_aw/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28188518759E11, + "INDEX.sizeInGB":119.38486132677644}}], + "shard4_1_1":[{"core_node525":{ + "core":"COLL_2_shard4_1_1_replica_n1", + "shard":"shard4_1_1", + "collection":"COLL_2", + "node_name":"N_aw_solr", + "type":"NRT", + "base_url":"http://N_aw/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27899653279E11, + "INDEX.sizeInGB":119.11583438422531}}], + "shard3_1_0":[{"core_node460":{ + "core":"COLL_2_shard3_1_0_replica_n2", + "shard":"shard3_1_0", + "collection":"COLL_2", + "node_name":"N_aw_solr", + "type":"NRT", + "base_url":"http://N_aw/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28273400877E11, + "INDEX.sizeInGB":119.46391395945102}}], + "shard15_0_1":[{"core_node1817":{ + "core":"COLL_2_shard15_0_1_replica_n1816", + "shard":"shard15_0_1", + "collection":"COLL_2", + "node_name":"N_aw_solr", + "type":"NRT", + "base_url":"http://N_aw/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27129784031E11, + "INDEX.sizeInGB":118.39883777406067}}], + "shard12_1_1":[{"core_node661":{ + "core":"COLL_2_shard12_1_1_replica_n1", + "shard":"shard12_1_1", + "collection":"COLL_2", + "node_name":"N_aw_solr", + "type":"NRT", + "base_url":"http://N_aw/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.26701654869E11, + "INDEX.sizeInGB":118.00011142063886}}], + "shard12_1_0":[{"core_node659":{ + "core":"COLL_2_shard12_1_0_replica_n1", + "shard":"shard12_1_0", + "collection":"COLL_2", + "node_name":"N_aw_solr", + "type":"NRT", + "base_url":"http://N_aw/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27434400341E11, + 
"INDEX.sizeInGB":118.68253382015973}}]}}, + "N_1h_solr":{"COLL_2":{ + "shard1_0_0":[{"core_node1729":{ + "core":"COLL_2_shard1_0_0_replica_n1728", + "shard":"shard1_0_0", + "collection":"COLL_2", + "node_name":"N_1h_solr", + "type":"NRT", + "base_url":"http://N_1h/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":5.7176945428E10, + "INDEX.sizeInGB":53.25018002465367}}], + "shard7_1_0":[{"core_node1145":{ + "core":"COLL_2_shard7_1_0_replica_n1144", + "shard":"shard7_1_0", + "collection":"COLL_2", + "node_name":"N_1h_solr", + "type":"NRT", + "base_url":"http://N_1h/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2949609012E11, + "INDEX.sizeInGB":120.60263205319643}}], + "shard7_1_1":[{"core_node1701":{ + "core":"COLL_2_shard7_1_1_replica_n1700", + "shard":"shard7_1_1", + "collection":"COLL_2", + "node_name":"N_1h_solr", + "type":"NRT", + "base_url":"http://N_1h/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28489170345E11, + "INDEX.sizeInGB":119.66486493591219}}], + "shard3_0_1":[{"core_node510":{ + "core":"COLL_2_shard3_0_1_replica_n508", + "shard":"shard3_0_1", + "collection":"COLL_2", + "node_name":"N_1h_solr", + "type":"NRT", + "base_url":"http://N_1h/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31866901019E11, + "INDEX.sizeInGB":122.81062176357955}}], + "shard12_0_1":[{"core_node1761":{ + "core":"COLL_2_shard12_0_1_replica_n1760", + "shard":"shard12_0_1", + "collection":"COLL_2", + "node_name":"N_1h_solr", + "type":"NRT", + "base_url":"http://N_1h/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30342308934E11, + "INDEX.sizeInGB":121.39073473773897}}], + "shard12_0_0":[{"core_node1697":{ + "core":"COLL_2_shard12_0_0_replica_n1696", + "shard":"shard12_0_0", + "collection":"COLL_2", + "node_name":"N_1h_solr", + "type":"NRT", + "base_url":"http://N_1h/solr", + "state":"active", + "force_set_state":"false", + 
"INDEX.sizeInBytes":1.29369271388E11, + "INDEX.sizeInGB":120.48452290520072}}]}}, + "N_29_solr":{"COLL_2":{ + "shard8_0_0":[{"core_node1691":{ + "core":"COLL_2_shard8_0_0_replica_n1690", + "shard":"shard8_0_0", + "collection":"COLL_2", + "node_name":"N_29_solr", + "type":"NRT", + "base_url":"http://N_29/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.30176337999E11, + "INDEX.sizeInGB":121.23616225924343}}], + "shard8_0_1":[{"core_node1787":{ + "core":"COLL_2_shard8_0_1_replica_n1786", + "shard":"shard8_0_1", + "collection":"COLL_2", + "node_name":"N_29_solr", + "type":"NRT", + "base_url":"http://N_29/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.32692723859E11, + "INDEX.sizeInGB":123.57972921710461}}], + "shard7_1_0":[{"core_node1143":{ + "core":"COLL_2_shard7_1_0_replica_n1142", + "shard":"shard7_1_0", + "collection":"COLL_2", + "node_name":"N_29_solr", + "type":"NRT", + "base_url":"http://N_29/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2946739865E11, + "INDEX.sizeInGB":120.57591103948653}}], + "shard7_0_1":[{"core_node777":{ + "core":"COLL_2_shard7_0_1_replica_n2", + "shard":"shard7_0_1", + "collection":"COLL_2", + "node_name":"N_29_solr", + "type":"NRT", + "base_url":"http://N_29/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":8.6794048237E10, + "INDEX.sizeInGB":80.83325646538287}}], + "shard7_1_1":[{"core_node1759":{ + "core":"COLL_2_shard7_1_1_replica_n1758", + "shard":"shard7_1_1", + "collection":"COLL_2", + "node_name":"N_29_solr", + "type":"NRT", + "base_url":"http://N_29/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28546712309E11, + "INDEX.sizeInGB":119.7184550659731}}], + "shard6_1_0":[{"core_node1793":{ + "core":"COLL_2_shard6_1_0_replica_n1792", + "shard":"shard6_1_0", + "collection":"COLL_2", + "node_name":"N_29_solr", + "type":"NRT", + "base_url":"http://N_29/solr", + 
"state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29365181039E11, + "INDEX.sizeInGB":120.48071347083896}}]}}, + "N_e_solr":{"COLL_2":{ + "shard1_0_1":[{"core_node1719":{ + "core":"COLL_2_shard1_0_1_replica_n1718", + "shard":"shard1_0_1", + "collection":"COLL_2", + "node_name":"N_e_solr", + "type":"NRT", + "base_url":"http://N_e/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":5.9506746089E10, + "INDEX.sizeInGB":55.41997597459704}}], + "shard18_0_0":[{"core_node1819":{ + "core":"COLL_2_shard18_0_0_replica_n1818", + "shard":"shard18_0_0", + "collection":"COLL_2", + "node_name":"N_e_solr", + "type":"NRT", + "base_url":"http://N_e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28218931509E11, + "INDEX.sizeInGB":119.41318540740758}}], + "shard13_1_1":[{"core_node925":{ + "core":"COLL_2_shard13_1_1_replica_n924", + "shard":"shard13_1_1", + "collection":"COLL_2", + "node_name":"N_e_solr", + "type":"NRT", + "base_url":"http://N_e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29598508564E11, + "INDEX.sizeInGB":120.69801666215062}}], + "shard18_1_0":[{"core_node672":{ + "core":"COLL_2_shard18_1_0_replica_n2", + "shard":"shard18_1_0", + "collection":"COLL_2", + "node_name":"N_e_solr", + "type":"NRT", + "base_url":"http://N_e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29108586002E11, + "INDEX.sizeInGB":120.24174072034657}}], + "shard15_0_0":[{"core_node731":{ + "core":"COLL_2_shard15_0_0_replica_n1", + "shard":"shard15_0_0", + "collection":"COLL_2", + "node_name":"N_e_solr", + "type":"NRT", + "base_url":"http://N_e/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27235871561E11, + "INDEX.sizeInGB":118.49763948563486}}], + "shard13_1_0":[{"core_node923":{ + "core":"COLL_2_shard13_1_0_replica_n922", + "shard":"shard13_1_0", + "collection":"COLL_2", + "node_name":"N_e_solr", + 
"type":"NRT", + "base_url":"http://N_e/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29514183189E11, + "INDEX.sizeInGB":120.6194825368002}}]}}, + "N_2w_solr":{"COLL_2":{ + "shard1_0_1":[{"core_node1677":{ + "core":"COLL_2_shard1_0_1_replica_n1676", + "shard":"shard1_0_1", + "collection":"COLL_2", + "node_name":"N_2w_solr", + "type":"NRT", + "base_url":"http://N_2w/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":5.9557275352E10, + "INDEX.sizeInGB":55.46703501790762}}], + "shard1_1_1":[{"core_node1807":{ + "core":"COLL_2_shard1_1_1_replica_n1806", + "shard":"shard1_1_1", + "collection":"COLL_2", + "node_name":"N_2w_solr", + "type":"NRT", + "base_url":"http://N_2w/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.2954748046E11, + "INDEX.sizeInGB":120.6504930369556}}], + "shard4_1_0":[{"core_node1775":{ + "core":"COLL_2_shard4_1_0_replica_n1774", + "shard":"shard4_1_0", + "collection":"COLL_2", + "node_name":"N_2w_solr", + "type":"NRT", + "base_url":"http://N_2w/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27659935903E11, + "INDEX.sizeInGB":118.89258018042892}}], + "shard18_1_1":[{"core_node673":{ + "core":"COLL_2_shard18_1_1_replica_n1", + "shard":"shard18_1_1", + "collection":"COLL_2", + "node_name":"N_2w_solr", + "type":"NRT", + "base_url":"http://N_2w/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28226679933E11, + "INDEX.sizeInGB":119.42040168959647}}], + "shard4_1_1":[{"core_node1805":{ + "core":"COLL_2_shard4_1_1_replica_n1804", + "shard":"shard4_1_1", + "collection":"COLL_2", + "node_name":"N_2w_solr", + "type":"NRT", + "base_url":"http://N_2w/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.27878088796E11, + "INDEX.sizeInGB":119.0957508943975}}], + "shard18_1_0":[{"core_node671":{ + "core":"COLL_2_shard18_1_0_replica_n1", + "shard":"shard18_1_0", + 
"collection":"COLL_2", + "node_name":"N_2w_solr", + "type":"NRT", + "base_url":"http://N_2w/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.28884297502E11, + "INDEX.sizeInGB":120.03285577706993}}]}}, + "N_5_solr":{"COLL_2":{ + "shard1_1_0":[{"core_node1721":{ + "core":"COLL_2_shard1_1_0_replica_n1720", + "shard":"shard1_1_0", + "collection":"COLL_2", + "node_name":"N_5_solr", + "type":"NRT", + "base_url":"http://N_5/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29009851855E11, + "INDEX.sizeInGB":120.14978738036007}}], + "shard1_0_1":[{"core_node1669":{ + "core":"COLL_2_shard1_0_1_replica_n1668", + "shard":"shard1_0_1", + "collection":"COLL_2", + "node_name":"N_5_solr", + "type":"NRT", + "base_url":"http://N_5/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":5.9574276743E10, + "INDEX.sizeInGB":55.482868797145784}}], + "shard1_1_1":[{"core_node418":{ + "core":"COLL_2_shard1_1_1_replica_n416", + "shard":"shard1_1_1", + "collection":"COLL_2", + "node_name":"N_5_solr", + "type":"NRT", + "base_url":"http://N_5/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29698716918E11, + "INDEX.sizeInGB":120.79134296439588}}], + "shard2_0_0":[{"core_node911":{ + "core":"COLL_2_shard2_0_0_replica_n910", + "shard":"shard2_0_0", + "collection":"COLL_2", + "node_name":"N_5_solr", + "type":"NRT", + "base_url":"http://N_5/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.29504451209E11, + "INDEX.sizeInGB":120.6104189241305}}], + "shard2_0_1":[{"core_node917":{ + "core":"COLL_2_shard2_0_1_replica_n916", + "shard":"shard2_0_1", + "collection":"COLL_2", + "node_name":"N_5_solr", + "type":"NRT", + "base_url":"http://N_5/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":1.31334463143E11, + "INDEX.sizeInGB":122.31475035008043}}], + "shard1_0_0":[{"core_node1725":{ + 
"core":"COLL_2_shard1_0_0_replica_n1724", + "shard":"shard1_0_0", + "collection":"COLL_2", + "node_name":"N_5_solr", + "type":"NRT", + "base_url":"http://N_5/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":5.7183711221E10, + "INDEX.sizeInGB":53.25648116040975}}]}}, + "N_do_solr":{ + "COLL_1":{ + "shard3_0_0":[{"core_node112":{ + "core":"COLL_1_shard3_0_0_replica_n111", + "shard":"shard3_0_0", + "collection":"COLL_1", + "node_name":"N_do_solr", + "type":"NRT", + "base_url":"http://N_do/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.4957115524E10, + "INDEX.sizeInGB":41.86957657709718}}], + "shard3_1_0":[{"core_node116":{ + "core":"COLL_1_shard3_1_0_replica_n115", + "shard":"shard3_1_0", + "collection":"COLL_1", + "node_name":"N_do_solr", + "type":"NRT", + "base_url":"http://N_do/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.3732753925E10, + "INDEX.sizeInGB":40.72930098045617}}], + "shard3_0_1":[{"core_node114":{ + "core":"COLL_1_shard3_0_1_replica_n113", + "shard":"shard3_0_1", + "collection":"COLL_1", + "node_name":"N_do_solr", + "type":"NRT", + "base_url":"http://N_do/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.577095697E10, + "INDEX.sizeInGB":42.62752548791468}}], + "shard3_1_1":[{"core_node118":{ + "core":"COLL_1_shard3_1_1_replica_n117", + "shard":"shard3_1_1", + "collection":"COLL_1", + "node_name":"N_do_solr", + "type":"NRT", + "base_url":"http://N_do/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.8532509927E10, + "INDEX.sizeInGB":45.19942209776491}}]}, + "COLL_0":{"shard3":[{"core_node15":{ + "core":"COLL_0_shard3_replica_n12", + "shard":"shard3", + "collection":"COLL_0", + "node_name":"N_do_solr", + "type":"NRT", + "base_url":"http://N_do/solr", + "leader":"true", + "state":"active", + 
"force_set_state":"false", + "INDEX.sizeInBytes":3.1297025422E10, + "INDEX.sizeInGB":29.147626293823123}}]}}, + "N_3a_solr":{ + "COLL_1":{ + "shard3_0_0":[{"core_node73":{ + "core":"COLL_1_shard3_0_0_replica_n71", + "shard":"shard3_0_0", + "collection":"COLL_1", + "node_name":"N_3a_solr", + "type":"NRT", + "base_url":"http://N_3a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.5160600486E10, + "INDEX.sizeInGB":42.05908671580255}}], + "shard3_1_0":[{"core_node77":{ + "core":"COLL_1_shard3_1_0_replica_n75", + "shard":"shard3_1_0", + "collection":"COLL_1", + "node_name":"N_3a_solr", + "type":"NRT", + "base_url":"http://N_3a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.5090380622E10, + "INDEX.sizeInGB":41.99368937127292}}], + "shard3_0_1":[{"core_node74":{ + "core":"COLL_1_shard3_0_1_replica_n72", + "shard":"shard3_0_1", + "collection":"COLL_1", + "node_name":"N_3a_solr", + "type":"NRT", + "base_url":"http://N_3a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.5879426317E10, + "INDEX.sizeInGB":42.72854543942958}}], + "shard3_1_1":[{"core_node78":{ + "core":"COLL_1_shard3_1_1_replica_n76", + "shard":"shard3_1_1", + "collection":"COLL_1", + "node_name":"N_3a_solr", + "type":"NRT", + "base_url":"http://N_3a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.6849085882E10, + "INDEX.sizeInGB":43.631611282005906}}]}, + "COLL_0":{"shard3":[{"core_node17":{ + "core":"COLL_0_shard3_replica_n14", + "shard":"shard3", + "collection":"COLL_0", + "node_name":"N_3a_solr", + "type":"NRT", + "base_url":"http://N_3a/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":3.0819950704E10, + "INDEX.sizeInGB":28.70331583917141}}]}}, + "N_v_solr":{ + "COLL_1":{ + "shard3_0_0":[{"core_node120":{ + "core":"COLL_1_shard3_0_0_replica_n119", + "shard":"shard3_0_0", + "collection":"COLL_1", + "node_name":"N_v_solr", + "type":"NRT", + 
"base_url":"http://N_v/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.3809517838E10, + "INDEX.sizeInGB":40.80079294554889}}], + "shard3_1_0":[{"core_node124":{ + "core":"COLL_1_shard3_1_0_replica_n123", + "shard":"shard3_1_0", + "collection":"COLL_1", + "node_name":"N_v_solr", + "type":"NRT", + "base_url":"http://N_v/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.5638162031E10, + "INDEX.sizeInGB":42.503850563429296}}], + "shard3_0_1":[{"core_node122":{ + "core":"COLL_1_shard3_0_1_replica_n121", + "shard":"shard3_0_1", + "collection":"COLL_1", + "node_name":"N_v_solr", + "type":"NRT", + "base_url":"http://N_v/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.6310602091E10, + "INDEX.sizeInGB":43.13010917138308}}], + "shard3_1_1":[{"core_node126":{ + "core":"COLL_1_shard3_1_1_replica_n125", + "shard":"shard3_1_1", + "collection":"COLL_1", + "node_name":"N_v_solr", + "type":"NRT", + "base_url":"http://N_v/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.4257494507E10, + "INDEX.sizeInGB":41.21800373028964}}]}, + "COLL_0":{"shard3":[{"core_node18":{ + "core":"COLL_0_shard3_replica_n16", + "shard":"shard3", + "collection":"COLL_0", + "node_name":"N_v_solr", + "type":"NRT", + "base_url":"http://N_v/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":2.8932093807E10, + "INDEX.sizeInGB":26.94511209335178}}]}}, + "N_13_solr":{ + "COLL_1":{ + "shard1_1_0":[{"core_node61":{ + "core":"COLL_1_shard1_1_0_replica_n59", + "shard":"shard1_1_0", + "collection":"COLL_1", + "node_name":"N_13_solr", + "type":"NRT", + "base_url":"http://N_13/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.3783419579E10, + "INDEX.sizeInGB":40.77648704778403}}], + "shard1_0_1":[{"core_node58":{ + "core":"COLL_1_shard1_0_1_replica_n56", + "shard":"shard1_0_1", + "collection":"COLL_1", + "node_name":"N_13_solr", + "type":"NRT", 
+ "base_url":"http://N_13/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.4932001726E10, + "INDEX.sizeInGB":41.846187530085444}}], + "shard1_1_1":[{"core_node62":{ + "core":"COLL_1_shard1_1_1_replica_n60", + "shard":"shard1_1_1", + "collection":"COLL_1", + "node_name":"N_13_solr", + "type":"NRT", + "base_url":"http://N_13/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.811959042E10, + "INDEX.sizeInGB":44.814860839396715}}], + "shard1_0_0":[{"core_node57":{ + "core":"COLL_1_shard1_0_0_replica_n55", + "shard":"shard1_0_0", + "collection":"COLL_1", + "node_name":"N_13_solr", + "type":"NRT", + "base_url":"http://N_13/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.5921892273E10, + "INDEX.sizeInGB":42.76809494290501}}]}, + "COLL_0":{"shard2":[{"core_node13":{ + "core":"COLL_0_shard2_replica_n10", + "shard":"shard2", + "collection":"COLL_0", + "node_name":"N_13_solr", + "type":"NRT", + "base_url":"http://N_13/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":3.4248182159E10, + "INDEX.sizeInGB":31.896105184219778}}]}}, + "N_3to_solr":{ + "COLL_1":{ + "shard1_1_0":[{"core_node84":{ + "core":"COLL_1_shard1_1_0_replica_n83", + "shard":"shard1_1_0", + "collection":"COLL_1", + "node_name":"N_3to_solr", + "type":"NRT", + "base_url":"http://N_3to/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.3892348528E10, + "INDEX.sizeInGB":40.87793503701687}}], + "shard1_0_1":[{"core_node82":{ + "core":"COLL_1_shard1_0_1_replica_n81", + "shard":"shard1_0_1", + "collection":"COLL_1", + "node_name":"N_3to_solr", + "type":"NRT", + "base_url":"http://N_3to/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.4936912617E10, + "INDEX.sizeInGB":41.85076115373522}}], + "shard1_1_1":[{"core_node86":{ + "core":"COLL_1_shard1_1_1_replica_n85", + "shard":"shard1_1_1", + 
"collection":"COLL_1", + "node_name":"N_3to_solr", + "type":"NRT", + "base_url":"http://N_3to/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":5.1015133973E10, + "INDEX.sizeInGB":47.511545916087925}}], + "shard1_0_0":[{"core_node80":{ + "core":"COLL_1_shard1_0_0_replica_n79", + "shard":"shard1_0_0", + "collection":"COLL_1", + "node_name":"N_3to_solr", + "type":"NRT", + "base_url":"http://N_3to/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.644843302E10, + "INDEX.sizeInGB":43.258474227041006}}]}, + "COLL_0":{"shard2":[{"core_node11":{ + "core":"COLL_0_shard2_replica_n8", + "shard":"shard2", + "collection":"COLL_0", + "node_name":"N_3to_solr", + "type":"NRT", + "base_url":"http://N_3to/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":3.0722710385E10, + "INDEX.sizeInGB":28.6127537349239}}]}}, + "N_16_solr":{ + "COLL_1":{ + "shard2_0_0":[{"core_node100":{ + "core":"COLL_1_shard2_0_0_replica_n99", + "shard":"shard2_0_0", + "collection":"COLL_1", + "node_name":"N_16_solr", + "type":"NRT", + "base_url":"http://N_16/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.8764329025E10, + "INDEX.sizeInGB":45.41532045695931}}], + "shard2_0_1":[{"core_node102":{ + "core":"COLL_1_shard2_0_1_replica_n101", + "shard":"shard2_0_1", + "collection":"COLL_1", + "node_name":"N_16_solr", + "type":"NRT", + "base_url":"http://N_16/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.3740343099E10, + "INDEX.sizeInGB":40.73636894952506}}], + "shard2_1_0":[{"core_node96":{ + "core":"COLL_1_shard2_1_0_replica_n95", + "shard":"shard2_1_0", + "collection":"COLL_1", + "node_name":"N_16_solr", + "type":"NRT", + "base_url":"http://N_16/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.5585236311E10, + "INDEX.sizeInGB":42.45455964561552}}], + 
"shard2_1_1":[{"core_node98":{ + "core":"COLL_1_shard2_1_1_replica_n97", + "shard":"shard2_1_1", + "collection":"COLL_1", + "node_name":"N_16_solr", + "type":"NRT", + "base_url":"http://N_16/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.527594328E10, + "INDEX.sizeInGB":42.16650806367397}}]}, + "COLL_0":{"shard1":[{"core_node5":{ + "core":"COLL_0_shard1_replica_n2", + "shard":"shard1", + "collection":"COLL_0", + "node_name":"N_16_solr", + "type":"NRT", + "base_url":"http://N_16/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":3.3775978753E10, + "INDEX.sizeInGB":31.45633149240166}}]}}, + "N_d4_solr":{ + "COLL_1":{ + "shard2_0_0":[{"core_node69":{ + "core":"COLL_1_shard2_0_0_replica_n67", + "shard":"shard2_0_0", + "collection":"COLL_1", + "node_name":"N_d4_solr", + "type":"NRT", + "base_url":"http://N_d4/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.497304707E10, + "INDEX.sizeInGB":41.8844139855355}}], + "shard2_0_1":[{"core_node70":{ + "core":"COLL_1_shard2_0_1_replica_n68", + "shard":"shard2_0_1", + "collection":"COLL_1", + "node_name":"N_d4_solr", + "type":"NRT", + "base_url":"http://N_d4/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.5692831033E10, + "INDEX.sizeInGB":42.554765039123595}}], + "shard2_1_0":[{"core_node65":{ + "core":"COLL_1_shard2_1_0_replica_n63", + "shard":"shard2_1_0", + "collection":"COLL_1", + "node_name":"N_d4_solr", + "type":"NRT", + "base_url":"http://N_d4/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.5935880044E10, + "INDEX.sizeInGB":42.78112206980586}}], + "shard2_1_1":[{"core_node66":{ + "core":"COLL_1_shard2_1_1_replica_n64", + "shard":"shard2_1_1", + "collection":"COLL_1", + "node_name":"N_d4_solr", + "type":"NRT", + "base_url":"http://N_d4/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + 
"INDEX.sizeInBytes":4.5166045429E10, + "INDEX.sizeInGB":42.064157714135945}}]}, + "COLL_0":{"shard1":[{"core_node3":{ + "core":"COLL_0_shard1_replica_n1", + "shard":"shard1", + "collection":"COLL_0", + "node_name":"N_d4_solr", + "type":"NRT", + "base_url":"http://N_d4/solr", + "leader":"true", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":3.401835331E10, + "INDEX.sizeInGB":31.682060388848186}}]}}, + "N_b9_solr":{ + "COLL_1":{ + "shard1_1_0":[{"core_node92":{ + "core":"COLL_1_shard1_1_0_replica_n91", + "shard":"shard1_1_0", + "collection":"COLL_1", + "node_name":"N_b9_solr", + "type":"NRT", + "base_url":"http://N_b9/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.5724314347E10, + "INDEX.sizeInGB":42.5840861601755}}], + "shard1_0_1":[{"core_node90":{ + "core":"COLL_1_shard1_0_1_replica_n89", + "shard":"shard1_0_1", + "collection":"COLL_1", + "node_name":"N_b9_solr", + "type":"NRT", + "base_url":"http://N_b9/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.6030616744E10, + "INDEX.sizeInGB":42.869352497160435}}], + "shard1_1_1":[{"core_node94":{ + "core":"COLL_1_shard1_1_1_replica_n93", + "shard":"shard1_1_1", + "collection":"COLL_1", + "node_name":"N_b9_solr", + "type":"NRT", + "base_url":"http://N_b9/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.574559386E10, + "INDEX.sizeInGB":42.603904251009226}}], + "shard1_0_0":[{"core_node88":{ + "core":"COLL_1_shard1_0_0_replica_n87", + "shard":"shard1_0_0", + "collection":"COLL_1", + "node_name":"N_b9_solr", + "type":"NRT", + "base_url":"http://N_b9/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.5100613575E10, + "INDEX.sizeInGB":42.0032195514068}}]}, + "COLL_0":{"shard2":[{"core_node9":{ + "core":"COLL_0_shard2_replica_n6", + "shard":"shard2", + "collection":"COLL_0", + "node_name":"N_b9_solr", + "type":"NRT", + "base_url":"http://N_b9/solr", + "state":"active", + 
"force_set_state":"false", + "INDEX.sizeInBytes":2.8865621899E10, + "INDEX.sizeInGB":26.883205304853618}}]}}, + "N_74_solr":{ + "COLL_1":{ + "shard2_0_0":[{"core_node108":{ + "core":"COLL_1_shard2_0_0_replica_n107", + "shard":"shard2_0_0", + "collection":"COLL_1", + "node_name":"N_74_solr", + "type":"NRT", + "base_url":"http://N_74/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.3767024396E10, + "INDEX.sizeInGB":40.76121784374118}}], + "shard2_0_1":[{"core_node110":{ + "core":"COLL_1_shard2_0_1_replica_n109", + "shard":"shard2_0_1", + "collection":"COLL_1", + "node_name":"N_74_solr", + "type":"NRT", + "base_url":"http://N_74/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.8622428842E10, + "INDEX.sizeInGB":45.28316561318934}}], + "shard2_1_0":[{"core_node104":{ + "core":"COLL_1_shard2_1_0_replica_n103", + "shard":"shard2_1_0", + "collection":"COLL_1", + "node_name":"N_74_solr", + "type":"NRT", + "base_url":"http://N_74/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.4599223614E10, + "INDEX.sizeInGB":41.536263762041926}}], + "shard2_1_1":[{"core_node106":{ + "core":"COLL_1_shard2_1_1_replica_n105", + "shard":"shard2_1_1", + "collection":"COLL_1", + "node_name":"N_74_solr", + "type":"NRT", + "base_url":"http://N_74/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":4.3768191618E10, + "INDEX.sizeInGB":40.762304903939366}}]}, + "COLL_0":{"shard1":[{"core_node7":{ + "core":"COLL_0_shard1_replica_n4", + "shard":"shard1", + "collection":"COLL_0", + "node_name":"N_74_solr", + "type":"NRT", + "base_url":"http://N_74/solr", + "state":"active", + "force_set_state":"false", + "INDEX.sizeInBytes":2.9252853492E10, + "INDEX.sizeInGB":27.24384282901883}}]}}}} \ No newline at end of file diff --git a/solr/core/src/test-files/solr/simSnapshot/statistics.json b/solr/core/src/test-files/solr/simSnapshot/statistics.json new file mode 100644 index 
000000000000..735c36e14ba3 --- /dev/null +++ b/solr/core/src/test-files/solr/simSnapshot/statistics.json @@ -0,0 +1,2045 @@ +{ + "coresPerNodes":{ + "5":9, + "6":36, + "12":2, + "13":1}, + "sortedNodeStats":{ + "N_0_solr":{ + "isLive":true, + "cores":12.0, + "freedisk":719.6562576293945, + "sysprop.pool":"pool-03", + "node":"N_0_solr", + "sysprop.az":"us-east-1a", + "totaldisk":999.51171875, + "withCollection":null, + "replicas":{ + "COLL_6":{"shard1_replica_n4":{ + "INDEX.sizeInBytes":4.4921656871E10, + "INDEX.sizeInGB":41.83655313309282, + "coreNode":"core_node6", + "leader":true}}, + "COLL_l":{"shard1_replica_n9":{ + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7, + "coreNode":"core_node10", + "leader":true}}, + "COLL_x":{"shard1_replica_n9":{ + "INDEX.sizeInBytes":3.0928583E8, + "INDEX.sizeInGB":0.2880448754876852, + "coreNode":"core_node10", + "leader":true}}, + "COLL_1b":{"shard1_replica_n9":{ + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7, + "coreNode":"core_node10", + "leader":true}}, + "COLL_1r":{"shard1_replica_n2":{ + "INDEX.sizeInBytes":4.25884524E8, + "INDEX.sizeInGB":0.39663587138056755, + "coreNode":"core_node5", + "leader":true}}, + "COLL_8":{"shard1_replica_n2":{ + "INDEX.sizeInBytes":399225.0, + "INDEX.sizeInGB":3.718072548508644E-4, + "coreNode":"core_node5", + "leader":true}}, + "COLL_q":{"shard1_replica_n9":{ + "INDEX.sizeInBytes":4.9242789E8, + "INDEX.sizeInGB":0.45860921032726765, + "coreNode":"core_node10", + "leader":true}}, + "COLL_4":{"shard1_replica_n2":{ + "INDEX.sizeInBytes":2.58797271E8, + "INDEX.sizeInGB":0.24102374073117971, + "coreNode":"core_node5", + "leader":true}}, + "COLL_1x":{"shard1_replica_n9":{ + "INDEX.sizeInBytes":4264901.0, + "INDEX.sizeInGB":0.003971998579800129, + "coreNode":"core_node10", + "leader":true}}, + "COLL_1t":{"shard1_replica_n2":{ + "INDEX.sizeInBytes":8.7485800719E10, + "INDEX.sizeInGB":81.47750116791576, + "coreNode":"core_node5", + "leader":true}}, + 
"COLL_2k":{"shard1_replica_n9":{ + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7, + "coreNode":"core_node10", + "leader":true}}, + "COLL_22":{"shard1_replica_n9":{ + "INDEX.sizeInBytes":2.4351639993E10, + "INDEX.sizeInGB":22.679232054390013, + "coreNode":"core_node10", + "leader":true}}}}, + "N_3_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4272.45711517334, + "sysprop.pool":"pool-01", + "node":"N_3_solr", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard16_1_0_replica_n990":{ + "INDEX.sizeInBytes":1.28724323652E11, + "INDEX.sizeInGB":119.88386851921678, + "coreNode":"core_node991"}, + "shard16_1_1_replica_n996":{ + "INDEX.sizeInBytes":1.26611980672E11, + "INDEX.sizeInGB":117.91659581661224, + "coreNode":"core_node997"}, + "shard1_1_1_replica_n2":{ + "INDEX.sizeInBytes":1.29556889925E11, + "INDEX.sizeInGB":120.65925628412515, + "coreNode":"core_node474"}, + "shard4_0_0_replica_n1736":{ + "INDEX.sizeInBytes":1.28645187639E11, + "INDEX.sizeInGB":119.81016736384481, + "coreNode":"core_node1737"}, + "shard4_1_0_replica_n1":{ + "INDEX.sizeInBytes":1.27649471364E11, + "INDEX.sizeInGB":118.88283431902528, + "coreNode":"core_node523"}, + "shard9_0_0_replica_n1814":{ + "INDEX.sizeInBytes":1.29037175651E11, + "INDEX.sizeInGB":120.17523464839906, + "coreNode":"core_node1815"}}}}, + "N_1_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4274.765396118164, + "sysprop.pool":"pool-01", + "node":"N_1_solr", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard12_1_1_replica_n584":{ + "INDEX.sizeInBytes":1.2671712403E11, + "INDEX.sizeInGB":118.01451819948852, + "coreNode":"core_node586"}, + "shard16_0_0_replica_n982":{ + "INDEX.sizeInBytes":1.26766519189E11, + "INDEX.sizeInGB":118.06052102614194, + "coreNode":"core_node983"}, + "shard16_0_1_replica_n988":{ + "INDEX.sizeInBytes":1.3069803609E11, + 
"INDEX.sizeInGB":121.72203146852553, + "coreNode":"core_node989"}, + "shard16_1_0_replica_n992":{ + "INDEX.sizeInBytes":1.28812502313E11, + "INDEX.sizeInGB":119.96599129680544, + "coreNode":"core_node993"}, + "shard16_1_1_replica_n994":{ + "INDEX.sizeInBytes":1.26672765511E11, + "INDEX.sizeInGB":117.97320610936731, + "coreNode":"core_node995"}, + "shard18_0_0_replica_n2":{ + "INDEX.sizeInBytes":1.28033512867E11, + "INDEX.sizeInGB":119.24050084035844, + "coreNode":"core_node875"}}}}, + "N_2_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4266.604637145996, + "sysprop.pool":"pool-01", + "node":"N_2_solr", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard12_0_0_replica_n1248":{ + "INDEX.sizeInBytes":1.29421522442E11, + "INDEX.sizeInGB":120.53318549133837, + "coreNode":"core_node1249", + "leader":true}, + "shard12_0_1_replica_n1254":{ + "INDEX.sizeInBytes":1.30384315739E11, + "INDEX.sizeInGB":121.42985662352294, + "coreNode":"core_node1255", + "leader":true}, + "shard5_1_0_replica_n1136":{ + "INDEX.sizeInBytes":7.6877250282E10, + "INDEX.sizeInGB":71.59751866199076, + "coreNode":"core_node1137"}, + "shard5_1_1_replica_n1138":{ + "INDEX.sizeInBytes":1.29952609098E11, + "INDEX.sizeInGB":121.02779848314822, + "coreNode":"core_node1139"}, + "shard7_0_1_replica_n1":{ + "INDEX.sizeInBytes":1.2890128588E11, + "INDEX.sizeInGB":120.04867743700743, + "coreNode":"core_node776", + "leader":true}, + "shard9_0_1_replica_n2":{ + "INDEX.sizeInBytes":1.29212951693E11, + "INDEX.sizeInGB":120.33893884439021, + "coreNode":"core_node478", + "leader":true}}}}, + "N_6_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4252.47643661499, + "sysprop.pool":"pool-01", + "node":"N_6_solr", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard15_1_1_replica_n1708":{ + "INDEX.sizeInBytes":1.30846984322E11, + "INDEX.sizeInGB":121.86075031943619, + 
"coreNode":"core_node1709"}, + "shard3_1_0_replica_n1":{ + "INDEX.sizeInBytes":1.32652501535E11, + "INDEX.sizeInGB":123.54226925875992, + "coreNode":"core_node459"}, + "shard4_0_0_replica_n2":{ + "INDEX.sizeInBytes":1.28680029361E11, + "INDEX.sizeInGB":119.84261624608189, + "coreNode":"core_node520"}, + "shard4_0_1_replica_n1802":{ + "INDEX.sizeInBytes":1.28153346526E11, + "INDEX.sizeInGB":119.35210463218391, + "coreNode":"core_node1803"}, + "shard8_1_0_replica_n1810":{ + "INDEX.sizeInBytes":1.35679249773E11, + "INDEX.sizeInGB":126.36114822048694, + "coreNode":"core_node1811"}, + "shard9_0_0_replica_n1798":{ + "INDEX.sizeInBytes":1.35157081196E11, + "INDEX.sizeInGB":125.874840836972, + "coreNode":"core_node1799", + "leader":true}}}}, + "N_5_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4397.149795532227, + "sysprop.pool":"pool-01", + "node":"N_5_solr", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard1_0_0_replica_n1724":{ + "INDEX.sizeInBytes":5.7183711221E10, + "INDEX.sizeInGB":53.25648116040975, + "coreNode":"core_node1725", + "leader":true}, + "shard1_0_1_replica_n1668":{ + "INDEX.sizeInBytes":5.9574276743E10, + "INDEX.sizeInGB":55.482868797145784, + "coreNode":"core_node1669"}, + "shard1_1_0_replica_n1720":{ + "INDEX.sizeInBytes":1.29009851855E11, + "INDEX.sizeInGB":120.14978738036007, + "coreNode":"core_node1721"}, + "shard1_1_1_replica_n416":{ + "INDEX.sizeInBytes":1.29698716918E11, + "INDEX.sizeInGB":120.79134296439588, + "coreNode":"core_node418", + "leader":true}, + "shard2_0_0_replica_n910":{ + "INDEX.sizeInBytes":1.29504451209E11, + "INDEX.sizeInGB":120.6104189241305, + "coreNode":"core_node911"}, + "shard2_0_1_replica_n916":{ + "INDEX.sizeInBytes":1.31334463143E11, + "INDEX.sizeInGB":122.31475035008043, + "coreNode":"core_node917"}}}}, + "N_4_solr":{ + "isLive":true, + "cores":12.0, + "freedisk":875.4758682250977, + "sysprop.pool":"pool-03", + "node":"N_4_solr", + 
"sysprop.az":"us-east-1c", + "totaldisk":999.51171875, + "withCollection":null, + "replicas":{ + "COLL_6":{"shard1_replica_n2":{ + "INDEX.sizeInBytes":2.0738852096E10, + "INDEX.sizeInGB":19.314561128616333, + "coreNode":"core_node5"}}, + "COLL_l":{"shard1_replica_n4":{ + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7, + "coreNode":"core_node6"}}, + "COLL_x":{"shard1_replica_n4":{ + "INDEX.sizeInBytes":3.03301808E8, + "INDEX.sizeInGB":0.28247182071208954, + "coreNode":"core_node6"}}, + "COLL_1b":{"shard1_replica_n4":{ + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7, + "coreNode":"core_node6"}}, + "COLL_1r":{"shard1_replica_n4":{ + "INDEX.sizeInBytes":4.46826689E8, + "INDEX.sizeInGB":0.4161397824063897, + "coreNode":"core_node6"}}, + "COLL_8":{"shard1_replica_n4":{ + "INDEX.sizeInBytes":356048.0, + "INDEX.sizeInGB":3.315955400466919E-4, + "coreNode":"core_node6"}}, + "COLL_q":{"shard1_replica_n4":{ + "INDEX.sizeInBytes":4.9242789E8, + "INDEX.sizeInGB":0.45860921032726765, + "coreNode":"core_node6"}}, + "COLL_4":{"shard1_replica_n4":{ + "INDEX.sizeInBytes":2.59832461E8, + "INDEX.sizeInGB":0.2419878365471959, + "coreNode":"core_node6"}}, + "COLL_1x":{"shard1_replica_n4":{ + "INDEX.sizeInBytes":4255591.0, + "INDEX.sizeInGB":0.003963327966630459, + "coreNode":"core_node6"}}, + "COLL_1t":{"shard1_replica_n4":{ + "INDEX.sizeInBytes":8.5380419785E10, + "INDEX.sizeInGB":79.51671237591654, + "coreNode":"core_node6"}}, + "COLL_2k":{"shard1_replica_n4":{ + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7, + "coreNode":"core_node6"}}, + "COLL_22":{"shard1_replica_n2":{ + "INDEX.sizeInBytes":2.436290627E10, + "INDEX.sizeInGB":22.689724592491984, + "coreNode":"core_node5"}}}}, + "N_7_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4268.472709655762, + "sysprop.pool":"pool-01", + "node":"N_7_solr", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + 
"shard13_0_0_replica_n1712":{ + "INDEX.sizeInBytes":1.30437704659E11, + "INDEX.sizeInGB":121.47957892995328, + "coreNode":"core_node1713"}, + "shard13_0_1_replica_n1766":{ + "INDEX.sizeInBytes":1.30339106107E11, + "INDEX.sizeInGB":121.38775187265128, + "coreNode":"core_node1767"}, + "shard13_1_0_replica_n1688":{ + "INDEX.sizeInBytes":1.29592823396E11, + "INDEX.sizeInGB":120.69272193685174, + "coreNode":"core_node1689"}, + "shard13_1_1_replica_n806":{ + "INDEX.sizeInBytes":1.2961448776E11, + "INDEX.sizeInGB":120.71289844810963, + "coreNode":"core_node808"}, + "shard15_0_0_replica_n607":{ + "INDEX.sizeInBytes":1.27258670055E11, + "INDEX.sizeInGB":118.5188722377643, + "coreNode":"core_node609"}, + "shard15_0_1_replica_n608":{ + "INDEX.sizeInBytes":1.2722802278E11, + "INDEX.sizeInGB":118.49032973870635, + "coreNode":"core_node610"}}}}, + "N_a_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4262.172649383545, + "sysprop.pool":"pool-01", + "node":"N_a_solr", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard14_0_0_replica_n1118":{ + "INDEX.sizeInBytes":1.30313698451E11, + "INDEX.sizeInGB":121.36408914905041, + "coreNode":"core_node1119"}, + "shard14_0_1_replica_n1124":{ + "INDEX.sizeInBytes":1.31102045065E11, + "INDEX.sizeInGB":122.09829414729029, + "coreNode":"core_node1125"}, + "shard14_1_0_replica_n833":{ + "INDEX.sizeInBytes":1.27418065808E11, + "INDEX.sizeInGB":118.66732110083103, + "coreNode":"core_node835"}, + "shard14_1_1_replica_n834":{ + "INDEX.sizeInBytes":1.29318568492E11, + "INDEX.sizeInGB":120.43730215355754, + "coreNode":"core_node836"}, + "shard15_1_0_replica_n1174":{ + "INDEX.sizeInBytes":1.33321224738E11, + "INDEX.sizeInGB":124.16506627388299, + "coreNode":"core_node1175"}, + "shard3_0_0_replica_n1808":{ + "INDEX.sizeInBytes":1.29798330608E11, + "INDEX.sizeInGB":120.88411544263363, + "coreNode":"core_node1809"}}}}, + "N_13_solr":{ + "isLive":true, + "cores":5.0, + 
"freedisk":718.1634063720703, + "sysprop.pool":"pool-02", + "node":"N_13_solr", + "sysprop.az":"us-east-1c", + "totaldisk":999.51171875, + "withCollection":null, + "replicas":{ + "COLL_1":{ + "shard1_0_0_replica_n55":{ + "INDEX.sizeInBytes":4.5921892273E10, + "INDEX.sizeInGB":42.76809494290501, + "coreNode":"core_node57"}, + "shard1_0_1_replica_n56":{ + "INDEX.sizeInBytes":4.4932001726E10, + "INDEX.sizeInGB":41.846187530085444, + "coreNode":"core_node58"}, + "shard1_1_0_replica_n59":{ + "INDEX.sizeInBytes":4.3783419579E10, + "INDEX.sizeInGB":40.77648704778403, + "coreNode":"core_node61"}, + "shard1_1_1_replica_n60":{ + "INDEX.sizeInBytes":4.811959042E10, + "INDEX.sizeInGB":44.814860839396715, + "coreNode":"core_node62"}}, + "COLL_0":{"shard2_replica_n10":{ + "INDEX.sizeInBytes":3.4248182159E10, + "INDEX.sizeInGB":31.896105184219778, + "coreNode":"core_node13"}}}}, + "N_1d_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4273.009799957275, + "sysprop.pool":"pool-01", + "node":"N_1d_solr", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard12_1_0_replica_n1788":{ + "INDEX.sizeInBytes":1.27487519935E11, + "INDEX.sizeInGB":118.73200529720634, + "coreNode":"core_node1789"}, + "shard14_1_0_replica_n1128":{ + "INDEX.sizeInBytes":1.27407685053E11, + "INDEX.sizeInGB":118.65765326935798, + "coreNode":"core_node1129", + "leader":true}, + "shard14_1_1_replica_n1740":{ + "INDEX.sizeInBytes":1.29231781669E11, + "INDEX.sizeInGB":120.35647562611848, + "coreNode":"core_node1741", + "leader":true}, + "shard15_0_0_replica_n2":{ + "INDEX.sizeInBytes":1.27262832088E11, + "INDEX.sizeInGB":118.5227484330535, + "coreNode":"core_node732", + "leader":true}, + "shard3_1_0_replica_n423":{ + "INDEX.sizeInBytes":1.2828759808E11, + "INDEX.sizeInGB":119.47713613510132, + "coreNode":"core_node425", + "leader":true}, + "shard3_1_1_replica_n424":{ + "INDEX.sizeInBytes":1.29948029547E11, + "INDEX.sizeInGB":121.02353344392031, + 
"coreNode":"core_node426", + "leader":true}}}}, + "N_1m_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4257.921604156494, + "sysprop.pool":"pool-01", + "node":"N_1m_solr", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard1_1_0_replica_n1678":{ + "INDEX.sizeInBytes":1.28970690262E11, + "INDEX.sizeInGB":120.11331530474126, + "coreNode":"core_node1679"}, + "shard6_1_0_replica_n1166":{ + "INDEX.sizeInBytes":1.29376799009E11, + "INDEX.sizeInGB":120.49153354857117, + "coreNode":"core_node1167"}, + "shard6_1_1_replica_n1744":{ + "INDEX.sizeInBytes":1.31273933482E11, + "INDEX.sizeInGB":122.25837771035731, + "coreNode":"core_node1745"}, + "shard8_0_0_replica_n886":{ + "INDEX.sizeInBytes":1.30145902623E11, + "INDEX.sizeInGB":121.20781710650772, + "coreNode":"core_node887"}, + "shard8_0_1_replica_n892":{ + "INDEX.sizeInBytes":1.32681734677E11, + "INDEX.sizeInGB":123.56949474383146, + "coreNode":"core_node893"}, + "shard8_1_1_replica_n1710":{ + "INDEX.sizeInBytes":1.33374089494E11, + "INDEX.sizeInGB":124.21430041454732, + "coreNode":"core_node1711"}}}}, + "N_17_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4093.756145477295, + "sysprop.pool":"pool-01", + "node":"N_17_solr", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard11_1_1_replica_n762":{ + "INDEX.sizeInBytes":1.30871431234E11, + "INDEX.sizeInGB":121.88351828046143, + "coreNode":"core_node768", + "leader":true}, + "shard12_0_0_replica_n1750":{ + "INDEX.sizeInBytes":1.2936875619E11, + "INDEX.sizeInGB":120.48404308967292, + "coreNode":"core_node1751"}, + "shard12_0_1_replica_n1698":{ + "INDEX.sizeInBytes":1.30350286057E11, + "INDEX.sizeInGB":121.39816401246935, + "coreNode":"core_node1699"}, + "shard14_0_0_replica_n1120":{ + "INDEX.sizeInBytes":1.3029908264E11, + "INDEX.sizeInGB":121.3504771143198, + "coreNode":"core_node1121"}, + "shard14_0_1_replica_n1122":{ + 
"INDEX.sizeInBytes":1.31146492351E11, + "INDEX.sizeInGB":122.13968890812248, + "coreNode":"core_node1123"}, + "shard18_0_1_replica_n2":{ + "INDEX.sizeInBytes":1.28174988934E11, + "INDEX.sizeInGB":119.37226069532335, + "coreNode":"core_node877"}}}}, + "N_11_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4264.325901031494, + "sysprop.pool":"pool-01", + "node":"N_11_solr", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard12_1_1_replica_n2":{ + "INDEX.sizeInBytes":1.26693447901E11, + "INDEX.sizeInGB":117.99246808607131, + "coreNode":"core_node662"}, + "shard6_0_0_replica_n1209":{ + "INDEX.sizeInBytes":1.28939953876E11, + "INDEX.sizeInGB":120.08468981459737, + "coreNode":"core_node1210"}, + "shard6_0_1_replica_n1211":{ + "INDEX.sizeInBytes":1.28744354495E11, + "INDEX.sizeInGB":119.90252369549125, + "coreNode":"core_node1212"}, + "shard9_0_1_replica_n436":{ + "INDEX.sizeInBytes":1.29282915395E11, + "INDEX.sizeInGB":120.40409761946648, + "coreNode":"core_node438"}, + "shard9_1_0_replica_n1152":{ + "INDEX.sizeInBytes":1.31406038908E11, + "INDEX.sizeInGB":122.3814104758203, + "coreNode":"core_node1153"}, + "shard9_1_1_replica_n1154":{ + "INDEX.sizeInBytes":1.33894519282E11, + "INDEX.sizeInGB":124.69898842461407, + "coreNode":"core_node1155"}}}}, + "N_z_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4215.115695953369, + "sysprop.pool":"pool-01", + "node":"N_z_solr", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard14_1_0_replica_n1126":{ + "INDEX.sizeInBytes":1.27443177079E11, + "INDEX.sizeInGB":118.69070779439062, + "coreNode":"core_node1127"}, + "shard1_0_0_replica_n1716":{ + "INDEX.sizeInBytes":5.7185112146E10, + "INDEX.sizeInGB":53.25778587348759, + "coreNode":"core_node1717"}, + "shard8_0_0_replica_n1730":{ + "INDEX.sizeInBytes":1.30170301246E11, + "INDEX.sizeInGB":121.23054009489715, + "coreNode":"core_node1731"}, + 
"shard8_0_1_replica_n1694":{ + "INDEX.sizeInBytes":1.39918850407E11, + "INDEX.sizeInGB":130.30958399828523, + "coreNode":"core_node1695", + "leader":true}, + "shard8_1_0_replica_n1706":{ + "INDEX.sizeInBytes":1.35679630668E11, + "INDEX.sizeInGB":126.361502956599, + "coreNode":"core_node1707", + "leader":true}, + "shard8_1_1_replica_n1754":{ + "INDEX.sizeInBytes":1.33314153125E11, + "INDEX.sizeInGB":124.15848032105714, + "coreNode":"core_node1755", + "leader":true}}}}, + "N_t_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4266.856658935547, + "sysprop.pool":"pool-01", + "node":"N_t_solr", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard10_0_0_replica_n1732":{ + "INDEX.sizeInBytes":1.2914349283E11, + "INDEX.sizeInGB":120.27425023727119, + "coreNode":"core_node1733"}, + "shard10_0_1_replica_n904":{ + "INDEX.sizeInBytes":1.28142990156E11, + "INDEX.sizeInGB":119.34245951101184, + "coreNode":"core_node905"}, + "shard10_1_1_replica_n1742":{ + "INDEX.sizeInBytes":1.30757016285E11, + "INDEX.sizeInGB":121.77696105558425, + "coreNode":"core_node1743", + "leader":true}, + "shard11_0_0_replica_n1183":{ + "INDEX.sizeInBytes":1.2777477116E11, + "INDEX.sizeInGB":118.99952884763479, + "coreNode":"core_node1185", + "leader":true}, + "shard11_0_1_replica_n1184":{ + "INDEX.sizeInBytes":1.27980394382E11, + "INDEX.sizeInGB":119.19103039614856, + "coreNode":"core_node1195", + "leader":true}, + "shard11_1_0_replica_n1790":{ + "INDEX.sizeInBytes":1.32416023485E11, + "INDEX.sizeInGB":123.32203191239387, + "coreNode":"core_node1791"}}}}, + "N_1c_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4181.229598999023, + "sysprop.pool":"pool-01", + "node":"N_1c_solr", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard18_0_0_replica_n1":{ + "INDEX.sizeInBytes":1.28011422432E11, + "INDEX.sizeInGB":119.21992751955986, + "coreNode":"core_node874", + 
"leader":true}, + "shard18_0_1_replica_n1":{ + "INDEX.sizeInBytes":1.30729375574E11, + "INDEX.sizeInGB":121.75121863745153, + "coreNode":"core_node876"}, + "shard5_0_0_replica_n998":{ + "INDEX.sizeInBytes":1.31937405764E11, + "INDEX.sizeInGB":122.87628442421556, + "coreNode":"core_node999", + "leader":true}, + "shard5_0_1_replica_n1702":{ + "INDEX.sizeInBytes":1.31521149156E11, + "INDEX.sizeInGB":122.48861524835229, + "coreNode":"core_node1703", + "leader":true}, + "shard5_1_0_replica_n1134":{ + "INDEX.sizeInBytes":1.30030877168E11, + "INDEX.sizeInGB":121.1006913036108, + "coreNode":"core_node1135", + "leader":true}, + "shard5_1_1_replica_n1140":{ + "INDEX.sizeInBytes":1.29917464329E11, + "INDEX.sizeInGB":120.99506736639887, + "coreNode":"core_node1141", + "leader":true}}}}, + "N_1i_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4266.027156829834, + "sysprop.pool":"pool-01", + "node":"N_1i_solr", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard10_1_0_replica_n1692":{ + "INDEX.sizeInBytes":1.27561685082E11, + "INDEX.sizeInGB":118.80107697285712, + "coreNode":"core_node1693", + "leader":true}, + "shard10_1_1_replica_n1778":{ + "INDEX.sizeInBytes":1.30255789623E11, + "INDEX.sizeInGB":121.31015735026449, + "coreNode":"core_node1779"}, + "shard17_0_0_replica_n1780":{ + "INDEX.sizeInBytes":1.30702509646E11, + "INDEX.sizeInGB":121.72619779221714, + "coreNode":"core_node1781"}, + "shard17_0_1_replica_n1116":{ + "INDEX.sizeInBytes":1.30694171889E11, + "INDEX.sizeInGB":121.71843265090138, + "coreNode":"core_node1117"}, + "shard17_1_0_replica_n1198":{ + "INDEX.sizeInBytes":1.29069936299E11, + "INDEX.sizeInGB":120.20574537944049, + "coreNode":"core_node1200"}, + "shard17_1_1_replica_n1199":{ + "INDEX.sizeInBytes":1.28764084367E11, + "INDEX.sizeInGB":119.92089857067913, + "coreNode":"core_node1203"}}}}, + "N_g_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4007.3253440856934, + 
"sysprop.pool":"pool-01", + "node":"N_g_solr", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard2_1_0_replica_n1680":{ + "INDEX.sizeInBytes":1.3012044407E11, + "INDEX.sizeInGB":121.18410698138177, + "coreNode":"core_node1681"}, + "shard5_0_0_replica_n1768":{ + "INDEX.sizeInBytes":1.31922267714E11, + "INDEX.sizeInGB":122.8621860165149, + "coreNode":"core_node1769"}, + "shard5_0_1_replica_n1770":{ + "INDEX.sizeInBytes":1.31464210597E11, + "INDEX.sizeInGB":122.43558708298951, + "coreNode":"core_node1771"}, + "shard5_1_0_replica_n1782":{ + "INDEX.sizeInBytes":1.30012462556E11, + "INDEX.sizeInGB":121.08354135975242, + "coreNode":"core_node1783"}, + "shard5_1_1_replica_n859":{ + "INDEX.sizeInBytes":1.29967769078E11, + "INDEX.sizeInGB":121.04191731475294, + "coreNode":"core_node861"}, + "shard9_0_0_replica_n1682":{ + "INDEX.sizeInBytes":1.29248772716E11, + "INDEX.sizeInGB":120.37229977175593, + "coreNode":"core_node1683"}}}}, + "N_8_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4262.037788391113, + "sysprop.pool":"pool-01", + "node":"N_8_solr", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard16_0_0_replica_n854":{ + "INDEX.sizeInBytes":1.2677230126E11, + "INDEX.sizeInGB":118.06590599939227, + "coreNode":"core_node856", + "leader":true}, + "shard16_0_1_replica_n855":{ + "INDEX.sizeInBytes":1.30788718518E11, + "INDEX.sizeInGB":121.80648606084287, + "coreNode":"core_node857", + "leader":true}, + "shard16_1_0_replica_n850":{ + "INDEX.sizeInBytes":1.28801317856E11, + "INDEX.sizeInGB":119.95557495951653, + "coreNode":"core_node852", + "leader":true}, + "shard16_1_1_replica_n851":{ + "INDEX.sizeInBytes":1.33685050832E11, + "INDEX.sizeInGB":124.50390572845936, + "coreNode":"core_node853", + "leader":true}, + "shard2_0_0_replica_n794":{ + "INDEX.sizeInBytes":1.29517293483E11, + "INDEX.sizeInGB":120.6223792238161, + 
"coreNode":"core_node796", + "leader":true}, + "shard2_0_1_replica_n795":{ + "INDEX.sizeInBytes":1.31328007233E11, + "INDEX.sizeInGB":122.30873781535774, + "coreNode":"core_node800", + "leader":true}}}}, + "N_1f_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4260.807849884033, + "sysprop.pool":"pool-01", + "node":"N_1f_solr", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard11_0_0_replica_n1216":{ + "INDEX.sizeInBytes":1.27720861488E11, + "INDEX.sizeInGB":118.94932155311108, + "coreNode":"core_node1217"}, + "shard11_0_1_replica_n1222":{ + "INDEX.sizeInBytes":1.27989218509E11, + "INDEX.sizeInGB":119.19924850482494, + "coreNode":"core_node1223"}, + "shard11_1_0_replica_n778":{ + "INDEX.sizeInBytes":1.32552454912E11, + "INDEX.sizeInGB":123.44909358024597, + "coreNode":"core_node779"}, + "shard11_1_1_replica_n782":{ + "INDEX.sizeInBytes":1.30995783614E11, + "INDEX.sizeInGB":121.99933045916259, + "coreNode":"core_node783"}, + "shard5_0_0_replica_n1000":{ + "INDEX.sizeInBytes":1.31960210955E11, + "INDEX.sizeInGB":122.89752341341227, + "coreNode":"core_node1001"}, + "shard5_0_1_replica_n1002":{ + "INDEX.sizeInBytes":1.31534942129E11, + "INDEX.sizeInGB":122.50146095547825, + "coreNode":"core_node1003"}}}}, + "N_v_solr":{ + "isLive":true, + "cores":5.0, + "freedisk":412.18456649780273, + "sysprop.pool":"pool-02", + "node":"N_v_solr", + "sysprop.az":"us-east-1a", + "totaldisk":999.51171875, + "withCollection":null, + "replicas":{ + "COLL_1":{ + "shard3_0_0_replica_n119":{ + "INDEX.sizeInBytes":4.3809517838E10, + "INDEX.sizeInGB":40.80079294554889, + "coreNode":"core_node120"}, + "shard3_0_1_replica_n121":{ + "INDEX.sizeInBytes":4.6310602091E10, + "INDEX.sizeInGB":43.13010917138308, + "coreNode":"core_node122"}, + "shard3_1_0_replica_n123":{ + "INDEX.sizeInBytes":4.5638162031E10, + "INDEX.sizeInGB":42.503850563429296, + "coreNode":"core_node124"}, + "shard3_1_1_replica_n125":{ + 
"INDEX.sizeInBytes":4.4257494507E10, + "INDEX.sizeInGB":41.21800373028964, + "coreNode":"core_node126"}}, + "COLL_0":{"shard3_replica_n16":{ + "INDEX.sizeInBytes":2.8932093807E10, + "INDEX.sizeInGB":26.94511209335178, + "coreNode":"core_node18"}}}}, + "N_m_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4267.171646118164, + "sysprop.pool":"pool-01", + "node":"N_m_solr", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard17_1_0_replica_n1226":{ + "INDEX.sizeInBytes":1.29049722959E11, + "INDEX.sizeInGB":120.18692023959011, + "coreNode":"core_node1227"}, + "shard17_1_1_replica_n1228":{ + "INDEX.sizeInBytes":1.28816978409E11, + "INDEX.sizeInGB":119.97015998605639, + "coreNode":"core_node1229"}, + "shard6_0_0_replica_n1207":{ + "INDEX.sizeInBytes":1.28936808614E11, + "INDEX.sizeInGB":120.08176056109369, + "coreNode":"core_node1208"}, + "shard6_0_1_replica_n1213":{ + "INDEX.sizeInBytes":1.28745543493E11, + "INDEX.sizeInGB":119.90363103616983, + "coreNode":"core_node1214"}, + "shard6_1_1_replica_n1170":{ + "INDEX.sizeInBytes":1.31256081422E11, + "INDEX.sizeInGB":122.24175168387592, + "coreNode":"core_node1171"}, + "shard9_0_1_replica_n1":{ + "INDEX.sizeInBytes":1.29063920601E11, + "INDEX.sizeInGB":120.20014282409102, + "coreNode":"core_node477"}}}}, + "N_16_solr":{ + "isLive":true, + "cores":5.0, + "freedisk":795.7872657775879, + "sysprop.pool":"pool-02", + "node":"N_16_solr", + "sysprop.az":"us-east-1b", + "totaldisk":999.51171875, + "withCollection":null, + "replicas":{ + "COLL_1":{ + "shard2_0_0_replica_n99":{ + "INDEX.sizeInBytes":4.8764329025E10, + "INDEX.sizeInGB":45.41532045695931, + "coreNode":"core_node100"}, + "shard2_0_1_replica_n101":{ + "INDEX.sizeInBytes":4.3740343099E10, + "INDEX.sizeInGB":40.73636894952506, + "coreNode":"core_node102"}, + "shard2_1_0_replica_n95":{ + "INDEX.sizeInBytes":4.5585236311E10, + "INDEX.sizeInGB":42.45455964561552, + "coreNode":"core_node96"}, + 
"shard2_1_1_replica_n97":{ + "INDEX.sizeInBytes":4.527594328E10, + "INDEX.sizeInGB":42.16650806367397, + "coreNode":"core_node98"}}, + "COLL_0":{"shard1_replica_n2":{ + "INDEX.sizeInBytes":3.3775978753E10, + "INDEX.sizeInGB":31.45633149240166, + "coreNode":"core_node5"}}}}, + "N_3a_solr":{ + "isLive":true, + "cores":5.0, + "freedisk":407.706729888916, + "sysprop.pool":"pool-02", + "node":"N_3a_solr", + "sysprop.az":"us-east-1b", + "totaldisk":999.51171875, + "withCollection":null, + "replicas":{ + "COLL_1":{ + "shard3_0_0_replica_n71":{ + "INDEX.sizeInBytes":4.5160600486E10, + "INDEX.sizeInGB":42.05908671580255, + "coreNode":"core_node73"}, + "shard3_0_1_replica_n72":{ + "INDEX.sizeInBytes":4.5879426317E10, + "INDEX.sizeInGB":42.72854543942958, + "coreNode":"core_node74"}, + "shard3_1_0_replica_n75":{ + "INDEX.sizeInBytes":4.5090380622E10, + "INDEX.sizeInGB":41.99368937127292, + "coreNode":"core_node77"}, + "shard3_1_1_replica_n76":{ + "INDEX.sizeInBytes":4.6849085882E10, + "INDEX.sizeInGB":43.631611282005906, + "coreNode":"core_node78"}}, + "COLL_0":{"shard3_replica_n14":{ + "INDEX.sizeInBytes":3.0819950704E10, + "INDEX.sizeInGB":28.70331583917141, + "coreNode":"core_node17"}}}}, + "N_u_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4260.821304321289, + "sysprop.pool":"pool-01", + "node":"N_u_solr", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard13_0_0_replica_n1256":{ + "INDEX.sizeInBytes":1.30381429251E11, + "INDEX.sizeInGB":121.42716837208718, + "coreNode":"core_node1257"}, + "shard13_0_1_replica_n1262":{ + "INDEX.sizeInBytes":1.30321828131E11, + "INDEX.sizeInGB":121.37166050355881, + "coreNode":"core_node1263"}, + "shard13_1_0_replica_n1762":{ + "INDEX.sizeInBytes":1.29567251239E11, + "INDEX.sizeInGB":120.66890600975603, + "coreNode":"core_node1763"}, + "shard13_1_1_replica_n920":{ + "INDEX.sizeInBytes":1.29634542289E11, + "INDEX.sizeInGB":120.73157568369061, + 
"coreNode":"core_node921", + "leader":true}, + "shard15_0_1_replica_n2":{ + "INDEX.sizeInBytes":1.27250282639E11, + "INDEX.sizeInGB":118.51106084790081, + "coreNode":"core_node734", + "leader":true}, + "shard8_1_0_replica_n1764":{ + "INDEX.sizeInBytes":1.35571920799E11, + "INDEX.sizeInGB":126.26119032409042, + "coreNode":"core_node1765"}}}}, + "N_e_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4334.874732971191, + "sysprop.pool":"pool-01", + "node":"N_e_solr", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard13_1_0_replica_n922":{ + "INDEX.sizeInBytes":1.29514183189E11, + "INDEX.sizeInGB":120.6194825368002, + "coreNode":"core_node923", + "leader":true}, + "shard13_1_1_replica_n924":{ + "INDEX.sizeInBytes":1.29598508564E11, + "INDEX.sizeInGB":120.69801666215062, + "coreNode":"core_node925"}, + "shard15_0_0_replica_n1":{ + "INDEX.sizeInBytes":1.27235871561E11, + "INDEX.sizeInGB":118.49763948563486, + "coreNode":"core_node731"}, + "shard18_0_0_replica_n1818":{ + "INDEX.sizeInBytes":1.28218931509E11, + "INDEX.sizeInGB":119.41318540740758, + "coreNode":"core_node1819"}, + "shard18_1_0_replica_n2":{ + "INDEX.sizeInBytes":1.29108586002E11, + "INDEX.sizeInGB":120.24174072034657, + "coreNode":"core_node672"}, + "shard1_0_1_replica_n1718":{ + "INDEX.sizeInBytes":5.9506746089E10, + "INDEX.sizeInGB":55.41997597459704, + "coreNode":"core_node1719", + "leader":true}}}}, + "N_29_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4303.548599243164, + "sysprop.pool":"pool-01", + "node":"N_29_solr", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard6_1_0_replica_n1792":{ + "INDEX.sizeInBytes":1.29365181039E11, + "INDEX.sizeInGB":120.48071347083896, + "coreNode":"core_node1793"}, + "shard7_0_1_replica_n2":{ + "INDEX.sizeInBytes":8.6794048237E10, + "INDEX.sizeInGB":80.83325646538287, + "coreNode":"core_node777"}, + "shard7_1_0_replica_n1142":{ 
+ "INDEX.sizeInBytes":1.2946739865E11, + "INDEX.sizeInGB":120.57591103948653, + "coreNode":"core_node1143"}, + "shard7_1_1_replica_n1758":{ + "INDEX.sizeInBytes":1.28546712309E11, + "INDEX.sizeInGB":119.7184550659731, + "coreNode":"core_node1759"}, + "shard8_0_0_replica_n1690":{ + "INDEX.sizeInBytes":1.30176337999E11, + "INDEX.sizeInGB":121.23616225924343, + "coreNode":"core_node1691", + "leader":true}, + "shard8_0_1_replica_n1786":{ + "INDEX.sizeInBytes":1.32692723859E11, + "INDEX.sizeInGB":123.57972921710461, + "coreNode":"core_node1787"}}}}, + "N_2u_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4266.648368835449, + "sysprop.pool":"pool-01", + "node":"N_2u_solr", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard12_1_0_replica_n2":{ + "INDEX.sizeInBytes":1.27387972534E11, + "INDEX.sizeInGB":118.63929455541074, + "coreNode":"core_node660"}, + "shard17_0_0_replica_n1734":{ + "INDEX.sizeInBytes":1.31102615765E11, + "INDEX.sizeInGB":122.09882565308362, + "coreNode":"core_node1735"}, + "shard17_0_1_replica_n1114":{ + "INDEX.sizeInBytes":1.30049647193E11, + "INDEX.sizeInGB":121.1181722516194, + "coreNode":"core_node1115"}, + "shard17_1_0_replica_n1224":{ + "INDEX.sizeInBytes":1.29066474889E11, + "INDEX.sizeInGB":120.20252169016749, + "coreNode":"core_node1225", + "leader":true}, + "shard17_1_1_replica_n1230":{ + "INDEX.sizeInBytes":1.287734207E11, + "INDEX.sizeInGB":119.92959370836616, + "coreNode":"core_node1231", + "leader":true}, + "shard3_1_1_replica_n1":{ + "INDEX.sizeInBytes":1.29953637358E11, + "INDEX.sizeInGB":121.02875612489879, + "coreNode":"core_node461"}}}}, + "N_2w_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4336.208312988281, + "sysprop.pool":"pool-01", + "node":"N_2w_solr", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard18_1_0_replica_n1":{ + "INDEX.sizeInBytes":1.28884297502E11, + 
"INDEX.sizeInGB":120.03285577706993, + "coreNode":"core_node671"}, + "shard18_1_1_replica_n1":{ + "INDEX.sizeInBytes":1.28226679933E11, + "INDEX.sizeInGB":119.42040168959647, + "coreNode":"core_node673"}, + "shard1_0_1_replica_n1676":{ + "INDEX.sizeInBytes":5.9557275352E10, + "INDEX.sizeInGB":55.46703501790762, + "coreNode":"core_node1677"}, + "shard1_1_1_replica_n1806":{ + "INDEX.sizeInBytes":1.2954748046E11, + "INDEX.sizeInGB":120.6504930369556, + "coreNode":"core_node1807"}, + "shard4_1_0_replica_n1774":{ + "INDEX.sizeInBytes":1.27659935903E11, + "INDEX.sizeInGB":118.89258018042892, + "coreNode":"core_node1775"}, + "shard4_1_1_replica_n1804":{ + "INDEX.sizeInBytes":1.27878088796E11, + "INDEX.sizeInGB":119.0957508943975, + "coreNode":"core_node1805"}}}}, + "N_74_solr":{ + "isLive":true, + "cores":5.0, + "freedisk":802.5921897888184, + "sysprop.pool":"pool-02", + "node":"N_74_solr", + "sysprop.az":"us-east-1a", + "totaldisk":999.51171875, + "withCollection":null, + "replicas":{ + "COLL_1":{ + "shard2_0_0_replica_n107":{ + "INDEX.sizeInBytes":4.3767024396E10, + "INDEX.sizeInGB":40.76121784374118, + "coreNode":"core_node108"}, + "shard2_0_1_replica_n109":{ + "INDEX.sizeInBytes":4.8622428842E10, + "INDEX.sizeInGB":45.28316561318934, + "coreNode":"core_node110"}, + "shard2_1_0_replica_n103":{ + "INDEX.sizeInBytes":4.4599223614E10, + "INDEX.sizeInGB":41.536263762041926, + "coreNode":"core_node104"}, + "shard2_1_1_replica_n105":{ + "INDEX.sizeInBytes":4.3768191618E10, + "INDEX.sizeInGB":40.762304903939366, + "coreNode":"core_node106"}}, + "COLL_0":{"shard1_replica_n4":{ + "INDEX.sizeInBytes":2.9252853492E10, + "INDEX.sizeInGB":27.24384282901883, + "coreNode":"core_node7"}}}}, + "N_6i_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4269.712917327881, + "sysprop.pool":"pool-01", + "node":"N_6i_solr", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard10_0_0_replica_n896":{ + 
"INDEX.sizeInBytes":1.29103730913E11, + "INDEX.sizeInGB":120.2372190663591, + "coreNode":"core_node897"}, + "shard10_0_1_replica_n1738":{ + "INDEX.sizeInBytes":1.28024871739E11, + "INDEX.sizeInGB":119.2324531627819, + "coreNode":"core_node1739"}, + "shard10_1_0_replica_n829":{ + "INDEX.sizeInBytes":1.27564995026E11, + "INDEX.sizeInGB":118.80415959842503, + "coreNode":"core_node831"}, + "shard10_1_1_replica_n830":{ + "INDEX.sizeInBytes":1.30273229534E11, + "INDEX.sizeInGB":121.32639953307807, + "coreNode":"core_node840"}, + "shard2_1_0_replica_n1726":{ + "INDEX.sizeInBytes":1.30025926492E11, + "INDEX.sizeInGB":121.0960806272924, + "coreNode":"core_node1727"}, + "shard2_1_1_replica_n978":{ + "INDEX.sizeInBytes":1.2815510735E11, + "INDEX.sizeInGB":119.35374452732503, + "coreNode":"core_node979"}}}}, + "N_dj_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4162.087951660156, + "sysprop.pool":"pool-01", + "node":"N_dj_solr", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard13_0_0_replica_n1748":{ + "INDEX.sizeInBytes":1.30427736106E11, + "INDEX.sizeInGB":121.47029499150813, + "coreNode":"core_node1749", + "leader":true}, + "shard13_0_1_replica_n1714":{ + "INDEX.sizeInBytes":1.30355121703E11, + "INDEX.sizeInGB":121.402667558752, + "coreNode":"core_node1715", + "leader":true}, + "shard18_0_1_replica_n771":{ + "INDEX.sizeInBytes":1.30821199599E11, + "INDEX.sizeInGB":121.83673642482609, + "coreNode":"core_node773", + "leader":true}, + "shard1_1_0_replica_n1":{ + "INDEX.sizeInBytes":1.29057719236E11, + "INDEX.sizeInGB":120.19436735287309, + "coreNode":"core_node471", + "leader":true}, + "shard7_1_0_replica_n926":{ + "INDEX.sizeInBytes":1.29963886019E11, + "INDEX.sizeInGB":121.03830093424767, + "coreNode":"core_node928", + "leader":true}, + "shard7_1_1_replica_n927":{ + "INDEX.sizeInBytes":1.28538540188E11, + "INDEX.sizeInGB":119.71084418520331, + "coreNode":"core_node941", + "leader":true}}}}, + 
"N_6c_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4269.135753631592, + "sysprop.pool":"pool-01", + "node":"N_6c_solr", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard17_0_0_replica_n842":{ + "INDEX.sizeInBytes":1.30743109221E11, + "INDEX.sizeInGB":121.76400909293443, + "coreNode":"core_node844", + "leader":true}, + "shard17_0_1_replica_n843":{ + "INDEX.sizeInBytes":1.30730929322E11, + "INDEX.sizeInGB":121.7526656780392, + "coreNode":"core_node848", + "leader":true}, + "shard4_0_0_replica_n443":{ + "INDEX.sizeInBytes":1.28741762257E11, + "INDEX.sizeInGB":119.90010948572308, + "coreNode":"core_node445", + "leader":true}, + "shard4_0_1_replica_n444":{ + "INDEX.sizeInBytes":1.28032413116E11, + "INDEX.sizeInGB":119.23947661742568, + "coreNode":"core_node446", + "leader":true}, + "shard4_1_0_replica_n455":{ + "INDEX.sizeInBytes":1.27664473589E11, + "INDEX.sizeInGB":118.89680622983724, + "coreNode":"core_node457", + "leader":true}, + "shard4_1_1_replica_n456":{ + "INDEX.sizeInBytes":1.27865802727E11, + "INDEX.sizeInGB":119.08430860098451, + "coreNode":"core_node458", + "leader":true}}}}, + "N_9o_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4265.881809234619, + "sysprop.pool":"pool-01", + "node":"N_9o_solr", + "sysprop.az":"us-east-1b", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard11_0_0_replica_n1218":{ + "INDEX.sizeInBytes":1.28002391411E11, + "INDEX.sizeInGB":119.21151672583073, + "coreNode":"core_node1219"}, + "shard11_0_1_replica_n1220":{ + "INDEX.sizeInBytes":1.28020049235E11, + "INDEX.sizeInGB":119.22796185594052, + "coreNode":"core_node1221"}, + "shard11_1_0_replica_n780":{ + "INDEX.sizeInBytes":1.32420261013E11, + "INDEX.sizeInGB":123.32597841788083, + "coreNode":"core_node781", + "leader":true}, + "shard11_1_1_replica_n784":{ + "INDEX.sizeInBytes":1.30909357727E11, + "INDEX.sizeInGB":121.91884007956833, + "coreNode":"core_node785"}, + 
"shard7_0_0_replica_n764":{ + "INDEX.sizeInBytes":1.28994593549E11, + "INDEX.sizeInGB":120.13557697553188, + "coreNode":"core_node766"}, + "shard7_0_1_replica_n765":{ + "INDEX.sizeInBytes":1.28908501869E11, + "INDEX.sizeInGB":120.0553978504613, + "coreNode":"core_node769"}}}}, + "N_4g_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4259.9677734375, + "sysprop.pool":"pool-01", + "node":"N_4g_solr", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard18_1_0_replica_n623":{ + "INDEX.sizeInBytes":1.28955475131E11, + "INDEX.sizeInGB":120.09914510976523, + "coreNode":"core_node625", + "leader":true}, + "shard18_1_1_replica_n624":{ + "INDEX.sizeInBytes":1.28190099634E11, + "INDEX.sizeInGB":119.38633363135159, + "coreNode":"core_node626", + "leader":true}, + "shard2_1_1_replica_n1812":{ + "INDEX.sizeInBytes":1.28164947427E11, + "INDEX.sizeInGB":119.36290881317109, + "coreNode":"core_node1813"}, + "shard8_1_1_replica_n1794":{ + "INDEX.sizeInBytes":1.33276674177E11, + "INDEX.sizeInGB":124.1235753307119, + "coreNode":"core_node1795"}, + "shard9_1_0_replica_n929":{ + "INDEX.sizeInBytes":1.31111103315E11, + "INDEX.sizeInGB":122.1067303000018, + "coreNode":"core_node931", + "leader":true}, + "shard9_1_1_replica_n930":{ + "INDEX.sizeInBytes":1.33928213329E11, + "INDEX.sizeInGB":124.73036845121533, + "coreNode":"core_node944", + "leader":true}}}}, + "N_cs_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4260.629165649414, + "sysprop.pool":"pool-01", + "node":"N_cs_solr", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard10_0_0_replica_n825":{ + "INDEX.sizeInBytes":1.29486149433E11, + "INDEX.sizeInGB":120.59337406698614, + "coreNode":"core_node827", + "leader":true}, + "shard10_0_1_replica_n826":{ + "INDEX.sizeInBytes":1.28038688927E11, + "INDEX.sizeInGB":119.245321421884, + "coreNode":"core_node828", + "leader":true}, + 
"shard15_1_0_replica_n953":{ + "INDEX.sizeInBytes":1.33515745782E11, + "INDEX.sizeInGB":124.34622811339796, + "coreNode":"core_node955", + "leader":true}, + "shard15_1_1_replica_n954":{ + "INDEX.sizeInBytes":1.30865977458E11, + "INDEX.sizeInGB":121.87843905575573, + "coreNode":"core_node956", + "leader":true}, + "shard6_1_0_replica_n935":{ + "INDEX.sizeInBytes":1.29597529819E11, + "INDEX.sizeInGB":120.69710513483733, + "coreNode":"core_node937", + "leader":true}, + "shard6_1_1_replica_n1704":{ + "INDEX.sizeInBytes":1.31274462707E11, + "INDEX.sizeInGB":122.25887058954686, + "coreNode":"core_node1705", + "leader":true}}}}, + "N_d4_solr":{ + "isLive":true, + "cores":5.0, + "freedisk":797.2159843444824, + "sysprop.pool":"pool-02", + "node":"N_d4_solr", + "sysprop.az":"us-east-1c", + "totaldisk":999.51171875, + "withCollection":null, + "replicas":{ + "COLL_1":{ + "shard2_0_0_replica_n67":{ + "INDEX.sizeInBytes":4.497304707E10, + "INDEX.sizeInGB":41.8844139855355, + "coreNode":"core_node69", + "leader":true}, + "shard2_0_1_replica_n68":{ + "INDEX.sizeInBytes":4.5692831033E10, + "INDEX.sizeInGB":42.554765039123595, + "coreNode":"core_node70", + "leader":true}, + "shard2_1_0_replica_n63":{ + "INDEX.sizeInBytes":4.5935880044E10, + "INDEX.sizeInGB":42.78112206980586, + "coreNode":"core_node65", + "leader":true}, + "shard2_1_1_replica_n64":{ + "INDEX.sizeInBytes":4.5166045429E10, + "INDEX.sizeInGB":42.064157714135945, + "coreNode":"core_node66", + "leader":true}}, + "COLL_0":{"shard1_replica_n1":{ + "INDEX.sizeInBytes":3.401835331E10, + "INDEX.sizeInGB":31.682060388848186, + "coreNode":"core_node3", + "leader":true}}}}, + "N_do_solr":{ + "isLive":true, + "cores":5.0, + "freedisk":407.25314712524414, + "sysprop.pool":"pool-02", + "node":"N_do_solr", + "sysprop.az":"us-east-1c", + "totaldisk":999.51171875, + "withCollection":null, + "replicas":{ + "COLL_1":{ + "shard3_0_0_replica_n111":{ + "INDEX.sizeInBytes":4.4957115524E10, + "INDEX.sizeInGB":41.86957657709718, + 
"coreNode":"core_node112", + "leader":true}, + "shard3_0_1_replica_n113":{ + "INDEX.sizeInBytes":4.577095697E10, + "INDEX.sizeInGB":42.62752548791468, + "coreNode":"core_node114", + "leader":true}, + "shard3_1_0_replica_n115":{ + "INDEX.sizeInBytes":4.3732753925E10, + "INDEX.sizeInGB":40.72930098045617, + "coreNode":"core_node116", + "leader":true}, + "shard3_1_1_replica_n117":{ + "INDEX.sizeInBytes":4.8532509927E10, + "INDEX.sizeInGB":45.19942209776491, + "coreNode":"core_node118", + "leader":true}}, + "COLL_0":{"shard3_replica_n12":{ + "INDEX.sizeInBytes":3.1297025422E10, + "INDEX.sizeInGB":29.147626293823123, + "coreNode":"core_node15", + "leader":true}}}}, + "N_4f_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4264.210151672363, + "sysprop.pool":"pool-01", + "node":"N_4f_solr", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard2_0_1_replica_n914":{ + "INDEX.sizeInBytes":1.31386626219E11, + "INDEX.sizeInGB":122.36333100032061, + "coreNode":"core_node915"}, + "shard2_1_0_replica_n974":{ + "INDEX.sizeInBytes":1.3001251468E11, + "INDEX.sizeInGB":121.0835899040103, + "coreNode":"core_node975", + "leader":true}, + "shard2_1_1_replica_n1684":{ + "INDEX.sizeInBytes":1.2812596905E11, + "INDEX.sizeInGB":119.32660737074912, + "coreNode":"core_node1685", + "leader":true}, + "shard3_0_1_replica_n2":{ + "INDEX.sizeInBytes":1.31838927317E11, + "INDEX.sizeInGB":122.78456922341138, + "coreNode":"core_node546", + "leader":true}, + "shard6_0_0_replica_n1180":{ + "INDEX.sizeInBytes":1.28922958966E11, + "INDEX.sizeInGB":120.06886207126081, + "coreNode":"core_node1182", + "leader":true}, + "shard6_0_1_replica_n1181":{ + "INDEX.sizeInBytes":1.28773562289E11, + "INDEX.sizeInGB":119.92972557339817, + "coreNode":"core_node1189", + "leader":true}}}}, + "N_1h_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4297.329685211182, + "sysprop.pool":"pool-01", + "node":"N_1h_solr", + "sysprop.az":"us-east-1b", + 
"totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard12_0_0_replica_n1696":{ + "INDEX.sizeInBytes":1.29369271388E11, + "INDEX.sizeInGB":120.48452290520072, + "coreNode":"core_node1697"}, + "shard12_0_1_replica_n1760":{ + "INDEX.sizeInBytes":1.30342308934E11, + "INDEX.sizeInGB":121.39073473773897, + "coreNode":"core_node1761"}, + "shard1_0_0_replica_n1728":{ + "INDEX.sizeInBytes":5.7176945428E10, + "INDEX.sizeInGB":53.25018002465367, + "coreNode":"core_node1729"}, + "shard3_0_1_replica_n508":{ + "INDEX.sizeInBytes":1.31866901019E11, + "INDEX.sizeInGB":122.81062176357955, + "coreNode":"core_node510"}, + "shard7_1_0_replica_n1144":{ + "INDEX.sizeInBytes":1.2949609012E11, + "INDEX.sizeInGB":120.60263205319643, + "coreNode":"core_node1145"}, + "shard7_1_1_replica_n1700":{ + "INDEX.sizeInBytes":1.28489170345E11, + "INDEX.sizeInGB":119.66486493591219, + "coreNode":"core_node1701"}}}}, + "N_7e_solr":{ + "isLive":true, + "cores":13.0, + "freedisk":873.6022491455078, + "sysprop.pool":"pool-03", + "node":"N_7e_solr", + "sysprop.az":"us-east-1b", + "totaldisk":999.51171875, + "withCollection":null, + "replicas":{ + "COLL_6":{"shard1_replica_n1":{ + "INDEX.sizeInBytes":1.6446420654E10, + "INDEX.sizeInGB":15.316922826692462, + "coreNode":"core_node3"}}, + "COLL_5":{"shard1_replica_n1":{ + "INDEX.sizeInBytes":5.854396964E9, + "INDEX.sizeInGB":5.452332053333521, + "coreNode":"core_node2", + "leader":true}}, + "COLL_l":{"shard1_replica_n1":{ + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7, + "coreNode":"core_node3"}}, + "COLL_x":{"shard1_replica_n1":{ + "INDEX.sizeInBytes":3.18270873E8, + "INDEX.sizeInGB":0.296412848867476, + "coreNode":"core_node3"}}, + "COLL_1b":{"shard1_replica_n1":{ + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7, + "coreNode":"core_node3"}}, + "COLL_1r":{"shard1_replica_n1":{ + "INDEX.sizeInBytes":4.12015174E8, + "INDEX.sizeInGB":0.38371903263032436, + "coreNode":"core_node3"}}, + 
"COLL_8":{"shard1_replica_n1":{ + "INDEX.sizeInBytes":356048.0, + "INDEX.sizeInGB":3.315955400466919E-4, + "coreNode":"core_node3"}}, + "COLL_q":{"shard1_replica_n1":{ + "INDEX.sizeInBytes":4.9242789E8, + "INDEX.sizeInGB":0.45860921032726765, + "coreNode":"core_node3"}}, + "COLL_4":{"shard1_replica_n1":{ + "INDEX.sizeInBytes":2.58881858E8, + "INDEX.sizeInGB":0.2411025185137987, + "coreNode":"core_node3"}}, + "COLL_1x":{"shard1_replica_n1":{ + "INDEX.sizeInBytes":4248411.0, + "INDEX.sizeInGB":0.00395664107054472, + "coreNode":"core_node3"}}, + "COLL_1t":{"shard1_replica_n1":{ + "INDEX.sizeInBytes":8.5774407615E10, + "INDEX.sizeInGB":79.8836421361193, + "coreNode":"core_node3"}}, + "COLL_2k":{"shard1_replica_n1":{ + "INDEX.sizeInBytes":135.0, + "INDEX.sizeInGB":1.257285475730896E-7, + "coreNode":"core_node3"}}, + "COLL_22":{"shard1_replica_n4":{ + "INDEX.sizeInBytes":2.4348956483E10, + "INDEX.sizeInGB":22.676732840947807, + "coreNode":"core_node6"}}}}, + "N_b9_solr":{ + "isLive":true, + "cores":5.0, + "freedisk":801.2417984008789, + "sysprop.pool":"pool-02", + "node":"N_b9_solr", + "sysprop.az":"us-east-1b", + "totaldisk":999.51171875, + "withCollection":null, + "replicas":{ + "COLL_1":{ + "shard1_0_0_replica_n87":{ + "INDEX.sizeInBytes":4.5100613575E10, + "INDEX.sizeInGB":42.0032195514068, + "coreNode":"core_node88"}, + "shard1_0_1_replica_n89":{ + "INDEX.sizeInBytes":4.6030616744E10, + "INDEX.sizeInGB":42.869352497160435, + "coreNode":"core_node90"}, + "shard1_1_0_replica_n91":{ + "INDEX.sizeInBytes":4.5724314347E10, + "INDEX.sizeInGB":42.5840861601755, + "coreNode":"core_node92"}, + "shard1_1_1_replica_n93":{ + "INDEX.sizeInBytes":4.574559386E10, + "INDEX.sizeInGB":42.603904251009226, + "coreNode":"core_node94"}}, + "COLL_0":{"shard2_replica_n6":{ + "INDEX.sizeInBytes":2.8865621899E10, + "INDEX.sizeInGB":26.883205304853618, + "coreNode":"core_node9"}}}}, + "N_aw_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4276.759601593018, + "sysprop.pool":"pool-01", + 
"node":"N_aw_solr", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard12_1_0_replica_n1":{ + "INDEX.sizeInBytes":1.27434400341E11, + "INDEX.sizeInGB":118.68253382015973, + "coreNode":"core_node659", + "leader":true}, + "shard12_1_1_replica_n1":{ + "INDEX.sizeInBytes":1.26701654869E11, + "INDEX.sizeInGB":118.00011142063886, + "coreNode":"core_node661", + "leader":true}, + "shard15_0_1_replica_n1816":{ + "INDEX.sizeInBytes":1.27129784031E11, + "INDEX.sizeInGB":118.39883777406067, + "coreNode":"core_node1817"}, + "shard18_1_1_replica_n1820":{ + "INDEX.sizeInBytes":1.28188518759E11, + "INDEX.sizeInGB":119.38486132677644, + "coreNode":"core_node1821"}, + "shard3_1_0_replica_n2":{ + "INDEX.sizeInBytes":1.28273400877E11, + "INDEX.sizeInGB":119.46391395945102, + "coreNode":"core_node460"}, + "shard4_1_1_replica_n1":{ + "INDEX.sizeInBytes":1.27899653279E11, + "INDEX.sizeInGB":119.11583438422531, + "coreNode":"core_node525"}}}}, + "N_3a7_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4263.317134857178, + "sysprop.pool":"pool-01", + "node":"N_3a7_solr", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard14_0_0_replica_n837":{ + "INDEX.sizeInBytes":1.30330451538E11, + "INDEX.sizeInGB":121.37969167716801, + "coreNode":"core_node839", + "leader":true}, + "shard14_0_1_replica_n838":{ + "INDEX.sizeInBytes":1.31168916273E11, + "INDEX.sizeInGB":122.1605728128925, + "coreNode":"core_node841", + "leader":true}, + "shard14_1_1_replica_n1824":{ + "INDEX.sizeInBytes":1.300425186E11, + "INDEX.sizeInGB":121.11153323203325, + "coreNode":"core_node1825"}, + "shard2_0_0_replica_n1822":{ + "INDEX.sizeInBytes":1.29476268104E11, + "INDEX.sizeInGB":120.58417136222124, + "coreNode":"core_node1823"}, + "shard3_1_1_replica_n2":{ + "INDEX.sizeInBytes":1.2992912768E11, + "INDEX.sizeInGB":121.00592970848083, + "coreNode":"core_node462"}, + "shard7_0_0_replica_n2":{ 
+ "INDEX.sizeInBytes":1.29074533898E11, + "INDEX.sizeInGB":120.21002722717822, + "coreNode":"core_node775", + "leader":true}}}}, + "N_303_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4111.4668045043945, + "sysprop.pool":"pool-01", + "node":"N_303_solr", + "sysprop.az":"us-east-1c", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard16_0_0_replica_n1784":{ + "INDEX.sizeInBytes":1.26747476604E11, + "INDEX.sizeInGB":118.04278623685241, + "coreNode":"core_node1785"}, + "shard16_0_1_replica_n986":{ + "INDEX.sizeInBytes":1.30738903625E11, + "INDEX.sizeInGB":121.76009232643992, + "coreNode":"core_node987"}, + "shard3_0_0_replica_n2":{ + "INDEX.sizeInBytes":1.29792212268E11, + "INDEX.sizeInGB":120.87841729447246, + "coreNode":"core_node544", + "leader":true}, + "shard4_0_1_replica_n1772":{ + "INDEX.sizeInBytes":1.28126128215E11, + "INDEX.sizeInGB":119.3267556047067, + "coreNode":"core_node1773"}, + "shard9_1_0_replica_n1150":{ + "INDEX.sizeInBytes":1.31117387108E11, + "INDEX.sizeInGB":122.11258253827691, + "coreNode":"core_node1151"}, + "shard9_1_1_replica_n1162":{ + "INDEX.sizeInBytes":1.36568824379E11, + "INDEX.sizeInGB":127.18962913285941, + "coreNode":"core_node1163"}}}}, + "N_3to_solr":{ + "isLive":true, + "cores":5.0, + "freedisk":794.5433731079102, + "sysprop.pool":"pool-02", + "node":"N_3to_solr", + "sysprop.az":"us-east-1a", + "totaldisk":999.51171875, + "withCollection":null, + "replicas":{ + "COLL_1":{ + "shard1_0_0_replica_n79":{ + "INDEX.sizeInBytes":4.644843302E10, + "INDEX.sizeInGB":43.258474227041006, + "coreNode":"core_node80", + "leader":true}, + "shard1_0_1_replica_n81":{ + "INDEX.sizeInBytes":4.4936912617E10, + "INDEX.sizeInGB":41.85076115373522, + "coreNode":"core_node82", + "leader":true}, + "shard1_1_0_replica_n83":{ + "INDEX.sizeInBytes":4.3892348528E10, + "INDEX.sizeInGB":40.87793503701687, + "coreNode":"core_node84", + "leader":true}, + "shard1_1_1_replica_n85":{ + "INDEX.sizeInBytes":5.1015133973E10, + 
"INDEX.sizeInGB":47.511545916087925, + "coreNode":"core_node86", + "leader":true}}, + "COLL_0":{"shard2_replica_n8":{ + "INDEX.sizeInBytes":3.0722710385E10, + "INDEX.sizeInGB":28.6127537349239, + "coreNode":"core_node11", + "leader":true}}}}, + "N_65p_solr":{ + "isLive":true, + "cores":6.0, + "freedisk":4260.997627258301, + "sysprop.pool":"pool-01", + "node":"N_65p_solr", + "sysprop.az":"us-east-1a", + "totaldisk":4998.009765625, + "withCollection":null, + "replicas":{"COLL_2":{ + "shard10_1_0_replica_n1796":{ + "INDEX.sizeInBytes":1.27583656591E11, + "INDEX.sizeInGB":118.82153953518718, + "coreNode":"core_node1797"}, + "shard15_1_0_replica_n1172":{ + "INDEX.sizeInBytes":1.33316507698E11, + "INDEX.sizeInGB":124.16067318804562, + "coreNode":"core_node1173"}, + "shard15_1_1_replica_n1746":{ + "INDEX.sizeInBytes":1.30883359905E11, + "INDEX.sizeInGB":121.89462772104889, + "coreNode":"core_node1747"}, + "shard3_0_0_replica_n1":{ + "INDEX.sizeInBytes":1.29871412511E11, + "INDEX.sizeInGB":120.95217826869339, + "coreNode":"core_node543"}, + "shard3_0_1_replica_n1":{ + "INDEX.sizeInBytes":1.31838835644E11, + "INDEX.sizeInGB":122.784483846277, + "coreNode":"core_node545"}, + "shard7_0_0_replica_n1":{ + "INDEX.sizeInBytes":1.29027793373E11, + "INDEX.sizeInGB":120.16649672109634, + "coreNode":"core_node774"}}}}}, + "collectionStats":{ + "COLL_2":{ + "activeShards":72, + "inactiveShards":0, + "rf":2, + "maxShardsPerNode":1, + "maxActualShardsPerNode":6, + "minActualShardsPerNode":6, + "maxShardReplicasPerNode":1, + "minShardReplicasPerNode":1, + "numCores":216, + "numNodes":36, + "maxCoresPerNode":6, + "minCoresPerNode":6, + "avgShardSize":119.19235682340029, + "maxShardSize":130.30958399828523, + "minShardSize":53.25648116040975}, + "COLL_1":{ + "activeShards":12, + "inactiveShards":0, + "rf":3, + "maxShardsPerNode":1, + "maxActualShardsPerNode":4, + "minActualShardsPerNode":4, + "maxShardReplicasPerNode":1, + "minShardReplicasPerNode":1, + "numCores":36, + "numNodes":9, + 
"maxCoresPerNode":4, + "minCoresPerNode":4, + "avgShardSize":42.76741669047624, + "maxShardSize":47.511545916087925, + "minShardSize":40.72930098045617}, + "COLL_0":{ + "activeShards":3, + "inactiveShards":0, + "rf":3, + "maxShardsPerNode":1, + "maxActualShardsPerNode":1, + "minActualShardsPerNode":1, + "maxShardReplicasPerNode":1, + "minShardReplicasPerNode":1, + "numCores":9, + "numNodes":9, + "maxCoresPerNode":1, + "minCoresPerNode":1, + "avgShardSize":29.814146805865068, + "maxShardSize":31.682060388848186, + "minShardSize":28.6127537349239}, + "COLL_6":{ + "activeShards":1, + "inactiveShards":0, + "rf":3, + "maxShardsPerNode":1, + "maxActualShardsPerNode":1, + "minActualShardsPerNode":1, + "maxShardReplicasPerNode":1, + "minShardReplicasPerNode":1, + "numCores":3, + "numNodes":3, + "maxCoresPerNode":1, + "minCoresPerNode":1, + "avgShardSize":41.83655313309282, + "maxShardSize":41.83655313309282, + "minShardSize":41.83655313309282}, + "COLL_5":{ + "activeShards":1, + "inactiveShards":0, + "rf":1, + "maxShardsPerNode":1, + "maxActualShardsPerNode":1, + "minActualShardsPerNode":1, + "maxShardReplicasPerNode":1, + "minShardReplicasPerNode":1, + "numCores":1, + "numNodes":1, + "maxCoresPerNode":1, + "minCoresPerNode":1, + "avgShardSize":5.452332053333521, + "maxShardSize":5.452332053333521, + "minShardSize":5.452332053333521}, + "COLL_l":{ + "activeShards":1, + "inactiveShards":0, + "rf":3, + "maxShardsPerNode":1, + "maxActualShardsPerNode":1, + "minActualShardsPerNode":1, + "maxShardReplicasPerNode":1, + "minShardReplicasPerNode":1, + "numCores":3, + "numNodes":3, + "maxCoresPerNode":1, + "minCoresPerNode":1, + "avgShardSize":1.257285475730896E-7, + "maxShardSize":1.257285475730896E-7, + "minShardSize":1.257285475730896E-7}, + "COLL_x":{ + "activeShards":1, + "inactiveShards":0, + "rf":3, + "maxShardsPerNode":1, + "maxActualShardsPerNode":1, + "minActualShardsPerNode":1, + "maxShardReplicasPerNode":1, + "minShardReplicasPerNode":1, + "numCores":3, + "numNodes":3, 
+ "maxCoresPerNode":1, + "minCoresPerNode":1, + "avgShardSize":0.2880448754876852, + "maxShardSize":0.2880448754876852, + "minShardSize":0.2880448754876852}, + "COLL_1b":{ + "activeShards":1, + "inactiveShards":0, + "rf":3, + "maxShardsPerNode":1, + "maxActualShardsPerNode":1, + "minActualShardsPerNode":1, + "maxShardReplicasPerNode":1, + "minShardReplicasPerNode":1, + "numCores":3, + "numNodes":3, + "maxCoresPerNode":1, + "minCoresPerNode":1, + "avgShardSize":1.257285475730896E-7, + "maxShardSize":1.257285475730896E-7, + "minShardSize":1.257285475730896E-7}, + "COLL_1r":{ + "activeShards":1, + "inactiveShards":0, + "rf":3, + "maxShardsPerNode":1, + "maxActualShardsPerNode":1, + "minActualShardsPerNode":1, + "maxShardReplicasPerNode":1, + "minShardReplicasPerNode":1, + "numCores":3, + "numNodes":3, + "maxCoresPerNode":1, + "minCoresPerNode":1, + "avgShardSize":0.39663587138056755, + "maxShardSize":0.39663587138056755, + "minShardSize":0.39663587138056755}, + "COLL_8":{ + "activeShards":1, + "inactiveShards":0, + "rf":3, + "maxShardsPerNode":1, + "maxActualShardsPerNode":1, + "minActualShardsPerNode":1, + "maxShardReplicasPerNode":1, + "minShardReplicasPerNode":1, + "numCores":3, + "numNodes":3, + "maxCoresPerNode":1, + "minCoresPerNode":1, + "avgShardSize":3.718072548508644E-4, + "maxShardSize":3.718072548508644E-4, + "minShardSize":3.718072548508644E-4}, + "COLL_q":{ + "activeShards":1, + "inactiveShards":0, + "rf":3, + "maxShardsPerNode":1, + "maxActualShardsPerNode":1, + "minActualShardsPerNode":1, + "maxShardReplicasPerNode":1, + "minShardReplicasPerNode":1, + "numCores":3, + "numNodes":3, + "maxCoresPerNode":1, + "minCoresPerNode":1, + "avgShardSize":0.45860921032726765, + "maxShardSize":0.45860921032726765, + "minShardSize":0.45860921032726765}, + "COLL_4":{ + "activeShards":1, + "inactiveShards":0, + "rf":3, + "maxShardsPerNode":1, + "maxActualShardsPerNode":1, + "minActualShardsPerNode":1, + "maxShardReplicasPerNode":1, + "minShardReplicasPerNode":1, + 
"numCores":3, + "numNodes":3, + "maxCoresPerNode":1, + "minCoresPerNode":1, + "avgShardSize":0.24102374073117971, + "maxShardSize":0.24102374073117971, + "minShardSize":0.24102374073117971}, + "COLL_1x":{ + "activeShards":1, + "inactiveShards":0, + "rf":3, + "maxShardsPerNode":1, + "maxActualShardsPerNode":1, + "minActualShardsPerNode":1, + "maxShardReplicasPerNode":1, + "minShardReplicasPerNode":1, + "numCores":3, + "numNodes":3, + "maxCoresPerNode":1, + "minCoresPerNode":1, + "avgShardSize":0.003971998579800129, + "maxShardSize":0.003971998579800129, + "minShardSize":0.003971998579800129}, + "COLL_1t":{ + "activeShards":1, + "inactiveShards":0, + "rf":3, + "maxShardsPerNode":1, + "maxActualShardsPerNode":1, + "minActualShardsPerNode":1, + "maxShardReplicasPerNode":1, + "minShardReplicasPerNode":1, + "numCores":3, + "numNodes":3, + "maxCoresPerNode":1, + "minCoresPerNode":1, + "avgShardSize":81.47750116791576, + "maxShardSize":81.47750116791576, + "minShardSize":81.47750116791576}, + "COLL_2k":{ + "activeShards":1, + "inactiveShards":0, + "rf":3, + "maxShardsPerNode":1, + "maxActualShardsPerNode":1, + "minActualShardsPerNode":1, + "maxShardReplicasPerNode":1, + "minShardReplicasPerNode":1, + "numCores":3, + "numNodes":3, + "maxCoresPerNode":1, + "minCoresPerNode":1, + "avgShardSize":1.257285475730896E-7, + "maxShardSize":1.257285475730896E-7, + "minShardSize":1.257285475730896E-7}, + "COLL_22":{ + "activeShards":1, + "inactiveShards":0, + "rf":3, + "maxShardsPerNode":1, + "maxActualShardsPerNode":1, + "minActualShardsPerNode":1, + "maxShardReplicasPerNode":1, + "minShardReplicasPerNode":1, + "numCores":3, + "numNodes":3, + "maxCoresPerNode":1, + "minCoresPerNode":1, + "avgShardSize":22.679232054390013, + "maxShardSize":22.679232054390013, + "minShardSize":22.679232054390013}}} \ No newline at end of file diff --git a/solr/core/src/test/org/apache/solr/SolrTestCaseJ4Test.java b/solr/core/src/test/org/apache/solr/SolrTestCaseJ4Test.java index 
795c3e8ff312..fc995e3d4ffb 100644 --- a/solr/core/src/test/org/apache/solr/SolrTestCaseJ4Test.java +++ b/solr/core/src/test/org/apache/solr/SolrTestCaseJ4Test.java @@ -19,6 +19,7 @@ import java.io.File; import org.apache.commons.io.FileUtils; +import org.apache.solr.common.params.ModifiableSolrParams; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -61,4 +62,25 @@ public static void AfterClass() throws Exception { public void testCorrectCore() throws Exception { assertEquals("should be core1", "core1", h.getCore().getName()); } + + @Test + public void testParams() throws Exception { + final ModifiableSolrParams params = new ModifiableSolrParams(); + assertEquals(params.toString(), params().toString()); + + params.add("q", "*:*"); + assertEquals(params.toString(), params("q", "*:*").toString()); + + params.add("rows", "42"); + assertEquals(params.toString(), params("q", "*:*", "rows", "42").toString()); + + expectThrows(RuntimeException.class, () -> { + params("parameterWithoutValue"); + }); + + expectThrows(RuntimeException.class, () -> { + params("q", "*:*", "rows", "42", "parameterWithoutValue"); + }); + } + } diff --git a/solr/core/src/test/org/apache/solr/cloud/CloudExitableDirectoryReaderTest.java b/solr/core/src/test/org/apache/solr/cloud/CloudExitableDirectoryReaderTest.java index e75d7007dd92..7cea5f0a057b 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CloudExitableDirectoryReaderTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/CloudExitableDirectoryReaderTest.java @@ -25,6 +25,7 @@ import com.codahale.metrics.Metered; import com.codahale.metrics.MetricRegistry; import org.apache.lucene.util.TestUtil; +import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.UpdateRequest; @@ -37,6 +38,7 @@ import 
org.apache.solr.handler.component.QueryComponent; import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.search.facet.FacetModule; +import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.slf4j.Logger; @@ -60,35 +62,63 @@ public class CloudExitableDirectoryReaderTest extends SolrCloudTestCase { private static final String COLLECTION = "exitable"; private static Map fiveHundredsByNode; + + /** + * Client used for all test requests. + *

+ * LBSolrClient (and by extension CloudSolrClient) has its own enforcement of timeAllowed + in an attempt to prevent "retrying" failed requests far longer than the client requested. + Because of this client side logic, we do not want to use any LBSolrClient (derivative) in + this test, in order to ensure that on a "slow" machine, the client doesn't pre-emptively + abort any of our requests that use very low 'timeAllowed' values. + 

+ *

+ * ie: This test is not about testing the SolrClient, so keep the SolrClient simple. + 

+ */ + private static SolrClient client; @BeforeClass public static void setupCluster() throws Exception { - Builder clusterBuilder = configureCluster(2) + // create one more node than shards, so that we also test the case of proxied requests. + Builder clusterBuilder = configureCluster(3) .addConfig("conf", TEST_PATH().resolve("configsets").resolve("exitable-directory").resolve("conf")); clusterBuilder.withMetrics(true); clusterBuilder .configure(); + // pick an arbitrary node to use for our requests + client = cluster.getRandomJetty(random()).newClient(); + CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 1) .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT); cluster.getSolrClient().waitForState(COLLECTION, DEFAULT_TIMEOUT, TimeUnit.SECONDS, (n, c) -> DocCollection.isFullyActive(n, c, 2, 1)); - fiveHundredsByNode = new LinkedHashMap<>(); + fiveHundredsByNode = new LinkedHashMap<>(); + int httpOk = 0; for (JettySolrRunner jetty: cluster.getJettySolrRunners()) { MetricRegistry metricRegistry = ((JettySolrRunnerWithMetrics)jetty).getMetricRegistry(); - Metered httpOk = (Metered) metricRegistry.getMetrics() - .get("org.eclipse.jetty.servlet.ServletContextHandler.2xx-responses"); - assertTrue("expeting some http activity during collection creation",httpOk.getCount()>0); + + httpOk += ((Metered) metricRegistry.getMetrics() + .get("org.eclipse.jetty.servlet.ServletContextHandler.2xx-responses")).getCount(); Metered old = fiveHundredsByNode.put(jetty.getNodeName(), (Metered) metricRegistry.getMetrics() .get("org.eclipse.jetty.servlet.ServletContextHandler.5xx-responses")); assertNull("expecting uniq nodenames",old); } - + assertTrue("expecting some http activity during collection creation", httpOk > 0); indexDocs(); } + + @AfterClass + public static void closeClient() throws Exception { + if (null != client) { + client.close(); + client = null; + } + } public static void indexDocs() throws Exception { int counter; @@ -109,7 +139,7 @@ public static 
void indexDocs() throws Exception { req.add(sdoc("id", Integer.toString(counter), "name", "dummy term doc" + counter, "num",""+counter)); - req.commit(cluster.getSolrClient(), COLLECTION); + req.commit(client, COLLECTION); } @Test @@ -205,7 +235,7 @@ public void testCreepThenBite() throws Exception { try(Trap catchClass = catchCount(boundary)){ params.set("boundary", boundary); - QueryResponse rsp = cluster.getSolrClient().query(COLLECTION, + QueryResponse rsp = client.query(COLLECTION, params); assertEquals(""+rsp, rsp.getStatus(), 0); assertNo500s(""+rsp); @@ -226,7 +256,7 @@ public void testCreepThenBite() throws Exception { try(Trap catchCount = catchCount(boundary)){ params.set("omitHeader", "" + omitHeader); params.set("boundary", boundary); - QueryResponse rsp = cluster.getSolrClient().query(COLLECTION, + QueryResponse rsp = client.query(COLLECTION, params); assertEquals(""+rsp, rsp.getStatus(), 0); assertNo500s(""+rsp); @@ -260,7 +290,7 @@ public void assertPartialResults(ModifiableSolrParams p) throws Exception { } public void assertPartialResults(ModifiableSolrParams p, Runnable postRequestCheck) throws Exception { - QueryResponse rsp = cluster.getSolrClient().query(COLLECTION, p); + QueryResponse rsp = client.query(COLLECTION, p); postRequestCheck.run(); assertEquals(rsp.getStatus(), 0); assertEquals(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY+" were expected at "+rsp, @@ -269,7 +299,7 @@ public void assertPartialResults(ModifiableSolrParams p, Runnable postRequestChe } public void assertSuccess(ModifiableSolrParams p) throws Exception { - QueryResponse rsp = cluster.getSolrClient().query(COLLECTION, p); + QueryResponse rsp = client.query(COLLECTION, p); assertEquals(rsp.getStatus(), 0); assertEquals("Wrong #docs in response", NUM_DOCS_PER_TYPE - 1, rsp.getResults().getNumFound()); assertNotEquals(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY+" weren't expected "+rsp, diff --git 
a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java index 843b238ad834..025460c895b6 100644 --- a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java @@ -81,7 +81,7 @@ public void beforeTest() throws Exception { fail("no overseer leader!"); } } - + @After public void afterTest() throws Exception { try { @@ -100,7 +100,9 @@ public void test() throws Exception { CloudSolrClient cloudClient = cluster.getSolrClient(); - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(coll, "conf1", 2, REPLICATION); + // random create tlog or pull type replicas with nrt + boolean isTlog = random().nextBoolean(); + CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(coll, "conf1", 2, 1, isTlog ? 1 : 0, !isTlog ? 1 : 0); create.setMaxShardsPerNode(2); create.setAutoAddReplicas(false); cloudClient.request(create); @@ -126,8 +128,8 @@ public void test() throws Exception { } } - int sourceNumCores = getNumOfCores(cloudClient, replica.getNodeName(), coll); - int targetNumCores = getNumOfCores(cloudClient, targetNode, coll); + int sourceNumCores = getNumOfCores(cloudClient, replica.getNodeName(), coll, replica.getType().name()); + int targetNumCores = getNumOfCores(cloudClient, targetNode, coll, replica.getType().name()); CollectionAdminRequest.MoveReplica moveReplica = createMoveReplicaRequest(coll, replica, targetNode); moveReplica.setInPlaceMove(inPlaceMove); @@ -146,8 +148,8 @@ public void test() throws Exception { Thread.sleep(500); } assertTrue(success); - assertEquals("should be one less core on the source node!", sourceNumCores - 1, getNumOfCores(cloudClient, replica.getNodeName(), coll)); - assertEquals("should be one more core on target node!", targetNumCores + 1, getNumOfCores(cloudClient, targetNode, coll)); + assertEquals("should be one less core on the source node!", 
sourceNumCores - 1, getNumOfCores(cloudClient, replica.getNodeName(), coll, replica.getType().name())); + assertEquals("should be one more core on target node!", targetNumCores + 1, getNumOfCores(cloudClient, targetNode, coll, replica.getType().name())); // wait for recovery boolean recovered = false; for (int i = 0; i < 300; i++) { @@ -230,6 +232,7 @@ public void test() throws Exception { assertEquals(100, cluster.getSolrClient().query(coll, new SolrQuery("*:*")).getResults().getNumFound()); } + //Commented out 5-Dec-2017 // @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-11458") @Test @@ -242,7 +245,9 @@ public void testFailedMove() throws Exception { CloudSolrClient cloudClient = cluster.getSolrClient(); - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(coll, "conf1", 2, REPLICATION); + // random create tlog or pull type replicas with nrt + boolean isTlog = random().nextBoolean(); + CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(coll, "conf1", 2, 1, isTlog ? 1 : 0, !isTlog ? 
1 : 0); create.setAutoAddReplicas(false); cloudClient.request(create); @@ -315,28 +320,40 @@ private Replica getRandomReplica(String coll, CloudSolrClient cloudClient) { } private void checkNumOfCores(CloudSolrClient cloudClient, String nodeName, String collectionName, int expectedCores) throws IOException, SolrServerException { - assertEquals(nodeName + " does not have expected number of cores",expectedCores, getNumOfCores(cloudClient, nodeName, collectionName)); + assertEquals(nodeName + " does not have expected number of cores", expectedCores, getNumOfCores(cloudClient, nodeName, collectionName)); } private int getNumOfCores(CloudSolrClient cloudClient, String nodeName, String collectionName) throws IOException, SolrServerException { + return getNumOfCores(cloudClient, nodeName, collectionName, null); + } + + private int getNumOfCores(CloudSolrClient cloudClient, String nodeName, String collectionName, String replicaType) throws IOException, SolrServerException { try (HttpSolrClient coreclient = getHttpSolrClient(cloudClient.getZkStateReader().getBaseUrlForNodeName(nodeName))) { CoreAdminResponse status = CoreAdminRequest.getStatus(null, coreclient); if (status.getCoreStatus().size() == 0) { return 0; } - // filter size by collection name - if (collectionName == null) { + if (collectionName == null && replicaType == null) { return status.getCoreStatus().size(); - } else { - int size = 0; - for (Map.Entry> stringNamedListEntry : status.getCoreStatus()) { + } + // filter size by collection name + int size = 0; + for (Map.Entry> stringNamedListEntry : status.getCoreStatus()) { + if (collectionName != null) { String coll = (String) stringNamedListEntry.getValue().findRecursive("cloud", "collection"); - if (collectionName.equals(coll)) { - size++; + if (!collectionName.equals(coll)) { + continue; + } + } + if (replicaType != null) { + String type = (String) stringNamedListEntry.getValue().findRecursive("cloud", "replicaType"); + if (!replicaType.equals(type)) { + 
continue; } } - return size; + size++; } + return size; } } diff --git a/solr/core/src/test/org/apache/solr/cloud/SplitShardTest.java b/solr/core/src/test/org/apache/solr/cloud/SplitShardTest.java index 9d4b74cb96a9..2e1f3c8e2c65 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SplitShardTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SplitShardTest.java @@ -18,19 +18,37 @@ package org.apache.solr.cloud; import java.io.IOException; +import java.lang.invoke.MethodHandles; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import org.apache.solr.client.solrj.SolrClient; +import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; +import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; +import org.apache.solr.client.solrj.request.UpdateRequest; +import org.apache.solr.client.solrj.response.UpdateResponse; +import org.apache.solr.common.SolrDocument; +import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrException; import org.apache.solr.common.cloud.DocCollection; +import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class SplitShardTest extends SolrCloudTestCase { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private final String COLLECTION_NAME = "splitshardtest-collection"; @@ -133,4 +151,143 @@ public void testSplitFuzz() throws Exception { assertEquals("wrong range in s1_1", expected1, delta1); } + + CloudSolrClient 
createCollection(String collectionName, int repFactor) throws Exception { + + CollectionAdminRequest + .createCollection(collectionName, "conf", 1, repFactor) + .setMaxShardsPerNode(100) + .process(cluster.getSolrClient()); + + cluster.waitForActiveCollection(collectionName, 1, repFactor); + + CloudSolrClient client = cluster.getSolrClient(); + client.setDefaultCollection(collectionName); + return client; + } + + + long getNumDocs(CloudSolrClient client) throws Exception { + String collectionName = client.getDefaultCollection(); + DocCollection collection = client.getZkStateReader().getClusterState().getCollection(collectionName); + Collection slices = collection.getSlices(); + + long totCount = 0; + for (Slice slice : slices) { + if (!slice.getState().equals(Slice.State.ACTIVE)) continue; + long lastReplicaCount = -1; + for (Replica replica : slice.getReplicas()) { + SolrClient replicaClient = getHttpSolrClient(replica.getBaseUrl() + "/" + replica.getCoreName()); + long numFound = 0; + try { + numFound = replicaClient.query(params("q", "*:*", "distrib", "false")).getResults().getNumFound(); + log.info("Replica count=" + numFound + " for " + replica); + } finally { + replicaClient.close(); + } + if (lastReplicaCount >= 0) { + assertEquals("Replica doc count for " + replica, lastReplicaCount, numFound); + } + lastReplicaCount = numFound; + } + totCount += lastReplicaCount; + } + + + long cloudClientDocs = client.query(new SolrQuery("*:*")).getResults().getNumFound(); + assertEquals("Sum of shard count should equal distrib query doc count", totCount, cloudClientDocs); + return totCount; + } + + void doLiveSplitShard(String collectionName, int repFactor, int nThreads) throws Exception { + final CloudSolrClient client = createCollection(collectionName, repFactor); + + final ConcurrentHashMap model = new ConcurrentHashMap<>(); // what the index should contain + final AtomicBoolean doIndex = new AtomicBoolean(true); + final AtomicInteger docsIndexed = new 
AtomicInteger(); + Thread[] indexThreads = new Thread[nThreads]; + try { + + for (int i=0; i { + while (doIndex.get()) { + try { + // Thread.sleep(10); // cap indexing rate at 100 docs per second per thread + int currDoc = docsIndexed.incrementAndGet(); + String docId = "doc_" + currDoc; + + // Try all docs in the same update request + UpdateRequest updateReq = new UpdateRequest(); + updateReq.add(sdoc("id", docId)); + // UpdateResponse ursp = updateReq.commit(client, collectionName); // uncomment this if you want a commit each time + UpdateResponse ursp = updateReq.process(client, collectionName); + assertEquals(0, ursp.getStatus()); // for now, don't accept any failures + if (ursp.getStatus() == 0) { + model.put(docId, 1L); // in the future, keep track of a version per document and reuse ids to keep index from growing too large + } + } catch (Exception e) { + fail(e.getMessage()); + break; + } + } + }); + } + + for (Thread thread : indexThreads) { + thread.start(); + } + + Thread.sleep(100); // wait for a few docs to be indexed before invoking split + int docCount = model.size(); + + CollectionAdminRequest.SplitShard splitShard = CollectionAdminRequest.splitShard(collectionName) + .setShardName("shard1"); + splitShard.process(client); + waitForState("Timed out waiting for sub shards to be active.", + collectionName, activeClusterShape(2, 3*repFactor)); // 2 repFactor for the new split shards, 1 repFactor for old replicas + + // make sure that docs were able to be indexed during the split + assertTrue(model.size() > docCount); + + Thread.sleep(100); // wait for a few more docs to be indexed after split + + } finally { + // shut down the indexers + doIndex.set(false); + for (Thread thread : indexThreads) { + thread.join(); + } + } + + client.commit(); // final commit is needed for visibility + + long numDocs = getNumDocs(client); + if (numDocs != model.size()) { + SolrDocumentList results = client.query(new SolrQuery("q","*:*", "fl","id", "rows", 
Integer.toString(model.size()) )).getResults(); + Map leftover = new HashMap<>(model); + for (SolrDocument doc : results) { + String id = (String) doc.get("id"); + leftover.remove(id); + } + log.error("MISSING DOCUMENTS: " + leftover); + } + + assertEquals("Documents are missing!", docsIndexed.get(), numDocs); + log.info("Number of documents indexed and queried : " + numDocs); + } + + + + @Test + public void testLiveSplit() throws Exception { + // Debugging tips: if this fails, it may be easier to debug by lowering the number fo threads to 1 and looping the test + // until you get another failure. + // You may need to further instrument things like DistributedZkUpdateProcessor to display the cluster state for the collection, etc. + // Using more threads increases the chance to hit a concurrency bug, but too many threads can overwhelm single-threaded buffering + // replay after the low level index split and result in subShard leaders that can't catch up and + // become active (a known issue that still needs to be resolved.) 
+ doLiveSplitShard("livesplit1", 1, 4); + } + + } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java b/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java index 1bc54f23eac2..c082e371ff48 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java @@ -27,7 +27,7 @@ public class TestClusterProperties extends SolrCloudTestCase { private ClusterProperties props; - + @BeforeClass public static void setupCluster() throws Exception { configureCluster(1).configure(); @@ -49,7 +49,7 @@ public void testClusterProperties() throws Exception { CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "false").process(cluster.getSolrClient()); assertEquals("false", props.getClusterProperty(ZkStateReader.LEGACY_CLOUD, "true")); } - + @Test public void testSetPluginClusterProperty() throws Exception { String propertyName = ClusterProperties.EXT_PROPRTTY_PREFIX + "pluginA.propertyA"; @@ -57,7 +57,7 @@ public void testSetPluginClusterProperty() throws Exception { .process(cluster.getSolrClient()); assertEquals("valueA", props.getClusterProperty(propertyName, null)); } - + @Test(expected = SolrException.class) public void testSetInvalidPluginClusterProperty() throws Exception { String propertyName = "pluginA.propertyA"; diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCryptoKeys.java b/solr/core/src/test/org/apache/solr/cloud/TestCryptoKeys.java index ca172e94e2f6..146ad82fb0bd 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCryptoKeys.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCryptoKeys.java @@ -195,7 +195,7 @@ public void test() throws Exception { } - public static byte[] readFile(String fname) throws IOException { + private byte[] readFile(String fname) throws IOException { byte[] buf = null; try (FileInputStream fis = new FileInputStream(getFile(fname))) { buf = new byte[fis.available()]; diff 
--git a/solr/core/src/test/org/apache/solr/cloud/TestQueryingOnDownCollection.java b/solr/core/src/test/org/apache/solr/cloud/TestQueryingOnDownCollection.java new file mode 100644 index 000000000000..1cd70f4fc373 --- /dev/null +++ b/solr/core/src/test/org/apache/solr/cloud/TestQueryingOnDownCollection.java @@ -0,0 +1,151 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.cloud; + +import java.lang.invoke.MethodHandles; +import java.util.List; +import java.util.Map; + +import org.apache.solr.client.solrj.SolrClient; +import org.apache.solr.client.solrj.SolrQuery; +import org.apache.solr.client.solrj.SolrRequest; +import org.apache.solr.client.solrj.impl.Http2SolrClient; +import org.apache.solr.client.solrj.request.CollectionAdminRequest; +import org.apache.solr.client.solrj.request.QueryRequest; +import org.apache.solr.client.solrj.request.UpdateRequest; +import org.apache.solr.common.SolrException; +import org.apache.solr.common.cloud.Replica; +import org.apache.solr.common.util.Utils; +import org.junit.BeforeClass; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class TestQueryingOnDownCollection extends SolrCloudTestCase { + + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private static final String COLLECTION_NAME = "infected"; + + private static final String USERNAME = "solr"; + private static final String PASSWORD = "solr"; + + @BeforeClass + public static void setupCluster() throws Exception { + configureCluster(3) + .addConfig("conf", configset("cloud-minimal")) + .withSecurityJson(STD_CONF) + .configure(); + } + + @Test + /** + * Assert that requests to "down collection", i.e. 
a collection which has all replicas in down state + * (but are hosted on nodes that are live), fail fast and throw meaningful exceptions + */ + public void testQueryToDownCollectionShouldFailFast() throws Exception { + + CollectionAdminRequest.createCollection(COLLECTION_NAME, "conf", 2, 1) + .setBasicAuthCredentials(USERNAME, PASSWORD) + .process(cluster.getSolrClient()); + + // Add some dummy documents + UpdateRequest update = (UpdateRequest) new UpdateRequest().setBasicAuthCredentials(USERNAME, PASSWORD); + for (int i = 0; i < 100; i++) { + update.add("id", Integer.toString(i)); + } + update.commit(cluster.getSolrClient(), COLLECTION_NAME); + + // Bring down replicas but keep nodes up. This could've been done by some combinations of collections API operations; + // however, to make it faster, altering cluster state directly! ;-) + downAllReplicas(); + + // assert all replicas are in down state + List replicas = getCollectionState(COLLECTION_NAME).getReplicas(); + for (Replica replica: replicas){ + assertEquals(replica.getState(), Replica.State.DOWN); + } + + // assert all nodes as active + assertEquals(3, cluster.getSolrClient().getClusterStateProvider().getLiveNodes().size()); + + SolrClient client = cluster.getJettySolrRunner(0).newClient(); + + SolrRequest req = new QueryRequest(new SolrQuery("*:*").setRows(0)).setBasicAuthCredentials(USERNAME, PASSWORD); + + // Without the SOLR-13793 fix, this causes requests to "down collection" to pile up (until the nodes run out + // of serviceable threads and they crash, even for other collections hosted on the nodes). 
+ SolrException error = expectThrows(SolrException.class, + "Request should fail after trying all replica nodes once", + () -> client.request(req, COLLECTION_NAME) + ); + + client.close(); + + assertEquals(error.code(), SolrException.ErrorCode.INVALID_STATE.code); + assertTrue(error.getMessage().contains("No active replicas found for collection: " + COLLECTION_NAME)); + + // run same set of tests on v2 client which uses V2HttpCall + Http2SolrClient v2Client = new Http2SolrClient.Builder(cluster.getJettySolrRunner(0).getBaseUrl().toString()) + .build(); + + error = expectThrows(SolrException.class, + "Request should fail after trying all replica nodes once", + () -> v2Client.request(req, COLLECTION_NAME) + ); + + v2Client.close(); + + assertEquals(error.code(), SolrException.ErrorCode.INVALID_STATE.code); + assertTrue(error.getMessage().contains("No active replicas found for collection: " + COLLECTION_NAME)); + } + + private void downAllReplicas() throws Exception { + byte[] collectionState = cluster.getZkClient().getData("/collections/" + COLLECTION_NAME + "/state.json", + null, null, true); + + Map> infectedState = (Map>) Utils.fromJSON(collectionState); + Map shards = (Map) infectedState.get(COLLECTION_NAME).get("shards"); + for(Map.Entry shard: shards.entrySet()) { + Map replicas = (Map) ((Map) shard.getValue() ).get("replicas"); + for (Map.Entry replica : replicas.entrySet()) { + ((Map) replica.getValue()).put("state", Replica.State.DOWN.toString()); + } + } + + cluster.getZkClient().setData("/collections/" + COLLECTION_NAME + "/state.json", Utils.toJSON(infectedState) + , true); + } + + protected static final String STD_CONF = "{\n" + + " \"authentication\":{\n" + + " \"blockUnknown\": true,\n" + + " \"class\":\"solr.BasicAuthPlugin\",\n" + + " \"credentials\":{\"solr\":\"EEKn7ywYk5jY8vG9TyqlG2jvYuvh1Q7kCCor6Hqm320= 6zkmjMjkMKyJX6/f0VarEWQujju5BzxZXub6WOrEKCw=\"}\n" + + " },\n" + + " \"authorization\":{\n" + + " 
\"class\":\"solr.RuleBasedAuthorizationPlugin\",\n" + + " \"permissions\":[\n" + + " {\"name\":\"security-edit\", \"role\":\"admin\"},\n" + + " {\"name\":\"collection-admin-edit\", \"role\":\"admin\"},\n" + + " {\"name\":\"core-admin-edit\", \"role\":\"admin\"}\n" + + " ],\n" + + " \"user-role\":{\"solr\":\"admin\"}\n" + + " }\n" + + "}"; + + +} diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java index a5dedc3a8a6a..68898fb4a6aa 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java @@ -24,6 +24,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrServerException; @@ -31,7 +32,9 @@ import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.client.solrj.request.V2Request; +import org.apache.solr.cloud.MiniSolrCloudCluster; import org.apache.solr.cloud.SolrCloudTestCase; +import org.apache.solr.common.cloud.CollectionStatePredicate; import org.apache.solr.common.cloud.ClusterStateUtil; import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; @@ -49,16 +52,15 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +@org.apache.solr.util.LogLevel("org.apache.solr.cloud.autoscaling=DEBUG;org.apache.solr.cloud.autoscaling.NodeLostTrigger=TRACE;org.apache.solr.cloud.Overseer=DEBUG;org.apache.solr.cloud.overseer=DEBUG") public class AutoAddReplicasIntegrationTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - private 
static final String COLLECTION1 = "testSimple1"; - private static final String COLLECTION2 = "testSimple2"; + protected String getConfigSet() { return "cloud-minimal"; } - + @Before public void setupCluster() throws Exception { configureCluster(3) @@ -82,102 +84,267 @@ public void tearDown() throws Exception { } } + /** + * Test that basic autoAddReplicaLogic kicks in when a node is lost + */ @Test public void testSimple() throws Exception { - JettySolrRunner jetty1 = cluster.getJettySolrRunner(0); - JettySolrRunner jetty2 = cluster.getJettySolrRunner(1); - JettySolrRunner jetty3 = cluster.getJettySolrRunner(2); - CollectionAdminRequest.createCollection(COLLECTION1, "conf", 2, 2) - .setCreateNodeSet(jetty1.getNodeName()+","+jetty2.getNodeName()) - .setAutoAddReplicas(true) - .setMaxShardsPerNode(2) - .process(cluster.getSolrClient()); + final String COLLECTION = "test_simple"; + final ZkStateReader zkStateReader = cluster.getSolrClient().getZkStateReader(); + final JettySolrRunner jetty1 = cluster.getJettySolrRunner(1); + final JettySolrRunner jetty2 = cluster.getJettySolrRunner(2); + log.info("Creating {} using jetty1:{}/{} and jetty2:{}/{}", COLLECTION, + jetty1.getNodeName(), jetty1.getLocalPort(), + jetty2.getNodeName(), jetty2.getLocalPort()); + + CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 2) + .setCreateNodeSet(jetty1.getNodeName()+","+jetty2.getNodeName()) + .setAutoAddReplicas(true) + .setMaxShardsPerNode(2) + .process(cluster.getSolrClient()); - cluster.waitForActiveCollection(COLLECTION1, 2, 4); + cluster.waitForActiveCollection(COLLECTION, 2, 4); - CollectionAdminRequest.createCollection(COLLECTION2, "conf", 2, 2) - .setCreateNodeSet(jetty2.getNodeName()+","+jetty3.getNodeName()) - .setAutoAddReplicas(false) - .setMaxShardsPerNode(2) - .process(cluster.getSolrClient()); + // start the tests + JettySolrRunner lostJetty = random().nextBoolean() ? 
jetty1 : jetty2; + String lostNodeName = lostJetty.getNodeName(); + List replacedHdfsReplicas = getReplacedSharedFsReplicas(COLLECTION, zkStateReader, lostNodeName); + log.info("Stopping random node: {} / {}", lostNodeName, lostJetty.getLocalPort()); + lostJetty.stop(); - cluster.waitForActiveCollection(COLLECTION2, 2, 4); + cluster.waitForJettyToStop(lostJetty); + waitForNodeLeave(lostNodeName); - // the number of cores in jetty1 (5) will be larger than jetty3 (1) - CollectionAdminRequest.createCollection("testSimple3", "conf", 3, 1) - .setCreateNodeSet(jetty1.getNodeName()) - .setAutoAddReplicas(false) - .setMaxShardsPerNode(3) - .process(cluster.getSolrClient()); + waitForState(COLLECTION + "=(2,4) w/o down replicas", + COLLECTION, clusterShapeNoDownReplicas(2,4), 90, TimeUnit.SECONDS); + + checkSharedFsReplicasMovedCorrectly(replacedHdfsReplicas, zkStateReader, COLLECTION); + + log.info("Re-starting (same) random node: {} / {}", lostNodeName, lostJetty.getLocalPort()); + lostJetty.start(); + + waitForNodeLive(lostJetty); + + assertTrue("Timeout waiting for all live and active", + ClusterStateUtil.waitForAllActiveAndLiveReplicas(zkStateReader, 90000)); + + } - cluster.waitForActiveCollection("testSimple3", 3, 3); + /** + * Test that basic autoAddReplicaLogic logic is not used if the cluster prop for it is disabled + * (even if sys prop is set after collection is created) + */ + @Test + public void testClusterPropOverridesCollecitonProp() throws Exception { + final String COLLECTION = "test_clusterprop"; + final ZkStateReader zkStateReader = cluster.getSolrClient().getZkStateReader(); + final JettySolrRunner jetty1 = cluster.getJettySolrRunner(1); + final JettySolrRunner jetty2 = cluster.getJettySolrRunner(2); + + log.info("Creating {} using jetty1:{}/{} and jetty2:{}/{}", COLLECTION, + jetty1.getNodeName(), jetty1.getLocalPort(), + jetty2.getNodeName(), jetty2.getLocalPort()); + + CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 2) + 
.setCreateNodeSet(jetty1.getNodeName()+","+jetty2.getNodeName()) + .setAutoAddReplicas(true) + .setMaxShardsPerNode(2) + .process(cluster.getSolrClient()); - ZkStateReader zkStateReader = cluster.getSolrClient().getZkStateReader(); + cluster.waitForActiveCollection(COLLECTION, 2, 4); - // start the tests - JettySolrRunner lostJetty = random().nextBoolean() ? cluster.getJettySolrRunner(0) : cluster.getJettySolrRunner(1); + // check cluster property is considered + disableAutoAddReplicasInCluster(); + + JettySolrRunner lostJetty = random().nextBoolean() ? jetty1 : jetty2; String lostNodeName = lostJetty.getNodeName(); - List replacedHdfsReplicas = getReplacedSharedFsReplicas(COLLECTION1, zkStateReader, lostNodeName); + List replacedHdfsReplicas = getReplacedSharedFsReplicas(COLLECTION, zkStateReader, lostNodeName); + + log.info("Stopping random node: {} / {}", lostNodeName, lostJetty.getLocalPort()); lostJetty.stop(); cluster.waitForJettyToStop(lostJetty); waitForNodeLeave(lostNodeName); - // ensure that 2 shards have 2 active replicas and only 4 replicas in total - // i.e. old replicas have been deleted. 
- // todo remove the condition for total replicas == 4 after SOLR-11591 is fixed - waitForState("Waiting for collection " + COLLECTION1, COLLECTION1, (liveNodes, collectionState) -> clusterShape(2, 4).matches(liveNodes, collectionState) - && collectionState.getReplicas().size() == 4, 90, TimeUnit.SECONDS); - checkSharedFsReplicasMovedCorrectly(replacedHdfsReplicas, zkStateReader, COLLECTION1); + waitForState(COLLECTION + "=(2,2)", COLLECTION, + clusterShape(2, 2), 90, TimeUnit.SECONDS); + + + log.info("Re-starting (same) random node: {} / {}", lostNodeName, lostJetty.getLocalPort()); lostJetty.start(); - cluster.waitForAllNodes(30); + waitForNodeLive(lostJetty); + + assertTrue("Timeout waiting for all live and active", + ClusterStateUtil.waitForAllActiveAndLiveReplicas(zkStateReader, 90000)); - assertTrue("Timeout waiting for all live and active", ClusterStateUtil.waitForAllActiveAndLiveReplicas(cluster.getSolrClient().getZkStateReader(), 90000)); + waitForState(COLLECTION + "=(2,4) w/o down replicas", + COLLECTION, clusterShapeNoDownReplicas(2,4), 90, TimeUnit.SECONDS); - // check cluster property is considered - disableAutoAddReplicasInCluster(); - lostNodeName = jetty3.getNodeName(); - jetty3.stop(); + } + + /** + * Test that we can modify a collection after creation to add autoAddReplicas. 
+ */ + @Test + public void testAddCollectionPropAfterCreation() throws Exception { + final String COLLECTION = "test_addprop"; + final ZkStateReader zkStateReader = cluster.getSolrClient().getZkStateReader(); + final JettySolrRunner jetty1 = cluster.getJettySolrRunner(1); + final JettySolrRunner jetty2 = cluster.getJettySolrRunner(2); + + log.info("Creating {} using jetty1:{}/{} and jetty2:{}/{}", COLLECTION, + jetty1.getNodeName(), jetty1.getLocalPort(), + jetty2.getNodeName(), jetty2.getLocalPort()); + + CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 2) + .setCreateNodeSet(jetty1.getNodeName()+","+jetty2.getNodeName()) + .setAutoAddReplicas(false) // NOTE: false + .setMaxShardsPerNode(2) + .process(cluster.getSolrClient()); - cluster.waitForJettyToStop(jetty3); + cluster.waitForActiveCollection(COLLECTION, 2, 4); + + log.info("Modifying {} to use autoAddReplicas", COLLECTION); + new CollectionAdminRequest.AsyncCollectionAdminRequest(CollectionParams.CollectionAction.MODIFYCOLLECTION) { + @Override + public SolrParams getParams() { + ModifiableSolrParams params = (ModifiableSolrParams) super.getParams(); + params.set("collection", COLLECTION); + params.set("autoAddReplicas", true); + return params; + } + }.process(cluster.getSolrClient()); + + JettySolrRunner lostJetty = random().nextBoolean() ? 
jetty1 : jetty2; + String lostNodeName = lostJetty.getNodeName(); + List replacedHdfsReplicas = getReplacedSharedFsReplicas(COLLECTION, zkStateReader, lostNodeName); + + log.info("Stopping random node: {} / {}", lostNodeName, lostJetty.getLocalPort()); + lostJetty.stop(); + + cluster.waitForJettyToStop(lostJetty); waitForNodeLeave(lostNodeName); + + waitForState(COLLECTION + "=(2,4) w/o down replicas", + COLLECTION, clusterShapeNoDownReplicas(2,4), 90, TimeUnit.SECONDS); + checkSharedFsReplicasMovedCorrectly(replacedHdfsReplicas, zkStateReader, COLLECTION); + + log.info("Re-starting (same) random node: {} / {}", lostNodeName, lostJetty.getLocalPort()); + lostJetty.start(); + + waitForNodeLive(lostJetty); + + assertTrue("Timeout waiting for all live and active", + ClusterStateUtil.waitForAllActiveAndLiveReplicas(zkStateReader, 90000)); + } + + /** + * Test a specific sequence of problematic events: + *
    + *
  • create a collection with autoAddReplicas=false
  • + *
  • stop a nodeX in use by the collection
  • + *
  • re-start nodeX
  • + *
  • set autoAddReplicas=true
  • + *
  • re-stop nodeX
  • + *
+ */ + @Test + @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-13811") + public void testRapidStopStartStopWithPropChange() throws Exception { + + // This is the collection we'll be focused on in our testing... + final String COLLECTION = "test_stoptwice"; + // This is a collection we'll use as a "marker" to ensure we "wait" for the + // autoAddReplicas logic (via NodeLostTrigger) to kick in at least once before proceeding... + final String ALT_COLLECTION = "test_dummy"; - waitForState("Waiting for collection " + COLLECTION1, COLLECTION1, clusterShape(2, 2)); - jetty3.start(); - waitForState("Waiting for collection " + COLLECTION1, COLLECTION1, clusterShape(2, 4)); - waitForState("Waiting for collection " + COLLECTION2, COLLECTION2, clusterShape(2, 4)); - enableAutoAddReplicasInCluster(); + final ZkStateReader zkStateReader = cluster.getSolrClient().getZkStateReader(); + final JettySolrRunner jetty1 = cluster.getJettySolrRunner(1); + final JettySolrRunner jetty2 = cluster.getJettySolrRunner(2); + log.info("Creating {} using jetty1:{}/{} and jetty2:{}/{}", COLLECTION, + jetty1.getNodeName(), jetty1.getLocalPort(), + jetty2.getNodeName(), jetty2.getLocalPort()); + + CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 2) + .setCreateNodeSet(jetty1.getNodeName()+","+jetty2.getNodeName()) + .setAutoAddReplicas(false) // NOTE: false + .setMaxShardsPerNode(2) + .process(cluster.getSolrClient()); + + log.info("Creating {} using jetty1:{}/{} and jetty2:{}/{}", ALT_COLLECTION, + jetty1.getNodeName(), jetty1.getLocalPort(), + jetty2.getNodeName(), jetty2.getLocalPort()); + + CollectionAdminRequest.createCollection(ALT_COLLECTION, "conf", 2, 2) + .setCreateNodeSet(jetty1.getNodeName()+","+jetty2.getNodeName()) + .setAutoAddReplicas(true) // NOTE: true + .setMaxShardsPerNode(2) + .process(cluster.getSolrClient()); + + cluster.waitForActiveCollection(COLLECTION, 2, 4); + cluster.waitForActiveCollection(ALT_COLLECTION, 2, 4); - // test for multiple 
collections + JettySolrRunner lostJetty = random().nextBoolean() ? jetty1 : jetty2; + String lostNodeName = lostJetty.getNodeName(); + List replacedHdfsReplicas = getReplacedSharedFsReplicas(COLLECTION, zkStateReader, lostNodeName); + + log.info("Stopping random node: {} / {}", lostNodeName, lostJetty.getLocalPort()); + lostJetty.stop(); + + cluster.waitForJettyToStop(lostJetty); + waitForNodeLeave(lostNodeName); + + // ensure that our marker collection indicates that the autoAddReplicas logic + // has detected the down node and done some processing + waitForState(ALT_COLLECTION + "=(2,4) w/o down replicas", + ALT_COLLECTION, clusterShapeNoDownReplicas(2,4), 90, TimeUnit.SECONDS); + + waitForState(COLLECTION + "=(2,2)", COLLECTION, clusterShape(2, 2)); + + log.info("Re-starting (same) random node: {} / {}", lostNodeName, lostJetty.getLocalPort()); + lostJetty.start(); + // save time, don't bother waiting for lostJetty to start until after updating collection prop... + + log.info("Modifying {} to use autoAddReplicas", COLLECTION); new CollectionAdminRequest.AsyncCollectionAdminRequest(CollectionParams.CollectionAction.MODIFYCOLLECTION) { @Override public SolrParams getParams() { ModifiableSolrParams params = (ModifiableSolrParams) super.getParams(); - params.set("collection", COLLECTION2); + params.set("collection", COLLECTION); params.set("autoAddReplicas", true); return params; } }.process(cluster.getSolrClient()); - lostNodeName = jetty2.getNodeName(); - replacedHdfsReplicas = getReplacedSharedFsReplicas(COLLECTION2, zkStateReader, lostNodeName); - - jetty2.stop(); - - cluster.waitForJettyToStop(jetty2); + // make sure lostJetty is fully up before stopping again... 
+ waitForNodeLive(lostJetty); + + log.info("Re-Stopping (same) random node: {} / {}", lostNodeName, lostJetty.getLocalPort()); + lostJetty.stop(); + cluster.waitForJettyToStop(lostJetty); waitForNodeLeave(lostNodeName); - waitForState("Waiting for collection " + COLLECTION1, COLLECTION1, clusterShape(2, 4), 45, TimeUnit.SECONDS); - waitForState("Waiting for collection " + COLLECTION2, COLLECTION2, clusterShape(2, 4), 45, TimeUnit.SECONDS); - checkSharedFsReplicasMovedCorrectly(replacedHdfsReplicas, zkStateReader, COLLECTION2); - // overseer failover test.. + // TODO: this is the problematic situation... + // wether or not NodeLostTrigger noticed that lostJetty was re-started and shutdown *again* + // and that the new auoAddReplicas=true since the last time lostJetty was shutdown is respected + waitForState(COLLECTION + "=(2,4) w/o down replicas", + COLLECTION, clusterShapeNoDownReplicas(2,4), 90, TimeUnit.SECONDS); + checkSharedFsReplicasMovedCorrectly(replacedHdfsReplicas, zkStateReader, COLLECTION); + + log.info("Re-Re-starting (same) random node: {} / {}", lostNodeName, lostJetty.getLocalPort()); + lostJetty.start(); + + waitForNodeLive(lostJetty); + + assertTrue("Timeout waiting for all live and active", + ClusterStateUtil.waitForAllActiveAndLiveReplicas(zkStateReader, 90000)); } - + private void disableAutoAddReplicasInCluster() throws SolrServerException, IOException { Map m = makeMap( "action", CollectionParams.CollectionAction.CLUSTERPROP.toLower(), @@ -225,13 +392,44 @@ private List getReplacedSharedFsReplicas(String collection, ZkStateRead return replacedHdfsReplicas; } - private void waitForNodeLeave(String lostNodeName) throws InterruptedException { + /** + * {@link MiniSolrCloudCluster#waitForNode} Doesn't check isRunning first, and we don't want to + * use {@link MiniSolrCloudCluster#waitForAllNodes} because we don't want to waste cycles checking + * nodes we aren't messing with + */ + private void waitForNodeLive(final JettySolrRunner jetty) + throws 
InterruptedException, TimeoutException, IOException { + log.info("waitForNodeLive: {}/{}", jetty.getNodeName(), jetty.getLocalPort()); + + TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + while(!timeout.hasTimedOut()) { + if (jetty.isRunning()) { + break; + } + try { + Thread.sleep(100); + } catch (InterruptedException e) { + // ignore + } + } + if (timeout.hasTimedOut()) { + throw new TimeoutException("Waiting for Jetty to stop timed out"); + } + cluster.waitForNode(jetty, 30); + } + + private void waitForNodeLeave(String lostNodeName) throws InterruptedException, TimeoutException { log.info("waitForNodeLeave: {}", lostNodeName); ZkStateReader reader = cluster.getSolrClient().getZkStateReader(); - TimeOut timeOut = new TimeOut(20, TimeUnit.SECONDS, TimeSource.NANO_TIME); - while (reader.getClusterState().getLiveNodes().contains(lostNodeName)) { - Thread.sleep(100); - if (timeOut.hasTimedOut()) fail("Wait for " + lostNodeName + " to leave failed!"); - } + reader.waitForLiveNodes(30, TimeUnit.SECONDS, (o, n) -> !n.contains(lostNodeName)); } + + + private static CollectionStatePredicate clusterShapeNoDownReplicas(final int expectedShards, + final int expectedReplicas) { + return (liveNodes, collectionState) + -> (clusterShape(expectedShards, expectedReplicas).matches(liveNodes, collectionState) + && collectionState.getReplicas().size() == expectedReplicas); + } + } diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/ExecutePlanActionTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/ExecutePlanActionTest.java index d6e44ca05f7b..d286faf57b2e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/ExecutePlanActionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/ExecutePlanActionTest.java @@ -22,6 +22,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import 
java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; @@ -33,6 +36,7 @@ import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.cloud.CloudTestUtils.AutoScalingRequest; +import org.apache.solr.cloud.CloudUtil; import org.apache.solr.cloud.SolrCloudTestCase; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; @@ -44,6 +48,7 @@ import org.apache.solr.common.util.Utils; import org.apache.solr.core.SolrResourceLoader; import org.apache.solr.util.LogLevel; +import org.apache.solr.util.TestInjection; import org.apache.zookeeper.data.Stat; import org.junit.After; import org.junit.Before; @@ -66,6 +71,26 @@ public class ExecutePlanActionTest extends SolrCloudTestCase { private SolrResourceLoader loader; private SolrCloudManager cloudManager; + public static class StartAction extends TriggerActionBase { + + @Override + public void process(TriggerEvent event, ActionContext context) throws Exception { + startedProcessing.countDown(); + } + } + + private static CountDownLatch startedProcessing = new CountDownLatch(1); + + public static class FinishAction extends TriggerActionBase { + + @Override + public void process(TriggerEvent event, ActionContext context) throws Exception { + finishedProcessing.countDown(); + } + } + + private static CountDownLatch finishedProcessing = new CountDownLatch(1); + @BeforeClass public static void setupCluster() throws Exception { @@ -84,6 +109,9 @@ public void setUp() throws Exception { cloudManager = cluster.getJettySolrRunner(0).getCoreContainer().getZkController().getSolrCloudManager(); + + finishedProcessing = new CountDownLatch(1); + startedProcessing = new CountDownLatch(1); } @@ -91,6 +119,7 @@ public void setUp() throws Exception { public void tearDown() throws Exception { shutdownCluster(); super.tearDown(); + 
TestInjection.reset(); } @Test @@ -233,4 +262,119 @@ public void testIntegration() throws Exception { assertNotNull(replicasOnSurvivor); assertEquals(docCollection.toString(), 2, replicasOnSurvivor.size()); } + + @Test + public void testTaskTimeout() throws Exception { + int DELAY = 2000; + boolean taskTimeoutFail = random().nextBoolean(); + TestInjection.delayInExecutePlanAction = DELAY; + CloudSolrClient solrClient = cluster.getSolrClient(); + String triggerName = "node_lost_trigger2"; + + String setTriggerCommand = "{" + + "'set-trigger' : {" + + "'name' : '" + triggerName + "'," + + "'event' : 'nodeLost'," + + "'waitFor' : '1s'," + + "'enabled' : true," + + "'actions' : [{'name':'compute_plan', 'class' : 'solr.ComputePlanAction'}," + + "{'name':'execute_plan','class':'solr.ExecutePlanAction', 'taskTimeoutSeconds' : '1','taskTimeoutFail':'" + taskTimeoutFail + "'}," + + "{'name':'finish','class':'" + FinishAction.class.getName() + "'}]" + + "}}"; + SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); + NamedList response = solrClient.request(req); + assertEquals(response.get("result").toString(), "success"); + + String collectionName = "testTaskTimeout"; + CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, + "conf", 1, 2); + create.setMaxShardsPerNode(1); + create.process(solrClient); + + cluster.waitForActiveCollection(collectionName, 1, 2); + + waitForState("Timed out waiting for replicas of new collection to be active", + collectionName, clusterShape(1, 2)); + + JettySolrRunner sourceNode = cluster.getRandomJetty(random()); + + for (int i = 0; i < cluster.getJettySolrRunners().size(); i++) { + JettySolrRunner runner = cluster.getJettySolrRunner(i); + if (runner == sourceNode) { + JettySolrRunner j = cluster.stopJettySolrRunner(i); + cluster.waitForJettyToStop(j); + } + } + + boolean await = finishedProcessing.await(DELAY * 5, TimeUnit.MILLISECONDS); + if (taskTimeoutFail) { + 
assertFalse("finished processing event but should fail", await); + } else { + assertTrue("did not finish processing event in time", await); + } + String path = ZkStateReader.SOLR_AUTOSCALING_TRIGGER_STATE_PATH + "/" + triggerName + "/execute_plan"; + assertTrue(path + " does not exist", zkClient().exists(path, true)); + List requests = zkClient().getChildren(path, null, true); + assertFalse("some requests should be still present", requests.isEmpty()); + + // in either case the task will complete and move the replica as needed + waitForState("Timed out waiting for replicas of collection to be 2 again", + collectionName, clusterShape(1, 2)); + } + + @Test + public void testTaskFail() throws Exception { + TestInjection.failInExecutePlanAction = true; + CloudSolrClient solrClient = cluster.getSolrClient(); + String triggerName = "node_lost_trigger3"; + + String setTriggerCommand = "{" + + "'set-trigger' : {" + + "'name' : '" + triggerName + "'," + + "'event' : 'nodeLost'," + + "'waitFor' : '1s'," + + "'enabled' : true," + + "'actions' : [{'name':'start', 'class' : '" + StartAction.class.getName() + "'}," + + "{'name':'compute_plan','class':'solr.ComputePlanAction'}," + + "{'name':'execute_plan','class':'solr.ExecutePlanAction'}," + + "{'name':'finish','class':'" + FinishAction.class.getName() + "'}]" + + "}}"; + SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand); + NamedList response = solrClient.request(req); + assertEquals(response.get("result").toString(), "success"); + + String collectionName = "testTaskFail"; + CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, + "conf", 1, 2); + create.setMaxShardsPerNode(1); + create.process(solrClient); + + cluster.waitForActiveCollection(collectionName, 1, 2); + + waitForState("Timed out waiting for replicas of new collection to be active", + collectionName, clusterShape(1, 2)); + + // don't stop the jetty that runs our SolrCloudManager + 
JettySolrRunner runner = cluster.stopJettySolrRunner(1); + cluster.waitForJettyToStop(runner); + + boolean await = startedProcessing.await(10, TimeUnit.SECONDS); + assertTrue("did not start processing event in time", await); + await = finishedProcessing.await(2, TimeUnit.SECONDS); + assertFalse("finished processing event but should fail", await); + + String path = ZkStateReader.SOLR_AUTOSCALING_TRIGGER_STATE_PATH + "/" + triggerName + "/execute_plan"; + assertTrue(path + " does not exist", zkClient().exists(path, true)); + List requests = zkClient().getChildren(path, null, true); + assertTrue("there should be no requests pending but got " + requests, requests.isEmpty()); + + // the task never completed - we actually lost a replica + try { + CloudUtil.waitForState(cloudManager, collectionName, 5, TimeUnit.SECONDS, + CloudUtil.clusterShape(1, 2)); + fail("completed a task that should have failed"); + } catch (TimeoutException te) { + // expected + } + } } diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeMarkersRegistrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeMarkersRegistrationTest.java index c775dcafe53b..849c5c81f9ba 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeMarkersRegistrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeMarkersRegistrationTest.java @@ -20,12 +20,14 @@ import java.lang.invoke.MethodHandles; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.SortedSet; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReentrantLock; import org.apache.solr.client.solrj.SolrRequest; @@ -41,6 +43,7 @@ import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.util.NamedList; 
import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.Utils; import org.apache.solr.util.LogLevel; import org.apache.solr.util.TimeOut; import org.apache.zookeeper.KeeperException; @@ -50,6 +53,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_ACTIVE; +import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_INACTIVE; +import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_STATE; + @LogLevel("org.apache.solr.cloud.autoscaling=DEBUG;org.apache.solr.client.solrj.cloud.autoscaling=DEBUG") public class NodeMarkersRegistrationTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -81,7 +88,7 @@ private static CountDownLatch getTriggerFiredLatch() { return triggerFiredLatch; } - @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-13376") + //@AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-13376") @Test public void testNodeMarkersRegistration() throws Exception { triggerFiredLatch = new CountDownLatch(1); @@ -135,10 +142,16 @@ public void testNodeMarkersRegistration() throws Exception { String pathLost = ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH + "/" + overseerLeader; TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + AtomicBoolean markerInactive = new AtomicBoolean(); try { - timeout.waitFor("zk path to go away", () -> { + timeout.waitFor("nodeLost marker to get inactive", () -> { try { - return !zkClient().exists(pathLost, true); + if (!zkClient().exists(pathLost, true)) { + throw new RuntimeException("marker " + pathLost + " should exist!"); + } + Map markerData = Utils.getJson(zkClient(), pathLost, true); + markerInactive.set(markerData.getOrDefault(MARKER_STATE, MARKER_ACTIVE).equals(MARKER_INACTIVE)); + return markerInactive.get(); } catch (KeeperException e) { throw new 
RuntimeException(e); } catch (InterruptedException e) { @@ -149,8 +162,8 @@ public void testNodeMarkersRegistration() throws Exception { // okay } - // verify that a znode does NOT exist - the new overseer cleaned up existing nodeLost markers - assertFalse("Path " + pathLost + " exists", zkClient().exists(pathLost, true)); + // verify that the marker is inactive - the new overseer should deactivate markers once they are processed + assertTrue("Marker " + pathLost + " still active!", markerInactive.get()); listener.reset(); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/FakeDocIterator.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/FakeDocIterator.java new file mode 100644 index 000000000000..fbe66aca118f --- /dev/null +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/FakeDocIterator.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.cloud.autoscaling.sim; + +import java.util.Iterator; + +import org.apache.solr.common.SolrInputDocument; +import org.apache.solr.common.SolrInputField; + +/** + * Lightweight generator of fake documents + * NOTE: this iterator only ever returns the same document N times, which works ok + * for our "bulk index update" simulation. Obviously don't use this for real indexing. + */ +public class FakeDocIterator implements Iterator { + final SolrInputDocument doc = new SolrInputDocument(); + final SolrInputField idField = new SolrInputField("id"); + + final long start, count; + + long current, max; + + FakeDocIterator(long start, long count) { + this.start = start; + this.count = count; + current = start; + max = start + count; + doc.put("id", idField); + idField.setValue("foo"); + } + + @Override + public boolean hasNext() { + return current < max; + } + + @Override + public SolrInputDocument next() { + current++; + return doc; + } +} diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimExtremeIndexing.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimExtremeIndexing.java index 654c29f9d1a5..3ad4f72af3ff 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimExtremeIndexing.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimExtremeIndexing.java @@ -17,7 +17,6 @@ package org.apache.solr.cloud.autoscaling.sim; import java.lang.invoke.MethodHandles; -import java.util.Iterator; import java.util.Locale; import java.util.concurrent.TimeUnit; @@ -30,8 +29,6 @@ import org.apache.solr.cloud.CloudUtil; import org.apache.solr.cloud.autoscaling.ExecutePlanAction; import org.apache.solr.common.SolrDocumentList; -import org.apache.solr.common.SolrInputDocument; -import org.apache.solr.common.SolrInputField; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.TimeSource; @@ -144,36 
+141,4 @@ private void addDocs(String collection, long start, long count) throws Exception solrClient.request(ureq); } - // lightweight generator of fake documents - // NOTE: this iterator only ever returns the same document, which works ok - // for our "index update" simulation. Obviously don't use this for real indexing. - private static class FakeDocIterator implements Iterator { - final SolrInputDocument doc = new SolrInputDocument(); - final SolrInputField idField = new SolrInputField("id"); - - final long start, count; - - long current, max; - - FakeDocIterator(long start, long count) { - this.start = start; - this.count = count; - current = start; - max = start + count; - doc.put("id", idField); - idField.setValue("foo"); - } - - @Override - public boolean hasNext() { - return current < max; - } - - @Override - public SolrInputDocument next() { - current++; - return doc; - } - } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimLargeCluster.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimLargeCluster.java index 1cf4f0bffea7..adf2e671a32d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimLargeCluster.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimLargeCluster.java @@ -19,7 +19,10 @@ import java.lang.invoke.MethodHandles; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Locale; @@ -29,6 +32,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; import org.apache.commons.math3.stat.descriptive.SummaryStatistics; import org.apache.lucene.util.TestUtil; @@ -36,7 +40,9 @@ import org.apache.solr.client.solrj.cloud.autoscaling.Suggester; import 
org.apache.solr.client.solrj.cloud.autoscaling.TriggerEventProcessorStage; import org.apache.solr.client.solrj.cloud.autoscaling.TriggerEventType; +import org.apache.solr.client.solrj.cloud.autoscaling.Variable; import org.apache.solr.client.solrj.request.CollectionAdminRequest; +import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.cloud.CloudTestUtils; import org.apache.solr.cloud.CloudUtil; import org.apache.solr.cloud.autoscaling.ActionContext; @@ -48,10 +54,13 @@ import org.apache.solr.cloud.autoscaling.TriggerEvent; import org.apache.solr.cloud.autoscaling.TriggerListenerBase; import org.apache.solr.common.SolrInputDocument; +import org.apache.solr.common.cloud.ClusterState; +import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.params.CollectionParams; import org.apache.solr.common.util.Pair; import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.Utils; import org.apache.solr.util.LogLevel; import org.apache.solr.util.TimeOut; import org.junit.After; @@ -88,6 +97,9 @@ public void tearDownTest() throws Exception { public void setupTest() throws Exception { configureCluster(NUM_NODES, TimeSource.get("simTime:" + SPEED)); + // disable metrics history collection + cluster.disableMetricsHistory(); + // disable .scheduled_maintenance (once it exists) CloudTestUtils.waitForTriggerToBeScheduled(cluster, ".scheduled_maintenance"); CloudTestUtils.suspendTrigger(cluster, ".scheduled_maintenance"); @@ -752,4 +764,79 @@ public void testSearchRate() throws Exception { assertEquals("shard1", hint.second()); }); } + + @Test + public void testFreediskTracking() throws Exception { + int NUM_DOCS = 100000; + String collectionName = "testFreeDisk"; + SolrClient solrClient = cluster.simGetSolrClient(); + CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, + "conf",2, 2); + create.process(solrClient); + + 
CloudUtil.waitForState(cluster, "Timed out waiting for replicas of new collection to be active", + collectionName, CloudUtil.clusterShape(2, 2, false, true)); + ClusterState clusterState = cluster.getClusterStateProvider().getClusterState(); + DocCollection coll = clusterState.getCollection(collectionName); + Set nodes = coll.getReplicas().stream() + .map(r -> r.getNodeName()) + .collect(Collectors.toSet()); + Map initialFreedisk = getFreeDiskPerNode(nodes); + + // test small updates + for (int i = 0; i < NUM_DOCS; i++) { + SolrInputDocument doc = new SolrInputDocument("id", "id-" + i); + solrClient.add(collectionName, doc); + } + Map updatedFreedisk = getFreeDiskPerNode(nodes); + double delta = getDeltaFreeDiskBytes(initialFreedisk, updatedFreedisk); + // 2 replicas - twice as much delta + assertEquals(SimClusterStateProvider.DEFAULT_DOC_SIZE_BYTES * NUM_DOCS * 2, delta, delta * 0.1); + + // test small deletes - delete half of docs + for (int i = 0; i < NUM_DOCS / 2; i++) { + solrClient.deleteById(collectionName, "id-" + i); + } + Map updatedFreedisk1 = getFreeDiskPerNode(nodes); + double delta1 = getDeltaFreeDiskBytes(initialFreedisk, updatedFreedisk1); + // 2 replicas but half the docs + assertEquals(SimClusterStateProvider.DEFAULT_DOC_SIZE_BYTES * NUM_DOCS * 2 / 2, delta1, delta1 * 0.1); + + // test bulk delete + solrClient.deleteByQuery(collectionName, "*:*"); + Map updatedFreedisk2 = getFreeDiskPerNode(nodes); + double delta2 = getDeltaFreeDiskBytes(initialFreedisk, updatedFreedisk2); + // 0 docs - initial freedisk + log.info(cluster.dumpClusterState(true)); + assertEquals(0.0, delta2, delta2 * 0.1); + + // test bulk update + UpdateRequest ureq = new UpdateRequest(); + ureq.setDocIterator(new FakeDocIterator(0, NUM_DOCS)); + ureq.process(solrClient, collectionName); + Map updatedFreedisk3 = getFreeDiskPerNode(nodes); + double delta3 = getDeltaFreeDiskBytes(initialFreedisk, updatedFreedisk3); + assertEquals(SimClusterStateProvider.DEFAULT_DOC_SIZE_BYTES * 
NUM_DOCS * 2, delta3, delta3 * 0.1); + } + + private double getDeltaFreeDiskBytes(Map initial, Map updated) { + double deltaGB = 0; + for (String node : initial.keySet()) { + double before = initial.get(node).doubleValue(); + double after = updated.get(node).doubleValue(); + assertTrue("freedisk after=" + after + " not smaller than before=" + before, after <= before); + deltaGB += before - after; + } + return deltaGB * 1024.0 * 1024.0 * 1024.0; + } + + private Map getFreeDiskPerNode(Collection nodes) throws Exception { + Map freediskPerNode = new HashMap<>(); + for (String node : nodes) { + Map values = cluster.getNodeStateProvider().getNodeValues(node, Arrays.asList(Variable.Type.FREEDISK.tagName)); + freediskPerNode.put(node, (Number) values.get(Variable.Type.FREEDISK.tagName)); + } + log.info("- freeDiskPerNode: " + Utils.toJSONString(freediskPerNode)); + return freediskPerNode; + } } diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimTriggerIntegration.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimTriggerIntegration.java index ea645c6a803f..1258c6d2fe9d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimTriggerIntegration.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimTriggerIntegration.java @@ -40,6 +40,7 @@ import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig; import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo; import org.apache.solr.client.solrj.cloud.autoscaling.TriggerEventProcessorStage; +import org.apache.solr.client.solrj.cloud.autoscaling.TriggerEventType; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.cloud.CloudTestUtils; import org.apache.solr.cloud.CloudUtil; @@ -62,6 +63,7 @@ import org.apache.solr.common.cloud.LiveNodesListener; import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.Utils; 
import org.apache.solr.core.SolrResourceLoader; import org.apache.solr.util.LogLevel; import org.apache.solr.util.TimeOut; @@ -74,6 +76,10 @@ import com.google.common.util.concurrent.AtomicDouble; +import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_ACTIVE; +import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_INACTIVE; +import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_STATE; + /** * An end-to-end integration test for triggers */ @@ -864,10 +870,6 @@ private TestLiveNodesListener registerLiveNodesListener() { public static class TestEventMarkerAction extends TriggerActionBase { - public TestEventMarkerAction() { - actionConstructorCalled.countDown(); - } - @Override public void process(TriggerEvent event, ActionContext actionContext) { boolean locked = lock.tryLock(); @@ -887,19 +889,29 @@ public void process(TriggerEvent event, ActionContext actionContext) { } @Override - public void configure(SolrResourceLoader loader, SolrCloudManager cloudManager, Map args) throws TriggerValidationException { + public void init() throws Exception { log.info("TestEventMarkerAction init"); - actionInitCalled.countDown(); - super.configure(loader, cloudManager, args); + super.init(); + } + } + + public static class AssertingListener extends TriggerListenerBase { + @Override + public void onEvent(TriggerEvent event, TriggerEventProcessorStage stage, String actionName, ActionContext context, Throwable error, String message) throws Exception { + if (!Thread.currentThread().getName().startsWith("ScheduledTrigger")) { + // for future safety + throw new IllegalThreadStateException("AssertingListener should have been invoked by a thread from the scheduled trigger thread pool"); + } + log.debug(" --- listener fired for event: {}, stage: {}", event, stage); + listenerEventLatch.await(); + log.debug(" --- listener wait complete for event: {}, stage: {}", event, stage); } } @Test public void 
testNodeMarkersRegistration() throws Exception { - // for this test we want to create two triggers so we must assert that the actions were created twice - actionInitCalled = new CountDownLatch(2); - // similarly we want both triggers to fire - triggerFiredLatch = new CountDownLatch(2); + triggerFiredLatch = new CountDownLatch(1); + listenerEventLatch = new CountDownLatch(1); TestLiveNodesListener listener = registerLiveNodesListener(); SolrClient solrClient = cluster.simGetSolrClient(); @@ -912,7 +924,7 @@ public void testNodeMarkersRegistration() throws Exception { assertTrue("cluster onChange listener didn't execute even after await()ing an excessive amount of time", listener.onChangeLatch.await(60, TimeUnit.SECONDS)); assertEquals(1, listener.addedNodes.size()); - assertEquals(node, listener.addedNodes.iterator().next()); + assertTrue(listener.addedNodes.toString(), listener.addedNodes.contains(node)); // verify that a znode doesn't exist (no trigger) String pathAdded = ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH + "/" + node; assertFalse("Path " + pathAdded + " was created but there are no nodeAdded triggers", @@ -931,22 +943,28 @@ public void testNodeMarkersRegistration() throws Exception { assertEquals(0, listener.addedNodes.size()); // wait until the new overseer is up cluster.getTimeSource().sleep(5000); - // verify that a znode does NOT exist - there's no nodeLost trigger, - // so the new overseer cleaned up existing nodeLost markers - + String pathLost = ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH + "/" + overseerLeader; TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); - timeout.waitFor("Path " + pathLost + " exists", () -> { + AtomicBoolean markerInactive = new AtomicBoolean(); + timeout.waitFor("nodeLost marker to get inactive", () -> { try { - return !cluster.getDistribStateManager().hasData(pathLost); + if (!cluster.getDistribStateManager().hasData(pathLost)) { + throw new RuntimeException("marker " + pathLost + " should 
exist!"); + } + Map markerData = Utils.getJson(cluster.getDistribStateManager(), pathLost); + markerInactive.set(markerData.getOrDefault(MARKER_STATE, MARKER_ACTIVE).equals(MARKER_INACTIVE)); + return markerInactive.get(); + } catch (IOException | KeeperException | InterruptedException e) { e.printStackTrace(); throw new RuntimeException(e); } }); - assertFalse("Path " + pathLost + " exists", cluster.getDistribStateManager().hasData(pathLost)); + // verify that the marker is inactive - the new overseer should deactivate markers once they are processed + assertTrue("Marker " + pathLost + " still active!", markerInactive.get()); listener.reset(); @@ -956,7 +974,7 @@ public void testNodeMarkersRegistration() throws Exception { assertAutoScalingRequest ("{" + "'set-trigger' : {" + - "'name' : 'node_added_trigger'," + + "'name' : 'node_added_triggerMR'," + "'event' : 'nodeAdded'," + "'waitFor' : '1s'," + "'enabled' : true," + @@ -966,14 +984,25 @@ public void testNodeMarkersRegistration() throws Exception { assertAutoScalingRequest ("{" + "'set-trigger' : {" + - "'name' : 'node_lost_trigger'," + + "'name' : 'node_lost_triggerMR'," + "'event' : 'nodeLost'," + "'waitFor' : '1s'," + "'enabled' : true," + "'actions' : [{'name':'test','class':'" + TestEventMarkerAction.class.getName() + "'}]" + "}}"); + assertAutoScalingRequest( + "{\n" + + " \"set-listener\" : {\n" + + " \"name\" : \"listener_node_added_triggerMR\",\n" + + " \"trigger\" : \"node_added_triggerMR\",\n" + + " \"stage\" : \"STARTED\",\n" + + " \"class\" : \"" + AssertingListener.class.getName() + "\"\n" + + " }\n" + + "}" + ); assertAutoscalingUpdateComplete(); + overseerLeader = cluster.getSimClusterStateProvider().simGetOverseerLeader(); // create another node @@ -987,41 +1016,51 @@ public void testNodeMarkersRegistration() throws Exception { pathAdded = ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH + "/" + node1; assertTrue("Path " + pathAdded + " wasn't created", 
cluster.getDistribStateManager().hasData(pathAdded)); + listenerEventLatch.countDown(); // let the trigger thread continue + + assertTrue(triggerFiredLatch.await(10, TimeUnit.SECONDS)); + + // kill this node listener.reset(); events.clear(); - // one nodeAdded (not cleared yet) and one nodeLost - triggerFiredLatch = new CountDownLatch(2); + triggerFiredLatch = new CountDownLatch(1); + + cluster.simRemoveNode(node1, true); + if (!listener.onChangeLatch.await(10, TimeUnit.SECONDS)) { + fail("onChange listener didn't execute on cluster change"); + } + assertEquals(1, listener.lostNodes.size()); + assertEquals(node1, listener.lostNodes.iterator().next()); + // verify that a znode exists + String pathLost2 = ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH + "/" + node1; + assertTrue("Path " + pathLost2 + " wasn't created", cluster.getDistribStateManager().hasData(pathLost2)); + + listenerEventLatch.countDown(); // let the trigger thread continue + + assertTrue(triggerFiredLatch.await(10, TimeUnit.SECONDS)); + + // triggers don't remove markers + assertTrue("Path " + pathLost2 + " should still exist", cluster.getDistribStateManager().hasData(pathLost2)); + + listener.reset(); + events.clear(); + triggerFiredLatch = new CountDownLatch(1); // kill overseer again log.info("====== KILL OVERSEER 2"); - cluster.simRestartOverseer(overseerLeader); - assertTrue("cluster onChange listener didn't execute even after await()ing an excessive amount of time", - listener.onChangeLatch.await(60, TimeUnit.SECONDS)); + cluster.simRemoveNode(overseerLeader, true); + if (!listener.onChangeLatch.await(10, TimeUnit.SECONDS)) { + fail("onChange listener didn't execute on cluster change"); + } - assertAutoscalingUpdateComplete(); - assertTrue("trigger did not fire event after await()ing an excessive amount of time", - triggerFiredLatch.await(60, TimeUnit.SECONDS)); - assertEquals(2, events.size()); - TriggerEvent nodeAdded = null; - TriggerEvent nodeLost = null; - for (TriggerEvent ev : events) { 
- switch (ev.getEventType()) { - case NODEADDED: - nodeAdded = ev; - break; - case NODELOST: - nodeLost = ev; - break; - default: - fail("unexpected event type: " + ev); - } + if (!triggerFiredLatch.await(20, TimeUnit.SECONDS)) { + fail("Trigger should have fired by now"); } - assertNotNull("expected nodeAdded event", nodeAdded); - assertNotNull("expected nodeLost event", nodeLost); - List nodeNames = (List)nodeLost.getProperty(TriggerEvent.NODE_NAMES); + assertEquals(1, events.size()); + TriggerEvent ev = events.iterator().next(); + List nodeNames = (List) ev.getProperty(TriggerEvent.NODE_NAMES); assertTrue(nodeNames.contains(overseerLeader)); - nodeNames = (List)nodeAdded.getProperty(TriggerEvent.NODE_NAMES); - assertTrue(nodeNames.contains(node1)); + assertEquals(TriggerEventType.NODELOST, ev.getEventType()); } static final Map> listenerEvents = new ConcurrentHashMap<>(); diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java index 411d107111a0..50fa634fad7d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java @@ -17,24 +17,32 @@ package org.apache.solr.cloud.autoscaling.sim; import java.io.File; +import java.io.FileInputStream; import java.lang.invoke.MethodHandles; +import java.nio.charset.Charset; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.TreeMap; import java.util.function.Function; import java.util.function.Predicate; import java.util.regex.Pattern; import java.util.stream.Collectors; +import org.apache.commons.io.IOUtils; import org.apache.solr.client.solrj.cloud.DistribStateManager; 
import org.apache.solr.client.solrj.cloud.NodeStateProvider; import org.apache.solr.client.solrj.cloud.SolrCloudManager; +import org.apache.solr.client.solrj.cloud.autoscaling.PolicyHelper; import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo; +import org.apache.solr.client.solrj.cloud.autoscaling.Suggester; +import org.apache.solr.client.solrj.cloud.autoscaling.Suggestion; import org.apache.solr.client.solrj.cloud.autoscaling.VersionedData; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.cloud.SolrCloudTestCase; @@ -66,6 +74,10 @@ public static void setupCluster() throws Exception { .configure(); CollectionAdminRequest.createCollection(CollectionAdminParams.SYSTEM_COLL, null, 1, 2, 0, 1) .process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection("coll1", null, 1, 1) + .process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection("coll10", null, 1, 1) + .process(cluster.getSolrClient()); realManager = cluster.getJettySolrRunner(cluster.getJettySolrRunners().size() - 1).getCoreContainer() .getZkController().getSolrCloudManager(); } @@ -73,7 +85,7 @@ public static void setupCluster() throws Exception { @Test public void testSnapshots() throws Exception { SnapshotCloudManager snapshotCloudManager = new SnapshotCloudManager(realManager, null); - Map snapshot = snapshotCloudManager.getSnapshot(true); + Map snapshot = snapshotCloudManager.getSnapshot(true, false); SnapshotCloudManager snapshotCloudManager1 = new SnapshotCloudManager(snapshot); SimSolrCloudTestCase.assertClusterStateEquals(realManager.getClusterStateProvider().getClusterState(), snapshotCloudManager.getClusterStateProvider().getClusterState()); SimSolrCloudTestCase.assertClusterStateEquals(realManager.getClusterStateProvider().getClusterState(), snapshotCloudManager1.getClusterStateProvider().getClusterState()); @@ -88,23 +100,60 @@ public void testPersistance() throws Exception { Path tmpPath = createTempDir(); File 
tmpDir = tmpPath.toFile(); SnapshotCloudManager snapshotCloudManager = new SnapshotCloudManager(realManager, null); - snapshotCloudManager.saveSnapshot(tmpDir, true); + snapshotCloudManager.saveSnapshot(tmpDir, true, false); SnapshotCloudManager snapshotCloudManager1 = SnapshotCloudManager.readSnapshot(tmpDir); SimSolrCloudTestCase.assertClusterStateEquals(snapshotCloudManager.getClusterStateProvider().getClusterState(), snapshotCloudManager1.getClusterStateProvider().getClusterState()); assertNodeStateProvider(snapshotCloudManager, snapshotCloudManager1); assertDistribStateManager(snapshotCloudManager.getDistribStateManager(), snapshotCloudManager1.getDistribStateManager()); } + @Test + public void testRedaction() throws Exception { + Path tmpPath = createTempDir(); + File tmpDir = tmpPath.toFile(); + SnapshotCloudManager snapshotCloudManager = new SnapshotCloudManager(realManager, null); + Set redacted = new HashSet<>(realManager.getClusterStateProvider().getLiveNodes()); + redacted.addAll(realManager.getClusterStateProvider().getClusterState().getCollectionStates().keySet()); + snapshotCloudManager.saveSnapshot(tmpDir, true, true); + for (String key : SnapshotCloudManager.REQUIRED_KEYS) { + File src = new File(tmpDir, key + ".json"); + assertTrue(src.toString() + " doesn't exist", src.exists()); + try (FileInputStream is = new FileInputStream(src)) { + String data = IOUtils.toString(is, Charset.forName("UTF-8")); + assertFalse("empty data in " + src, data.trim().isEmpty()); + for (String redactedName : redacted) { + assertFalse("redacted name " + redactedName + " found in " + src, data.contains(redactedName)); + } + } + } + } + + @Test + public void testComplexSnapshot() throws Exception { + File snapshotDir = new File(TEST_HOME(), "simSnapshot"); + SnapshotCloudManager snapshotCloudManager = SnapshotCloudManager.readSnapshot(snapshotDir); + assertEquals(48, snapshotCloudManager.getClusterStateProvider().getLiveNodes().size()); + assertEquals(16, 
snapshotCloudManager.getClusterStateProvider().getClusterState().getCollectionStates().size()); + try (SimCloudManager simCloudManager = SimCloudManager.createCluster(snapshotCloudManager, null, TimeSource.get("simTime:50"))) { + List suggestions = PolicyHelper.getSuggestions(simCloudManager.getDistribStateManager().getAutoScalingConfig(), simCloudManager); + //assertEquals(1, suggestions.size()); + if (suggestions.size() > 0) { + Suggester.SuggestionInfo suggestion = suggestions.get(0); + assertEquals(Suggestion.Type.improvement.toString(), suggestion.toMap(new HashMap<>()).get("type").toString()); + } + } + } + @Test public void testSimulatorFromSnapshot() throws Exception { Path tmpPath = createTempDir(); File tmpDir = tmpPath.toFile(); SnapshotCloudManager snapshotCloudManager = new SnapshotCloudManager(realManager, null); - snapshotCloudManager.saveSnapshot(tmpDir, true); + snapshotCloudManager.saveSnapshot(tmpDir, true, false); SnapshotCloudManager snapshotCloudManager1 = SnapshotCloudManager.readSnapshot(tmpDir); try (SimCloudManager simCloudManager = SimCloudManager.createCluster(snapshotCloudManager1, null, TimeSource.get("simTime:50"))) { SimSolrCloudTestCase.assertClusterStateEquals(snapshotCloudManager.getClusterStateProvider().getClusterState(), simCloudManager.getClusterStateProvider().getClusterState()); - assertNodeStateProvider(snapshotCloudManager, simCloudManager); + assertNodeStateProvider(snapshotCloudManager, simCloudManager, "freedisk"); assertDistribStateManager(snapshotCloudManager.getDistribStateManager(), simCloudManager.getDistribStateManager()); ClusterState state = simCloudManager.getClusterStateProvider().getClusterState(); Replica r = state.getCollection(CollectionAdminParams.SYSTEM_COLL).getReplicas().get(0); @@ -126,14 +175,20 @@ public void testSimulatorFromSnapshot() throws Exception { } } - private static void assertNodeStateProvider(SolrCloudManager oneMgr, SolrCloudManager twoMgr) throws Exception { + private static void 
assertNodeStateProvider(SolrCloudManager oneMgr, SolrCloudManager twoMgr, String... ignorableNodeValues) throws Exception { NodeStateProvider one = oneMgr.getNodeStateProvider(); NodeStateProvider two = twoMgr.getNodeStateProvider(); for (String node : oneMgr.getClusterStateProvider().getLiveNodes()) { Map oneVals = one.getNodeValues(node, SimUtils.COMMON_NODE_TAGS); Map twoVals = two.getNodeValues(node, SimUtils.COMMON_NODE_TAGS); - oneVals = Utils.getDeepCopy(oneVals, 10, false, true); - twoVals = Utils.getDeepCopy(twoVals, 10, false, true); + oneVals = new TreeMap<>(Utils.getDeepCopy(oneVals, 10, false, true)); + twoVals = new TreeMap<>(Utils.getDeepCopy(twoVals, 10, false, true)); + if (ignorableNodeValues != null) { + for (String key : ignorableNodeValues) { + oneVals.remove(key); + twoVals.remove(key); + } + } assertEquals(Utils.toJSONString(oneVals), Utils.toJSONString(twoVals)); Map>> oneInfos = one.getReplicaInfo(node, SimUtils.COMMON_REPLICA_TAGS); Map>> twoInfos = two.getReplicaInfo(node, SimUtils.COMMON_REPLICA_TAGS); @@ -160,10 +215,16 @@ private static void assertNodeStateProvider(SolrCloudManager oneMgr, SolrCloudMa // ignore these because SimCloudManager always modifies them private static final Set IGNORE_DISTRIB_STATE_PATTERNS = new HashSet<>(Arrays.asList( - Pattern.compile("/autoscaling/triggerState.*"), - Pattern.compile("/clusterstate\\.json"), // different format in SimClusterStateProvider + Pattern.compile("/autoscaling/triggerState/.*"), + // some triggers may have run after the snapshot was taken + Pattern.compile("/autoscaling/events/.*"), + // we always use format 1 in SimClusterStateProvider + Pattern.compile("/clusterstate\\.json"), + // depending on the startup sequence leaders may differ Pattern.compile("/collections/[^/]+?/leader_elect/.*"), Pattern.compile("/collections/[^/]+?/leaders/.*"), + Pattern.compile("/collections/[^/]+?/terms/.*"), + Pattern.compile("/overseer_elect/election/.*"), Pattern.compile("/live_nodes/.*") )); diff 
--git a/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java b/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java index ce1f68ef89a0..4a0f1ba7eb63 100644 --- a/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java +++ b/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java @@ -36,6 +36,7 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; import static org.mockito.Mockito.any; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; @@ -62,7 +63,6 @@ public class BlobRepositoryMockingTest { boolean blobFetched = false; String blobKey = ""; String url = null; - String sha256 = null; ByteBuffer filecontent = null; @BeforeClass @@ -91,14 +91,6 @@ ByteBuffer fetchFromUrl(String key, String url) { return filecontent; } - @Override - BlobContentRef getBlobIncRef(String key, Decoder decoder, String url, String sha256) { - if(!Objects.equals(sha256, BlobRepositoryMockingTest.this.sha256)) return null; - blobKey = key; - blobFetched = true; - return new BlobContentRef(new BlobContent(key, filecontent)) ; - } - @Override ConcurrentHashMap createMap() { return mapMock; @@ -138,13 +130,21 @@ public void testGetBlobIncrRefByUrl() throws Exception{ when(mockContainer.isZooKeeperAware()).thenReturn(true); filecontent = TestDynamicLoading.getFileContent("runtimecode/runtimelibs_v2.jar.bin"); url = "http://localhost:8080/myjar/location.jar"; - sha256 = "79298d7d5c3e60d91154efe7d72f4536eac46698edfa22ab894b85492d562ed4"; BlobRepository.BlobContentRef ref = repository.getBlobIncRef( "filefoo",null,url, - "79298d7d5c3e60d91154efe7d72f4536eac46698edfa22ab894b85492d562ed4"); + "bc5ce45ad281b6a08fb7e529b1eb475040076834816570902acb6ebdd809410e31006efdeaa7f78a6c35574f3504963f5f7e4d92247d0eb4db3fc9abdda5d417"); assertTrue("filefoo".equals(blobKey)); assertTrue(blobFetched); 
assertNotNull(ref.blob); assertEquals(filecontent, ref.blob.get()); + verify(mockContainer).isZooKeeperAware(); + try { + repository.getBlobIncRef( "filefoo",null,url, + "WRONG-SHA512-KEY"); + fail("expected exception"); + } catch (Exception e) { + assertTrue(e.getMessage().contains(" expected sha512 hash : WRONG-SHA512-KEY , actual :")); + } + url = null; filecontent = null; } diff --git a/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java b/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java index 3a8f2e6129a6..22ee299dac6a 100644 --- a/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java +++ b/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java @@ -48,7 +48,6 @@ public static void enableRuntimeLib() throws Exception { // 12-Jun-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") //17-Aug-2018 commented @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018 public void testDynamicLoading() throws Exception { - System.setProperty("enable.runtime.lib", "true"); setupRestTestHarnesses(); @@ -98,7 +97,7 @@ public void testDynamicLoading() throws Exception { assertNotNull(map = (Map) map.get("error")); - assertTrue("full output " + map, map.get("msg").toString().contains("no such resource available: colltest/1" )); + assertTrue("full output " + map, map.get("msg").toString().contains("no such blob or version available: colltest/1" )); payload = " {\n" + " 'set' : {'watched': {" + " 'x':'X val',\n" + @@ -129,6 +128,9 @@ public void testDynamicLoading() throws Exception { } ByteBuffer jar = null; +// jar = persistZip("/tmp/runtimelibs.jar.bin", TestDynamicLoading.class, RuntimeLibReqHandler.class, RuntimeLibResponseWriter.class, RuntimeLibSearchComponent.class); +// if(true) return; + jar = getFileContent("runtimecode/runtimelibs.jar.bin"); TestBlobHandler.postAndCheck(cloudClient, baseURL, blobName, jar, 1); @@ -282,8 +284,4 @@ public static ByteBuffer 
generateZip(Class... classes) throws IOException { return bos.getByteBuffer(); } -/* public static void main(String[] args) throws Exception { - persistZip("/tmp/runtimelibs_v3.jar.bin", TestDynamicLoading.class, RuntimeLibReqHandler.class, RuntimeLibResponseWriter.class, RuntimeLibSearchComponent.class); - if(true) return; - }*/ } diff --git a/solr/core/src/test/org/apache/solr/core/TestDynamicLoadingUrl.java b/solr/core/src/test/org/apache/solr/core/TestDynamicLoadingUrl.java index 8fec3a433a74..575cf9e2da88 100644 --- a/solr/core/src/test/org/apache/solr/core/TestDynamicLoadingUrl.java +++ b/solr/core/src/test/org/apache/solr/core/TestDynamicLoadingUrl.java @@ -77,7 +77,7 @@ public void testDynamicLoadingUrl() throws Exception { try { String payload = "{\n" + "'add-runtimelib' : { 'name' : 'urljar', url : 'http://localhost:" + port + "/jar1.jar'" + - " 'sha256':'e01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420'}" + + " 'sha512':'e01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420'}" + "}"; RestTestHarness client = randomRestTestHarness(); TestSolrConfigHandler.runConfigCommandExpectFailure(client, "/config", payload, "Invalid jar"); @@ -85,7 +85,7 @@ public void testDynamicLoadingUrl() throws Exception { payload = "{\n" + "'add-runtimelib' : { 'name' : 'urljar', url : 'http://localhost:" + port + "/jar1.jar'" + - " 'sha256':'e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc'}" + + " 'sha512':'d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420'}" + "}"; client = randomRestTestHarness(); TestSolrConfigHandler.runConfigCommand(client, "/config", payload); @@ -93,8 +93,8 @@ public void testDynamicLoadingUrl() throws Exception { null, "/config/overlay", null, - Arrays.asList("overlay", "runtimeLib", "urljar", "sha256"), - 
"e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc", 120); + Arrays.asList("overlay", "runtimeLib", "urljar", "sha512"), + "d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420", 120); payload = "{\n" + "'create-requesthandler' : { 'name' : '/runtime', 'class': 'org.apache.solr.core.RuntimeLibReqHandler', 'runtimeLib' : true}" + diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java index 5f6a1c2dff5e..17494e0dcde1 100644 --- a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java +++ b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java @@ -41,7 +41,7 @@ import org.apache.solr.handler.TestSolrConfigHandlerConcurrent; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.response.SolrQueryResponse; -import org.apache.solr.search.SolrCacheHolder; +import org.apache.solr.search.SolrCache; import org.apache.solr.util.RESTfulServerProvider; import org.apache.solr.util.RestTestBase; import org.apache.solr.util.RestTestHarness; @@ -543,8 +543,8 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw HashMap m = new HashMap(); rsp.add("caches", m); for (String c : caches) { - SolrCacheHolder cache = (SolrCacheHolder) req.getSearcher().getCache(c); - if(cache != null) m.put(c, cache.get().getClass().getName()); + SolrCache cache = req.getSearcher().getCache(c); + if(cache != null) m.put(c, cache.getClass().getName()); } } } diff --git a/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java b/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java index 88e60762c6fb..32ecc9e62ec1 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java +++ b/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java @@ -20,7 +20,6 @@ import java.lang.invoke.MethodHandles; import 
java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; -import java.util.List; import java.util.Map; import org.apache.http.HttpEntity; @@ -86,6 +85,7 @@ public void doBlobHandlerTest() throws Exception { "type"),null)); checkBlobPost(baseUrl, cloudClient); + checkBlobPostMd5(baseUrl, cloudClient); } } @@ -108,6 +108,15 @@ static void checkBlobPost(String baseUrl, CloudSolrClient cloudClient) throws Ex compareInputAndOutput(baseUrl + "/.system/blob/test/1?wt=filestream", bytarr, cloudClient); } + static void checkBlobPostMd5(String baseUrl, CloudSolrClient cloudClient) throws Exception { + String blobName = "md5Test"; + String stringValue = "MHMyugAGUxFzeqbpxVemACGbQ"; // Random string requires padding in md5 hash + String stringValueMd5 = "02d82dd5aabc47fae54ee3dd236ad83d"; + postAndCheck(cloudClient, baseUrl, blobName, ByteBuffer.wrap(stringValue.getBytes(StandardCharsets.UTF_8)), 1); + MapWriter map = TestSolrConfigHandlerConcurrent.getAsMap(baseUrl + "/.system/blob/" + blobName, cloudClient); + assertEquals(stringValueMd5, map._getStr("response/docs[0]/md5", null)); + } + public static void createSystemCollection(SolrClient client) throws SolrServerException, IOException { CollectionAdminResponse response1; CollectionAdminRequest.Create createCollectionRequest = CollectionAdminRequest.createCollection(".system",1,2); @@ -121,7 +130,6 @@ public static void postAndCheck(CloudSolrClient cloudClient, String baseUrl, Str String url; MapWriter map = null; - List l; final RTimer timer = new RTimer(); int i = 0; for (; i < 150; i++) {//15 secs diff --git a/solr/core/src/test/org/apache/solr/handler/TestContainerReqHandler.java b/solr/core/src/test/org/apache/solr/handler/TestContainerReqHandler.java deleted file mode 100644 index cf7b15a10598..000000000000 --- a/solr/core/src/test/org/apache/solr/handler/TestContainerReqHandler.java +++ /dev/null @@ -1,781 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license 
agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.solr.handler; - -import java.io.IOException; -import java.io.InputStream; -import java.io.Reader; -import java.lang.invoke.MethodHandles; -import java.net.URL; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.function.Predicate; - -import com.google.common.collect.ImmutableMap; -import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.client.solrj.ResponseParser; -import org.apache.solr.client.solrj.SolrClient; -import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.SolrRequest; -import org.apache.solr.client.solrj.SolrResponse; -import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.BaseHttpSolrClient; -import org.apache.solr.client.solrj.impl.HttpSolrClient; -import org.apache.solr.client.solrj.request.AbstractUpdateRequest; -import org.apache.solr.client.solrj.request.CollectionAdminRequest; -import org.apache.solr.client.solrj.request.GenericSolrRequest; -import org.apache.solr.client.solrj.request.QueryRequest; -import org.apache.solr.client.solrj.request.UpdateRequest; -import org.apache.solr.client.solrj.request.V2Request; -import 
org.apache.solr.client.solrj.response.SimpleSolrResponse; -import org.apache.solr.client.solrj.response.V2Response; -import org.apache.solr.cloud.ConfigRequest; -import org.apache.solr.cloud.MiniSolrCloudCluster; -import org.apache.solr.cloud.SolrCloudTestCase; -import org.apache.solr.common.cloud.ClusterProperties; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.params.MapSolrParams; -import org.apache.solr.common.params.ModifiableSolrParams; -import org.apache.solr.common.params.SolrParams; -import org.apache.solr.common.util.NamedList; -import org.apache.solr.common.util.Pair; -import org.apache.solr.common.util.StrUtils; -import org.apache.solr.common.util.Utils; -import org.apache.solr.core.ConfigOverlay; -import org.apache.solr.core.MemClassLoader; -import org.apache.solr.core.RuntimeLib; -import org.apache.solr.request.SolrRequestHandler; -import org.apache.solr.util.LogLevel; -import org.apache.zookeeper.CreateMode; -import org.apache.zookeeper.data.Stat; -import org.eclipse.jetty.server.Server; -import org.junit.BeforeClass; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static java.nio.charset.StandardCharsets.UTF_8; -import static org.apache.solr.cloud.TestCryptoKeys.readFile; -import static org.apache.solr.common.params.CommonParams.JAVABIN; -import static org.apache.solr.common.params.CommonParams.WT; -import static org.apache.solr.common.util.Utils.getObjectByPath; -import static org.apache.solr.core.TestDynamicLoading.getFileContent; -import static org.apache.solr.core.TestDynamicLoadingUrl.runHttpServer; - -@SolrTestCaseJ4.SuppressSSL -@LogLevel("org.apache.solr.common.cloud.ZkStateReader=DEBUG;org.apache.solr.handler.admin.CollectionHandlerApi=DEBUG;org.apache.solr.core.PackageManager=DEBUG;org.apache.solr.common.cloud.ClusterProperties=DEBUG") -public class TestContainerReqHandler extends SolrCloudTestCase { - private static final Logger log = 
LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - - @BeforeClass - public static void setupCluster() throws Exception { - System.setProperty("enable.runtime.lib", "true"); - - } - - static SolrResponse assertResponseValues(int repeats, SolrClient client, SolrRequest req, Map vals) throws Exception { - SolrResponse rsp = null; - - for (int i = 0; i < repeats; i++) { - if (i > 0) { - Thread.sleep(100); - } - try { - rsp = req.process(client); - } catch (Exception e) { - if (i >= repeats - 1) throw e; - continue; - } - for (Object e : vals.entrySet()) { - Map.Entry entry = (Map.Entry) e; - String k = (String) entry.getKey(); - List key = StrUtils.split(k, '/'); - - Object val = entry.getValue(); - Predicate p = val instanceof Predicate ? (Predicate) val : o -> { - String v = o == null ? null : String.valueOf(o); - return Objects.equals(val, o); - }; - boolean isPass = p.test(rsp._get(key, null)); - if (isPass) return rsp; - else if (i >= repeats - 1) { - fail("attempt: " + i + " Mismatch for value : '" + key + "' in response " + Utils.toJSONString(rsp)); - } - - } - - } - return rsp; - } - - private static Map assertVersionInSync(SolrZkClient zkClient, SolrClient solrClient) throws SolrServerException, IOException { - Stat stat = new Stat(); - Map map = new ClusterProperties(zkClient).getClusterProperties(stat); - assertEquals(String.valueOf(stat.getVersion()), getExtResponse(solrClient)._getStr("metadata/version", null)); - return map; - } - - private static V2Response getExtResponse(SolrClient solrClient) throws SolrServerException, IOException { - return new V2Request.Builder("/node/ext") - .withMethod(SolrRequest.METHOD.GET) - .build().process(solrClient); - } - - @Test - public void testPackageAPI() throws Exception { - Map jars = Utils.makeMap( - "/jar1.jar", getFileContent("runtimecode/runtimelibs.jar.bin"), - "/jar2.jar", getFileContent("runtimecode/runtimelibs_v2.jar.bin"), - "/jar3.jar", getFileContent("runtimecode/runtimelibs_v3.jar.bin")); 
- - Pair server = runHttpServer(jars); - int port = server.second(); - MiniSolrCloudCluster cluster = configureCluster(4).configure(); - try { - String payload = null; - try { - payload = "{add-package:{name : 'global', url: 'http://localhost:" + port + "/jar1.jar', " + - "sha256 : 'wrong-sha256'}}"; - new V2Request.Builder("/cluster") - .withPayload(payload) - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - fail("Expected error"); - } catch (BaseHttpSolrClient.RemoteExecutionException e) { - assertTrue("actual output : " + Utils.toJSONString(e.getMetaData()), e.getMetaData()._getStr("error/details[0]/errorMessages[0]", "").contains("expected sha256 hash :")); - } - - try { - payload = "{add-package:{name : 'foo', url: 'http://localhost:" + port + "/jar0.jar', " + - "sha256 : 'e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc'}}"; - new V2Request.Builder("/cluster") - .withPayload(payload) - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - fail("Expected error"); - } catch (BaseHttpSolrClient.RemoteExecutionException e) { - assertTrue("Actual output : " + Utils.toJSONString(e.getMetaData()), e.getMetaData()._getStr("error/details[0]/errorMessages[0]", "").contains("no such resource available: foo")); - } - - payload = "{add-package:{name : 'global', url: 'http://localhost:" + port + "/jar1.jar', " + - "sha256 : 'e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc'}}"; - new V2Request.Builder("/cluster") - .withPayload(payload) - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "add-package/sha256"), - getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/global/sha256")); - - - new V2Request.Builder("/cluster") - .withPayload("{add-requesthandler:{name : 'bar', class : 'org.apache.solr.core.RuntimeLibReqHandler', package : 
global}}") - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - Map map = new ClusterProperties(cluster.getZkClient()).getClusterProperties(); - - - V2Request request = new V2Request.Builder("/node/ext/bar") - .withMethod(SolrRequest.METHOD.POST) - .build(); - assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap( - "class", "org.apache.solr.core.RuntimeLibReqHandler", - "loader", MemClassLoader.class.getName(), - "version", null)); - - - assertEquals("org.apache.solr.core.RuntimeLibReqHandler", - getObjectByPath(map, true, Arrays.asList("requestHandler", "bar", "class"))); - - - payload = "{update-package:{name : 'global', url: 'http://localhost:" + port + "/jar3.jar', " + - "sha256 : '20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3'}}"; - new V2Request.Builder("/cluster") - .withPayload(payload) - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "update-package/sha256"), - getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/global/sha256")); - - - request = new V2Request.Builder("/node/ext/bar") - .withMethod(SolrRequest.METHOD.POST) - .build(); - assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap( - "class", "org.apache.solr.core.RuntimeLibReqHandler", - "loader", MemClassLoader.class.getName(), - "version", "3") - ); - - - new V2Request.Builder("/cluster") - .withPayload("{delete-requesthandler: 'bar'}") - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - request = new V2Request.Builder("/node/ext") - .withMethod(SolrRequest.METHOD.POST) - .build(); - assertResponseValues(10, cluster.getSolrClient(), request, ImmutableMap.of(SolrRequestHandler.TYPE, - (Predicate) o -> o instanceof List && ((List) o).isEmpty())); - new V2Request.Builder("/cluster") - .withPayload("{delete-package : 'global'}") 
- .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - assertResponseValues(10, cluster.getSolrClient(), request, ImmutableMap.of(RuntimeLib.TYPE, - (Predicate) o -> o instanceof List && ((List) o).isEmpty())); - - - URL baseUrl = cluster.getRandomJetty(random()).getBaseUrl(); - try(HttpSolrClient client = new HttpSolrClient.Builder(baseUrl.toString()).build()){ - SimpleSolrResponse rsp = new GenericSolrRequest(SolrRequest.METHOD.GET, "/____v2/node/blob", new ModifiableSolrParams()).process(client); - List l = (List) rsp.nl.get("blob"); - assertTrue(l.contains("e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc")); - assertTrue(l.contains("20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3")); - } - } finally { - cluster.shutdown(); - server.first().stop(); - } - } - - @Test - public void testRuntimeLibWithSig2048() throws Exception { - Map jars = Utils.makeMap( - "/jar1.jar", getFileContent("runtimecode/runtimelibs.jar.bin"), - "/jar2.jar", getFileContent("runtimecode/runtimelibs_v2.jar.bin"), - "/jar3.jar", getFileContent("runtimecode/runtimelibs_v3.jar.bin")); - - Pair server = runHttpServer(jars); - int port = server.second(); - MiniSolrCloudCluster cluster = configureCluster(4).configure(); - - try { - - byte[] derFile = readFile("cryptokeys/pub_key2048.der"); - cluster.getZkClient().makePath("/keys/exe", true); - cluster.getZkClient().create("/keys/exe/pub_key2048.der", derFile, CreateMode.PERSISTENT, true); - - String signature = "NaTm3+i99/ZhS8YRsLc3NLz2Y6VuwEbu7DihY8GAWwWIGm+jpXgn1JiuaenfxFCcfNKCC9WgZmEgbTZTzmV/OZMVn90u642YJbF3vTnzelW1pHB43ZRAJ1iesH0anM37w03n3es+vFWQtuxc+2Go888fJoMkUX2C6Zk6Jn116KE45DWjeyPM4mp3vvGzwGvdRxP5K9Q3suA+iuI/ULXM7m9mV4ruvs/MZvL+ELm5Jnmk1bBtixVJhQwJP2z++8tQKJghhyBxPIC/2fkAHobQpkhZrXu56JjP+v33ul3Ku4bbvfVMY/LVwCAEnxlvhk+C6uRCKCeFMrzQ/k5inasXLw=="; - - String payload = "{add-package:{name : 'global', url: 'http://localhost:" + port + "/jar1.jar', " + - "sig : 
'EdYkvRpMZbvElN93/xUmyKXcj6xHP16AVk71TlTascEwCb5cFQ2AeKhPIlwYpkLWXEOcLZKfeXoWwOLaV5ZNhg==' ," + - "sha256 : 'e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc'}}"; - try { - new V2Request.Builder("/cluster") - .withPayload(payload) - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - } catch (BaseHttpSolrClient.RemoteExecutionException e) { - //No key matched signature for jar - assertTrue(e.getMetaData()._getStr("error/details[0]/errorMessages[0]", "").contains("No key matched signature for jar")); - } - - - payload = "{add-package:{name : 'global', url: 'http://localhost:" + port + "/jar1.jar', " + - "sig : '" + signature + "'," + - "sha256 : 'e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc'}}"; - - new V2Request.Builder("/cluster") - .withPayload(payload) - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "add-package/sha256"), - getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/global/sha256")); - - new V2Request.Builder("/cluster") - .withPayload("{add-requesthandler:{name : 'bar', class : 'org.apache.solr.core.RuntimeLibReqHandler' package : global}}") - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - Map map = new ClusterProperties(cluster.getZkClient()).getClusterProperties(); - - - V2Request request = new V2Request.Builder("/node/ext/bar") - .withMethod(SolrRequest.METHOD.POST) - .build(); - assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap( - "class", "org.apache.solr.core.RuntimeLibReqHandler", - "loader", MemClassLoader.class.getName(), - "version", null)); - - - assertEquals("org.apache.solr.core.RuntimeLibReqHandler", - getObjectByPath(map, true, Arrays.asList("requestHandler", "bar", "class"))); - - payload = "{update-package:{name : 'global', url: 'http://localhost:" + port + 
"/jar3.jar', " + - "sig : 'YxFr6SpYrDwG85miDfRWHTjU9UltjtIWQZEhcV55C2rczRUVowCYBxmsDv5mAM8j0CTv854xpI1DtBT86wpoTdbF95LQuP9FJId4TS1j8bZ9cxHP5Cqyz1uBHFfUUNUrnpzTHQkVTp02O9NAjh3c2W41bL4U7j6jQ32+4CW2M+x00TDG0y0H75rQDR8zbLt31oWCz+sBOdZ3rGKJgAvdoGm/wVCTmsabZN+xoz4JaDeBXF16O9Uk9SSq4G0dz5YXFuLxHK7ciB5t0+q6pXlF/tdlDqF76Abze0R3d2/0MhXBzyNp3UxJmj6DiprgysfB0TbQtJG0XGfdSmx0VChvcA==' ," + - "sha256 : '20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3'}}"; - - new V2Request.Builder("/cluster") - .withPayload(payload) - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "update-package/sha256"), - getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/global/sha256")); - - - request = new V2Request.Builder("/node/ext/bar") - .withMethod(SolrRequest.METHOD.POST) - .build(); - assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap( - "class", "org.apache.solr.core.RuntimeLibReqHandler", - "loader", MemClassLoader.class.getName(), - "version", "3")); - - - } finally { - server.first().stop(); - cluster.shutdown(); - } - - } - - @Test - public void testRuntimeLibWithSig512() throws Exception { - Map jars = Utils.makeMap( - "/jar1.jar", getFileContent("runtimecode/runtimelibs.jar.bin"), - "/jar2.jar", getFileContent("runtimecode/runtimelibs_v2.jar.bin"), - "/jar3.jar", getFileContent("runtimecode/runtimelibs_v3.jar.bin")); - - Pair server = runHttpServer(jars); - int port = server.second(); - MiniSolrCloudCluster cluster = configureCluster(4).configure(); - - try { - - byte[] derFile = readFile("cryptokeys/pub_key512.der"); - cluster.getZkClient().makePath("/keys/exe", true); - cluster.getZkClient().create("/keys/exe/pub_key512.der", derFile, CreateMode.PERSISTENT, true); - - String signature = "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ=="; - - String payload = 
"{add-package:{name : 'global', url: 'http://localhost:" + port + "/jar1.jar', " + - "sig : '" + signature + "'," + - "sha256 : 'e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc'}}"; - - new V2Request.Builder("/cluster") - .withPayload(payload) - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "add-package/sha256"), - getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/global/sha256")); - - new V2Request.Builder("/cluster") - .withPayload("{add-requesthandler:{name : 'bar', class : 'org.apache.solr.core.RuntimeLibReqHandler' package : global }}") - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - Map map = new ClusterProperties(cluster.getZkClient()).getClusterProperties(); - - - V2Request request = new V2Request.Builder("/node/ext/bar") - .withMethod(SolrRequest.METHOD.POST) - .build(); - assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap( - "class", "org.apache.solr.core.RuntimeLibReqHandler", - "loader", MemClassLoader.class.getName(), - "version", null)); - - - assertEquals("org.apache.solr.core.RuntimeLibReqHandler", - getObjectByPath(map, true, Arrays.asList("requestHandler", "bar", "class"))); - - payload = "{update-package:{name : 'global', url: 'http://localhost:" + port + "/jar3.jar', " + - "sig : 'a400n4T7FT+2gM0SC6+MfSOExjud8MkhTSFylhvwNjtWwUgKdPFn434Wv7Qc4QEqDVLhQoL3WqYtQmLPti0G4Q==' ," + - "sha256 : '20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3'}}"; - - new V2Request.Builder("/cluster") - .withPayload(payload) - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "update-package/sha256"), - getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/global/sha256")); - - - 
request = new V2Request.Builder("/node/ext/bar") - .withMethod(SolrRequest.METHOD.POST) - .build(); - assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap( - "class", "org.apache.solr.core.RuntimeLibReqHandler", - "loader", MemClassLoader.class.getName(), - "version", "3")); - - } finally { - server.first().stop(); - cluster.shutdown(); - } - - } - - @Test - public void testSetClusterReqHandler() throws Exception { - MiniSolrCloudCluster cluster = configureCluster(4).configure(); - try { - SolrZkClient zkClient = cluster.getZkClient(); - new V2Request.Builder("/cluster") - .withPayload("{add-requesthandler:{name : 'foo', class : 'org.apache.solr.handler.DumpRequestHandler'}}") - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - - Map map = assertVersionInSync(zkClient, cluster.getSolrClient()); - - assertEquals("org.apache.solr.handler.DumpRequestHandler", - getObjectByPath(map, true, Arrays.asList("requestHandler", "foo", "class"))); - - assertVersionInSync(zkClient, cluster.getSolrClient()); - V2Response rsp = new V2Request.Builder("/node/ext/foo") - .withMethod(SolrRequest.METHOD.GET) - .withParams(new MapSolrParams((Map) Utils.makeMap("testkey", "testval"))) - .build().process(cluster.getSolrClient()); - assertEquals("testval", rsp._getStr("params/testkey", null)); - - new V2Request.Builder("/cluster") - .withPayload("{delete-requesthandler: 'foo'}") - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - - assertNull(getObjectByPath(map, true, Arrays.asList("requestHandler", "foo"))); - } finally { - cluster.shutdown(); - } - - } - - public void testPluginFrompackage() throws Exception { - String COLLECTION_NAME = "globalLoaderColl"; - Map jars = Utils.makeMap( - "/jar1.jar", getFileContent("runtimecode/runtimelibs.jar.bin"), - "/jar2.jar", getFileContent("runtimecode/runtimelibs_v2.jar.bin"), - "/jar3.jar", getFileContent("runtimecode/runtimelibs_v3.jar.bin")); - - Pair server 
= runHttpServer(jars); - int port = server.second(); - System.setProperty("enable.runtime.lib", "true"); - MiniSolrCloudCluster cluster = configureCluster(4) - .addConfig("conf", configset("cloud-minimal")) - .configure(); - try { - CollectionAdminRequest - .createCollection(COLLECTION_NAME, "conf", 2, 1) - .setMaxShardsPerNode(100) - .process(cluster.getSolrClient()); - - - cluster.waitForActiveCollection(COLLECTION_NAME, 2, 2); - String payload = "{add-package:{name : 'global', url: 'http://localhost:" + port + "/jar1.jar', " + - "sha256 : 'e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc'}}"; - new V2Request.Builder("/cluster") - .withPayload(payload) - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - String sha256 = (String) getObjectByPath(Utils.fromJSONString(payload), true, "add-package/sha256"); - String url = (String) getObjectByPath(Utils.fromJSONString(payload), true, "add-package/url"); - - assertEquals(sha256, - getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/global/sha256")); - - - payload = "{\n" + - "'create-requesthandler' : { 'name' : '/runtime', 'class': 'org.apache.solr.core.RuntimeLibReqHandler' , 'package':global }," + - "'create-searchcomponent' : { 'name' : 'get', 'class': 'org.apache.solr.core.RuntimeLibSearchComponent' , 'package':global }," + - "'create-queryResponseWriter' : { 'name' : 'json1', 'class': 'org.apache.solr.core.RuntimeLibResponseWriter' , 'package':global }" + - "}"; - cluster.getSolrClient().request(new ConfigRequest(payload) { - @Override - public String getCollection() { - return COLLECTION_NAME; - } - }); - - SolrParams params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME, - WT, JAVABIN, - "meta","true")); - - assertResponseValues(10, - cluster.getSolrClient(), - new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/queryResponseWriter/json1", params), - Utils.makeMap( - 
"/config/queryResponseWriter/json1/_packageinfo_/url", url, - "/config/queryResponseWriter/json1/_meta_/sha256", sha256 - )); - - params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME, - WT, JAVABIN, - "meta","true")); - - assertResponseValues(10, - cluster.getSolrClient(), - new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/searchComponent/get", params), - Utils.makeMap( - "config/searchComponent/get/_packageinfo_/url", url, - "config/searchComponent/get/_packageinfo_/sha256", sha256 - )); - - params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME, - WT, JAVABIN, - "meta","true")); - - assertResponseValues(10, - cluster.getSolrClient(), - new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/requestHandler/runtime", params), - Utils.makeMap( - ":config:requestHandler:/runtime:_packageinfo_:url", url, - ":config:requestHandler:/runtime:_packageinfo_:sha256", sha256 - )); - - - params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME, WT, JAVABIN)); - assertResponseValues(10, - cluster.getSolrClient(), - new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/overlay", params), - Utils.makeMap( - "overlay/queryResponseWriter/json1/class", "org.apache.solr.core.RuntimeLibResponseWriter", - "overlay/searchComponent/get/class", "org.apache.solr.core.RuntimeLibSearchComponent" - )); - - assertResponseValues(10, - cluster.getSolrClient(), - new GenericSolrRequest(SolrRequest.METHOD.GET, "/runtime", params), - Utils.makeMap("class", "org.apache.solr.core.RuntimeLibReqHandler", - "loader", MemClassLoader.class.getName())); - - assertResponseValues(10, - cluster.getSolrClient(), - new GenericSolrRequest(SolrRequest.METHOD.GET, "/get?abc=xyz", params), - Utils.makeMap("get", "org.apache.solr.core.RuntimeLibSearchComponent", - "loader", MemClassLoader.class.getName())); - - GenericSolrRequest req = new GenericSolrRequest(SolrRequest.METHOD.GET, "/runtime", - new MapSolrParams((Map) 
Utils.makeMap("collection", COLLECTION_NAME, WT, "json1"))); - req.setResponseParser(new ResponseParser() { - @Override - public String getWriterType() { - return "json1"; - } - - @Override - public NamedList processResponse(InputStream body, String encoding) { - return new NamedList<>((Map) Utils.fromJSON(body)); - } - - @Override - public NamedList processResponse(Reader reader) { - return new NamedList<>((Map) Utils.fromJSON(reader)); - - } - - }); - assertResponseValues(10, - cluster.getSolrClient(), - req, - Utils.makeMap("wt", "org.apache.solr.core.RuntimeLibResponseWriter", - "loader", MemClassLoader.class.getName())); - - - payload = "{update-package:{name : 'global', url: 'http://localhost:" + port + "/jar2.jar', " + - "sha256 : '79298d7d5c3e60d91154efe7d72f4536eac46698edfa22ab894b85492d562ed4'}}"; - new V2Request.Builder("/cluster") - .withPayload(payload) - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - sha256 = (String) getObjectByPath(Utils.fromJSONString(payload), true, "update-package/sha256"); - url = (String) getObjectByPath(Utils.fromJSONString(payload), true, "update-package/url"); - - assertEquals(sha256, - getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/global/sha256")); - - params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME, - WT, JAVABIN, - "meta","true")); - - assertResponseValues(10, - cluster.getSolrClient(), - new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/queryResponseWriter/json1", params), - Utils.makeMap( - "/config/queryResponseWriter/json1/_packageinfo_/url", url, - "/config/queryResponseWriter/json1/_packageinfo_/sha256", sha256 - )); - - params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME, - WT, JAVABIN, - "meta","true")); - - assertResponseValues(10, - cluster.getSolrClient(), - new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/searchComponent/get", params), - Utils.makeMap( - 
"/config/searchComponent/get/_packageinfo_/url", url, - "/config/searchComponent/get/_packageinfo_/sha256", sha256 - )); - - params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME, - WT, JAVABIN, - "meta","true")); - - assertResponseValues(10, - cluster.getSolrClient(), - new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/requestHandler/runtime", params), - Utils.makeMap( - ":config:requestHandler:/runtime:_packageinfo_:url", url, - ":config:requestHandler:/runtime:_packageinfo_:sha256", sha256 - )); - - - - try { - new V2Request.Builder("/cluster") - .withPayload(payload) - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - fail("should have failed"); - } catch (BaseHttpSolrClient.RemoteExecutionException e) { - assertTrue("actual output : " + Utils.toJSONString(e.getMetaData()), e.getMetaData()._getStr("error/details[0]/errorMessages[0]", "").contains("Trying to update a jar with the same sha256")); - } - - - assertResponseValues(10, - cluster.getSolrClient(), - new GenericSolrRequest(SolrRequest.METHOD.GET, "/get?abc=xyz", params), - Utils.makeMap("get", "org.apache.solr.core.RuntimeLibSearchComponent", - "loader", MemClassLoader.class.getName(), - "Version", "2")); - } finally { - cluster.deleteAllCollections(); - cluster.shutdown(); - server.first().stop(); - } - - } - -// @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-13650") - public void testCacheLoadFromPackage() throws Exception { - String COLLECTION_NAME = "globalCacheColl"; - Map jars = Utils.makeMap( - "/jar1.jar", getFileContent("runtimecode/cache.jar.bin"), - "/jar2.jar", getFileContent("runtimecode/cache_v2.jar.bin")); - - Pair server = runHttpServer(jars); - int port = server.second(); - - String overlay = "{" + - " \"props\":{\"query\":{\"documentCache\":{\n" + - " \"class\":\"org.apache.solr.core.MyDocCache\",\n" + - " \"size\":\"512\",\n" + - " \"initialSize\":\"512\" , \"package\":\"cache_pkg\"}}}}"; - 
MiniSolrCloudCluster cluster = configureCluster(4) - .addConfig("conf", configset("cloud-minimal"), - Collections.singletonMap(ConfigOverlay.RESOURCE_NAME, overlay.getBytes(UTF_8))) - .configure(); - try { - String payload = "{add-package:{name : 'cache_pkg', url: 'http://localhost:" + port + "/jar1.jar', " + - "sha256 : '32e8b5b2a95ea306538b52017f0954aa1b0f8a8b2d0acbc498fd0e66a223f7bd'}}"; - - new V2Request.Builder("/cluster") - .withPayload(payload) - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "add-package/sha256"), - getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/cache_pkg/sha256")); - - CollectionAdminRequest - .createCollection(COLLECTION_NAME, "conf", 2, 1) - .setMaxShardsPerNode(100) - .process(cluster.getSolrClient()); - - - cluster.waitForActiveCollection(COLLECTION_NAME, 2, 2); - SolrParams params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME, WT, JAVABIN)); - - NamedList rsp = cluster.getSolrClient().request(new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/overlay", params)); - assertEquals("org.apache.solr.core.MyDocCache", rsp._getStr("overlay/props/query/documentCache/class", null)); - - String sha256 = (String) getObjectByPath(Utils.fromJSONString(payload), true, "add-package/sha256"); - String url = (String) getObjectByPath(Utils.fromJSONString(payload), true, "add-package/url"); - - - params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME, - WT, JAVABIN, - "meta","true")); - - assertResponseValues(10, - cluster.getSolrClient(), - new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/query/documentCache", params), - Utils.makeMap( - "/config/query/documentCache/_packageinfo_/url", url, - "/config/query/documentCache/_packageinfo_/sha256", sha256 - )); - - - UpdateRequest req = new UpdateRequest(); - - req.add("id", "1", "desc_s", 
"document 1") - .setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true) - .setWaitSearcher(true); - cluster.getSolrClient().request(req, COLLECTION_NAME); - - SolrQuery solrQuery = new SolrQuery("q", "id:1", "collection", COLLECTION_NAME); - assertResponseValues(10, - cluster.getSolrClient(), - new QueryRequest(solrQuery), - Utils.makeMap("/response[0]/my_synthetic_fld_s", "version_1")); - - - payload = "{update-package:{name : 'cache_pkg', url: 'http://localhost:" + port + "/jar2.jar', " + - "sha256 : '0f670f6dcc2b00f9a448a7ebd457d4ff985ab702c85cdb3608dcae9889e8d702'}}"; - new V2Request.Builder("/cluster") - .withPayload(payload) - .withMethod(SolrRequest.METHOD.POST) - .build().process(cluster.getSolrClient()); - sha256 = (String) getObjectByPath(Utils.fromJSONString(payload), true, "update-package/sha256"); - url = (String) getObjectByPath(Utils.fromJSONString(payload), true, "update-package/url"); - assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "update-package/sha256"), - getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/cache_pkg/sha256")); - - params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME, - WT, JAVABIN, - "meta","true")); - - assertResponseValues(10, - cluster.getSolrClient(), - new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/query/documentCache", params), - Utils.makeMap( - "/config/query/documentCache/_packageinfo_/url", url, - "/config/query/documentCache/_packageinfo_/sha256", sha256 - )); - req = new UpdateRequest(); - req.add("id", "2", "desc_s", "document 1") - .setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true) - .setWaitSearcher(true); - cluster.getSolrClient().request(req, COLLECTION_NAME); - - - solrQuery = new SolrQuery("q", "id:2", "collection", COLLECTION_NAME); - SolrResponse result = assertResponseValues(10, - cluster.getSolrClient(), - new QueryRequest(solrQuery), - Utils.makeMap("response[0]/my_synthetic_fld_s", 
"version_2")); - - } finally { - cluster.deleteAllCollections(); - cluster.shutdown(); - server.first().stop(); - } - } - -} diff --git a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java index 3c16ce65a5f4..3b52568f6db8 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java @@ -31,7 +31,7 @@ import org.apache.solr.core.SolrCore; import org.apache.solr.handler.RequestHandlerBase; import org.apache.solr.metrics.MetricsMap; -import org.apache.solr.metrics.SolrMetrics; +import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrRequestHandler; import org.apache.solr.response.SolrQueryResponse; @@ -391,7 +391,7 @@ public void testMetricsUnload() throws Exception { static class RefreshablePluginHolder extends PluginBag.PluginHolder { private DumpRequestHandler rh; - private SolrMetrics metricsInfo; + private SolrMetricsContext metricsInfo; public RefreshablePluginHolder(PluginInfo info, DumpRequestHandler rh) { super(info); @@ -404,18 +404,18 @@ public boolean isLoaded() { } void closeHandler() throws Exception { - this.metricsInfo = rh.getMetrics(); - if(metricsInfo.tag.contains(String.valueOf(rh.hashCode()))){ - //this created a new child metrics - metricsInfo = metricsInfo.getParent(); - } + this.metricsInfo = rh.getSolrMetricsContext(); +// if(metricsInfo.tag.contains(String.valueOf(rh.hashCode()))){ +// //this created a new child metrics +// metricsInfo = metricsInfo.getParent(); +// } this.rh.close(); } void reset(DumpRequestHandler rh) throws Exception { this.rh = rh; if(metricsInfo != null) - this.rh.initializeMetrics(metricsInfo); + this.rh.initializeMetrics(metricsInfo, "/dumphandler"); } @@ -440,11 +440,11 @@ public String getDescription() { } @Override - public void 
initializeMetrics(SolrMetrics m) { - super.initializeMetrics(m); + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + super.initializeMetrics(parentContext, scope); MetricsMap metrics = new MetricsMap((detailed, map) -> map.putAll(gaugevals)); - solrMetrics.gauge(this, - metrics, true, "dumphandlergauge", getCategory().toString()); + solrMetricsContext.gauge(this, + metrics, true, "dumphandlergauge", getCategory().toString(), scope); } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java b/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java index 1762ec67fcea..fde0335b9cf8 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java @@ -17,6 +17,9 @@ package org.apache.solr.handler.admin; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -24,14 +27,23 @@ import java.util.Map; import java.util.Set; +import com.fasterxml.jackson.annotation.JsonProperty; import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.api.AnnotatedApi; import org.apache.solr.api.Api; import org.apache.solr.api.ApiBag; +import org.apache.solr.api.Command; +import org.apache.solr.api.EndPoint; import org.apache.solr.api.V2HttpCall; import org.apache.solr.api.V2HttpCall.CompositeApi; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.common.params.MapSolrParams; +import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.CommandOperation; +import org.apache.solr.common.util.ContentStream; +import org.apache.solr.common.util.ContentStreamBase; +import org.apache.solr.common.util.JsonSchemaValidator; import org.apache.solr.common.util.PathTrie; import 
org.apache.solr.common.util.StrUtils; import org.apache.solr.common.util.Utils; @@ -43,11 +55,15 @@ import org.apache.solr.handler.SolrConfigHandler; import org.apache.solr.request.LocalSolrQueryRequest; import org.apache.solr.request.SolrQueryRequest; +import org.apache.solr.request.SolrQueryRequestBase; import org.apache.solr.request.SolrRequestHandler; import org.apache.solr.response.SolrQueryResponse; +import org.apache.solr.security.PermissionNameProvider; +import static java.nio.charset.StandardCharsets.UTF_8; import static org.apache.solr.api.ApiBag.EMPTY_SPEC; import static org.apache.solr.client.solrj.SolrRequest.METHOD.GET; +import static org.apache.solr.client.solrj.SolrRequest.METHOD.POST; import static org.apache.solr.common.params.CommonParams.COLLECTIONS_HANDLER_PATH; import static org.apache.solr.common.params.CommonParams.CONFIGSETS_HANDLER_PATH; import static org.apache.solr.common.params.CommonParams.CORES_HANDLER_PATH; @@ -152,6 +168,122 @@ public void testFramework() { } + public void testPayload() { + String json = "{package:pkg1, version: '0.1', files :[a.jar, b.jar]}"; + Utils.fromJSONString(json); + + ApiBag apiBag = new ApiBag(false); + AnnotatedApi api = new AnnotatedApi(new ApiTest()); + apiBag.register(api, Collections.emptyMap()); + + ValidatingJsonMap spec = api.getSpec(); + + assertEquals("POST", spec._getStr("/methods[0]",null) ); + assertEquals("POST", spec._getStr("/methods[0]",null) ); + assertEquals("/cluster/package", spec._getStr("/url/paths[0]",null) ); + assertEquals("string", spec._getStr("/commands/add/properties/package/type",null) ); + assertEquals("array", spec._getStr("/commands/add/properties/files/type",null) ); + assertEquals("string", spec._getStr("/commands/add/properties/files/items/type",null) ); + assertEquals("string", spec._getStr("/commands/delete/items/type",null) ); + SolrQueryResponse rsp = v2ApiInvoke(apiBag, "/cluster/package", "POST", new ModifiableSolrParams(), + new 
ByteArrayInputStream("{add:{package:mypkg, version: '1.0', files : [a.jar, b.jar]}}".getBytes(UTF_8))); + + + AddVersion addversion = (AddVersion) rsp.getValues().get("add"); + assertEquals("mypkg", addversion.pkg); + assertEquals("1.0", addversion.version); + assertEquals("a.jar", addversion.files.get(0)); + assertEquals("b.jar", addversion.files.get(1)); + + + + } + + @EndPoint(method = POST, path = "/cluster/package", permission = PermissionNameProvider.Name.ALL) + public static class ApiTest { + @Command(name = "add") + public void add(SolrQueryRequest req, SolrQueryResponse rsp, AddVersion addVersion) { + rsp.add("add", addVersion); + + } + + @Command(name = "delete") + public void del(SolrQueryRequest req, SolrQueryResponse rsp, List names) { + rsp.add("delete",names); + + } + + } + + public static class AddVersion { + @JsonProperty(value = "package", required = true) + public String pkg; + @JsonProperty(value = "version", required = true) + public String version; + @JsonProperty(value = "files", required = true) + public List files; + } + + public void testAnnotatedApi() { + ApiBag apiBag = new ApiBag(false); + apiBag.register(new AnnotatedApi(new DummyTest()), Collections.emptyMap()); + SolrQueryResponse rsp = v2ApiInvoke(apiBag, "/node/filestore/package/mypkg/jar1.jar", "GET", + new ModifiableSolrParams(), null); + assertEquals("/package/mypkg/jar1.jar", rsp.getValues().get("path")); + } + + @EndPoint( + path = "/node/filestore/*", + method = SolrRequest.METHOD.GET, + permission = PermissionNameProvider.Name.ALL) + public class DummyTest { + @Command + public void read(SolrQueryRequest req, SolrQueryResponse rsp) { + rsp.add("FSRead.called", "true"); + rsp.add("path", req.getPathTemplateValues().get("*")); + } + } + + private static SolrQueryResponse v2ApiInvoke(ApiBag bag, String uri, String method, SolrParams params, InputStream payload) { + if (params == null) params = new ModifiableSolrParams(); + SolrQueryResponse rsp = new SolrQueryResponse(); + 
HashMap templateVals = new HashMap<>(); + Api[] currentApi = new Api[1]; + + SolrQueryRequestBase req = new SolrQueryRequestBase(null, params) { + + @Override + public Map getPathTemplateValues() { + return templateVals; + } + + @Override + protected Map getValidators() { + return currentApi[0] == null? + Collections.emptyMap(): + currentApi[0].getCommandSchema(); + } + + @Override + public Iterable getContentStreams() { + return Collections.singletonList(new ContentStreamBase() { + @Override + public InputStream getStream() throws IOException { + return payload; + } + }); + + } + }; + Api api = bag.lookup(uri, method, templateVals); + currentApi[0] = api; + + + api.call(req, rsp); + return rsp; + + } + public void testTrailingTemplatePaths() { PathTrie registry = new PathTrie<>(); Api api = new Api(EMPTY_SPEC) { @@ -204,7 +336,7 @@ public List getCommands(boolean validateInput) { } - private void assertConditions(Map root, Map conditions) { + public static void assertConditions(Map root, Map conditions) { for (Object o : conditions.entrySet()) { Map.Entry e = (Map.Entry) o; String path = (String) e.getKey(); diff --git a/solr/core/src/test/org/apache/solr/handler/component/DebugComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DebugComponentTest.java index 130f1ef1af97..0062fcfb939d 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DebugComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DebugComponentTest.java @@ -185,9 +185,11 @@ public void testModifyRequestTrack() { //if the request has debugQuery=true or debug=track, the sreq should get debug=track always assertTrue(Arrays.asList(sreq.params.getParams(CommonParams.DEBUG)).contains(CommonParams.TRACK)); //the purpose must be added as readable param to be included in the shard logs - assertEquals("GET_FIELDS,GET_DEBUG", sreq.params.get(CommonParams.REQUEST_PURPOSE)); + assertEquals("GET_FIELDS,GET_DEBUG,SET_TERM_STATS", 
sreq.params.get(CommonParams.REQUEST_PURPOSE)); //the rid must be added to be included in the shard logs assertEquals("123456-my_rid", sreq.params.get(CommonParams.REQUEST_ID)); + // close requests - this method obtains a searcher in order to access its StatsCache + req.close(); } } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java b/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java index ab7bf3aeee5d..81cc005bd9a2 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java @@ -20,6 +20,7 @@ import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -32,9 +33,12 @@ import org.apache.lucene.search.similarities.DFISimilarity; import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper; import org.apache.lucene.search.similarities.Similarity; +import org.apache.solr.client.solrj.SolrQuery; +import org.apache.solr.client.solrj.request.schema.SchemaRequest; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.core.CoreContainer; import org.apache.solr.core.SolrCore; +import org.apache.solr.schema.IndexSchema; import org.apache.solr.schema.SimilarityFactory; import org.apache.solr.search.similarities.SchemaSimilarityFactory; import org.apache.solr.util.RESTfulServerProvider; @@ -42,6 +46,7 @@ import org.apache.solr.util.RestTestHarness; import org.junit.After; import org.junit.Before; +import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -188,7 +193,7 @@ public void testAnalyzerClass() throws Exception { assertNotNull(map); Map analyzer = (Map)map.get("analyzer"); assertEquals("org.apache.lucene.analysis.core.WhitespaceAnalyzer", String.valueOf(analyzer.get("class"))); - assertEquals("5.0.0", 
String.valueOf(analyzer.get("luceneMatchVersion"))); + assertEquals("5.0.0", String.valueOf(analyzer.get(IndexSchema.LUCENE_MATCH_VERSION_PARAM))); } public void testAddFieldMatchingExistingDynamicField() throws Exception { @@ -898,6 +903,24 @@ public void testSortableTextFieldWithAnalyzer() throws Exception { } + @Test + public void testAddNewFieldAndQuery() throws Exception { + getSolrClient().add(Arrays.asList( + sdoc("id", "1", "term_s", "tux"))); + + getSolrClient().commit(true, true); + Map attrs = new HashMap<>(); + attrs.put("name", "newstringtestfield"); + attrs.put("type", "string"); + + new SchemaRequest.AddField(attrs).process(getSolrClient()); + + SolrQuery query = new SolrQuery("*:*"); + query.addFacetField("newstringtestfield"); + int size = getSolrClient().query(query).getResults().size(); + assertEquals(1, size); + } + public void testSimilarityParser() throws Exception { RestTestHarness harness = restTestHarness; diff --git a/solr/core/src/test/org/apache/solr/search/TestCaffeineCache.java b/solr/core/src/test/org/apache/solr/search/TestCaffeineCache.java new file mode 100644 index 000000000000..27ecf9ce5eed --- /dev/null +++ b/solr/core/src/test/org/apache/solr/search/TestCaffeineCache.java @@ -0,0 +1,286 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.search; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +import com.github.benmanes.caffeine.cache.RemovalCause; +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.TestUtil; +import org.apache.solr.SolrTestCase; +import org.apache.solr.metrics.SolrMetricManager; +import org.junit.Test; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; + +/** + * Test for {@link CaffeineCache}. + */ +public class TestCaffeineCache extends SolrTestCase { + + SolrMetricManager metricManager = new SolrMetricManager(); + String registry = TestUtil.randomSimpleString(random(), 2, 10); + String scope = TestUtil.randomSimpleString(random(), 2, 10); + + @Test + public void testSimple() throws IOException { + CaffeineCache lfuCache = new CaffeineCache<>(); + lfuCache.initializeMetrics(metricManager, registry, "foo", scope + "-1"); + + CaffeineCache newLFUCache = new CaffeineCache<>(); + newLFUCache.initializeMetrics(metricManager, registry, "foo2", scope + "-2"); + + Map params = new HashMap<>(); + params.put("size", "100"); + params.put("initialSize", "10"); + params.put("autowarmCount", "25"); + + NoOpRegenerator regenerator = new NoOpRegenerator(); + Object initObj = lfuCache.init(params, null, regenerator); + lfuCache.setState(SolrCache.State.LIVE); + for (int i = 0; i < 101; i++) { + lfuCache.put(i + 1, Integer.toString(i + 1)); + } + assertEquals("15", lfuCache.get(15)); + assertEquals("75", lfuCache.get(75)); + assertEquals(null, lfuCache.get(110)); + Map nl = lfuCache.getMetricsMap().getValue(); + 
assertEquals(3L, nl.get("lookups")); + assertEquals(2L, nl.get("hits")); + assertEquals(101L, nl.get("inserts")); + + assertEquals(null, lfuCache.get(1)); // first item put in should be the first out + + // Test autowarming + newLFUCache.init(params, initObj, regenerator); + newLFUCache.warm(null, lfuCache); + newLFUCache.setState(SolrCache.State.LIVE); + + newLFUCache.put(103, "103"); + assertEquals("15", newLFUCache.get(15)); + assertEquals("75", newLFUCache.get(75)); + assertEquals(null, newLFUCache.get(50)); + nl = newLFUCache.getMetricsMap().getValue(); + assertEquals(3L, nl.get("lookups")); + assertEquals(2L, nl.get("hits")); + assertEquals(1L, nl.get("inserts")); + assertEquals(0L, nl.get("evictions")); + + assertEquals(7L, nl.get("cumulative_lookups")); + assertEquals(4L, nl.get("cumulative_hits")); + assertEquals(102L, nl.get("cumulative_inserts")); + } + + @Test + public void testTimeDecay() { + Cache cacheDecay = Caffeine.newBuilder() + .executor(Runnable::run) + .maximumSize(20) + .build(); + for (int i = 1; i < 21; i++) { + cacheDecay.put(i, Integer.toString(i)); + } + Map itemsDecay; + + // Now increase the freq count for 5 items + for (int i = 0; i < 5; ++i) { + for (int j = 0; j < 10; ++j) { + cacheDecay.getIfPresent(i + 13); + } + } + // OK, 13 - 17 should have larger counts and should stick past next few collections + cacheDecay.put(22, "22"); + cacheDecay.put(23, "23"); + cacheDecay.put(24, "24"); + cacheDecay.put(25, "25"); + itemsDecay = cacheDecay.policy().eviction().get().hottest(10); + // 13 - 17 should be in cache, but 11 and 18 (among others) should not. 
Testing that elements before and + // after the ones with increased counts are removed, and all the increased count ones are still in the cache + assertNull(itemsDecay.get(11)); + assertNull(itemsDecay.get(18)); + assertNotNull(itemsDecay.get(13)); + assertNotNull(itemsDecay.get(14)); + assertNotNull(itemsDecay.get(15)); + assertNotNull(itemsDecay.get(16)); + assertNotNull(itemsDecay.get(17)); + + + // Testing that all the elements in front of the ones with increased counts are gone + for (int idx = 26; idx < 32; ++idx) { + cacheDecay.put(idx, Integer.toString(idx)); + } + //Surplus count should be at 0 + itemsDecay = cacheDecay.policy().eviction().get().hottest(10); + assertNull(itemsDecay.get(20)); + assertNull(itemsDecay.get(24)); + assertNotNull(itemsDecay.get(13)); + assertNotNull(itemsDecay.get(14)); + assertNotNull(itemsDecay.get(15)); + assertNotNull(itemsDecay.get(16)); + assertNotNull(itemsDecay.get(17)); + } + + @Test + public void testMaxIdleTime() throws Exception { + int IDLE_TIME_SEC = 5; + CountDownLatch removed = new CountDownLatch(1); + AtomicReference removalCause = new AtomicReference<>(); + CaffeineCache cache = new CaffeineCache() { + @Override + public void onRemoval(String key, String value, RemovalCause cause) { + super.onRemoval(key, value, cause); + removalCause.set(cause); + removed.countDown(); + } + }; + Map params = new HashMap<>(); + params.put("size", "6"); + params.put("maxIdleTime", "" + IDLE_TIME_SEC); + cache.init(params, null, new NoOpRegenerator()); + + cache.put("foo", "bar"); + assertEquals("bar", cache.get("foo")); + // sleep for at least the idle time before inserting other entries + // the eviction is piggy-backed on put() + Thread.sleep(TimeUnit.SECONDS.toMillis(IDLE_TIME_SEC * 2)); + cache.put("abc", "xyz"); + boolean await = removed.await(30, TimeUnit.SECONDS); + assertTrue("did not expire entry in in time", await); + assertEquals(RemovalCause.EXPIRED, removalCause.get()); + assertNull(cache.get("foo")); + } + + @Test 
+ public void testSetLimits() throws Exception { + AtomicReference removed = new AtomicReference<>(new CountDownLatch(2)); + List removalCauses = new ArrayList<>(); + List removedKeys = new ArrayList<>(); + Set allKeys = new HashSet<>(); + CaffeineCache cache = new CaffeineCache() { + @Override + public Accountable put(String key, Accountable val) { + allKeys.add(key); + return super.put(key, val); + } + + @Override + public void onRemoval(String key, Accountable value, RemovalCause cause) { + super.onRemoval(key, value, cause); + removalCauses.add(cause); + removedKeys.add(key); + removed.get().countDown(); + } + }; + Map params = new HashMap<>(); + params.put("size", "5"); + cache.init(params, null, new NoOpRegenerator()); + + for (int i = 0; i < 5; i++) { + cache.put("foo-" + i, new Accountable() { + @Override + public long ramBytesUsed() { + return 1024 * 1024; + } + }); + } + assertEquals(5, cache.size()); + // no evictions yet + assertEquals(2, removed.get().getCount()); + + cache.put("abc1", new Accountable() { + @Override + public long ramBytesUsed() { + return 1; + } + }); + cache.put("abc2", new Accountable() { + @Override + public long ramBytesUsed() { + return 2; + } + }); + boolean await = removed.get().await(30, TimeUnit.SECONDS); + assertTrue("did not evict entries in in time", await); + assertEquals(5, cache.size()); + assertEquals(2, cache.get("abc2").ramBytesUsed()); + for (String key : removedKeys) { + assertNull("key " + key + " still present!", cache.get(key)); + allKeys.remove(key); + } + for (RemovalCause cause : removalCauses) { + assertEquals(RemovalCause.SIZE, cause); + } + + removed.set(new CountDownLatch(2)); + removalCauses.clear(); + removedKeys.clear(); + // trim down by item count + cache.setMaxSize(3); + cache.put("abc3", new Accountable() { + @Override + public long ramBytesUsed() { + return 3; + } + }); + await = removed.get().await(30, TimeUnit.SECONDS); + assertTrue("did not evict entries in in time", await); + assertEquals(3, 
cache.size()); + for (String key : removedKeys) { + assertNull("key " + key + " still present!", cache.get(key)); + allKeys.remove(key); + } + for (RemovalCause cause : removalCauses) { + assertEquals(RemovalCause.SIZE, cause); + } + + // at least one item has to go + removed.set(new CountDownLatch(1)); + removalCauses.clear(); + removedKeys.clear(); + // trim down by ram size + cache.setMaxRamMB(1); + await = removed.get().await(30, TimeUnit.SECONDS); + assertTrue("did not evict entries in in time", await); + for (String key : removedKeys) { + assertNull("key " + key + " still present!", cache.get(key)); + allKeys.remove(key); + } + for (RemovalCause cause : removalCauses) { + assertEquals(RemovalCause.SIZE, cause); + } + // check total size of remaining items + long total = 0; + for (String key : allKeys) { + Accountable a = cache.get(key); + assertNotNull("missing value for key " + key, a); + total += a.ramBytesUsed(); + } + assertTrue("total ram bytes should be greater than 0", total > 0); + assertTrue("total ram bytes exceeded limit", total < 1024 * 1024); + cache.close(); + } +} diff --git a/solr/core/src/test/org/apache/solr/search/TestFastLRUCache.java b/solr/core/src/test/org/apache/solr/search/TestFastLRUCache.java index 0fb176516187..e3ee32867fd3 100644 --- a/solr/core/src/test/org/apache/solr/search/TestFastLRUCache.java +++ b/solr/core/src/test/org/apache/solr/search/TestFastLRUCache.java @@ -16,6 +16,7 @@ */ package org.apache.solr.search; +import org.apache.commons.math3.stat.descriptive.SummaryStatistics; import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; import org.apache.lucene.search.WildcardQuery; @@ -23,15 +24,18 @@ import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCase; +import org.apache.solr.common.util.TimeSource; import org.apache.solr.metrics.MetricsMap; import org.apache.solr.metrics.SolrMetricManager; import 
org.apache.solr.util.ConcurrentLRUCache; import org.apache.solr.util.RTimer; -import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.Random; +import java.util.TreeMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; @@ -47,16 +51,16 @@ public class TestFastLRUCache extends SolrTestCase { String registry = TestUtil.randomSimpleString(random(), 2, 10); String scope = TestUtil.randomSimpleString(random(), 2, 10); - public void testPercentageAutowarm() throws IOException { + public void testPercentageAutowarm() throws Exception { FastLRUCache fastCache = new FastLRUCache<>(); - fastCache.initializeMetrics(metricManager, registry, "foo", scope); - MetricsMap metrics = fastCache.getMetricsMap(); Map params = new HashMap<>(); params.put("size", "100"); params.put("initialSize", "10"); params.put("autowarmCount", "100%"); CacheRegenerator cr = new NoOpRegenerator(); Object o = fastCache.init(params, null, cr); + fastCache.initializeMetrics(metricManager, registry, "foo", scope); + MetricsMap metrics = fastCache.getMetricsMap(); fastCache.setState(SolrCache.State.LIVE); for (int i = 0; i < 101; i++) { fastCache.put(i + 1, "" + (i + 1)); @@ -69,9 +73,9 @@ public void testPercentageAutowarm() throws IOException { assertEquals(101L, nl.get("inserts")); assertEquals(null, fastCache.get(1)); // first item put in should be the first out FastLRUCache fastCacheNew = new FastLRUCache<>(); + fastCacheNew.init(params, o, cr); fastCacheNew.initializeMetrics(metricManager, registry, "foo", scope); metrics = fastCacheNew.getMetricsMap(); - fastCacheNew.init(params, o, cr); fastCacheNew.warm(null, fastCache); fastCacheNew.setState(SolrCache.State.LIVE); fastCache.close(); @@ -89,7 +93,7 @@ public void testPercentageAutowarm() throws IOException { fastCacheNew.close(); } - public void testPercentageAutowarmMultiple() throws IOException { + public void 
testPercentageAutowarmMultiple() throws Exception { doTestPercentageAutowarm(100, 50, new int[]{51, 55, 60, 70, 80, 99, 100}, new int[]{1, 2, 3, 5, 10, 20, 30, 40, 50}); doTestPercentageAutowarm(100, 25, new int[]{76, 80, 99, 100}, new int[]{1, 2, 3, 5, 10, 20, 30, 40, 50, 51, 55, 60, 70}); doTestPercentageAutowarm(1000, 10, new int[]{901, 930, 950, 999, 1000}, new int[]{1, 5, 100, 200, 300, 400, 800, 899, 900}); @@ -97,23 +101,23 @@ public void testPercentageAutowarmMultiple() throws IOException { doTestPercentageAutowarm(100, 0, new int[]{}, new int[]{1, 10, 25, 51, 55, 60, 70, 80, 99, 100, 200, 300}); } - private void doTestPercentageAutowarm(int limit, int percentage, int[] hits, int[]misses) { + private void doTestPercentageAutowarm(int limit, int percentage, int[] hits, int[]misses) throws Exception { FastLRUCache fastCache = new FastLRUCache<>(); - fastCache.initializeMetrics(metricManager, registry, "foo", scope); Map params = new HashMap<>(); params.put("size", String.valueOf(limit)); params.put("initialSize", "10"); params.put("autowarmCount", percentage + "%"); CacheRegenerator cr = new NoOpRegenerator(); Object o = fastCache.init(params, null, cr); + fastCache.initializeMetrics(metricManager, registry, "foo", scope); fastCache.setState(SolrCache.State.LIVE); for (int i = 1; i <= limit; i++) { fastCache.put(i, "" + i);//adds numbers from 1 to 100 } FastLRUCache fastCacheNew = new FastLRUCache<>(); - fastCacheNew.initializeMetrics(metricManager, registry, "foo", scope); fastCacheNew.init(params, o, cr); + fastCacheNew.initializeMetrics(metricManager, registry, "foo", scope); fastCacheNew.warm(null, fastCache); fastCacheNew.setState(SolrCache.State.LIVE); fastCache.close(); @@ -131,14 +135,14 @@ private void doTestPercentageAutowarm(int limit, int percentage, int[] hits, int fastCacheNew.close(); } - public void testNoAutowarm() throws IOException { + public void testNoAutowarm() throws Exception { FastLRUCache fastCache = new FastLRUCache<>(); - 
fastCache.initializeMetrics(metricManager, registry, "foo", scope); Map params = new HashMap<>(); params.put("size", "100"); params.put("initialSize", "10"); CacheRegenerator cr = new NoOpRegenerator(); Object o = fastCache.init(params, null, cr); + fastCache.initializeMetrics(metricManager, registry, "foo", scope); fastCache.setState(SolrCache.State.LIVE); for (int i = 0; i < 101; i++) { fastCache.put(i + 1, "" + (i + 1)); @@ -161,7 +165,7 @@ public void testNoAutowarm() throws IOException { fastCacheNew.close(); } - public void testFullAutowarm() throws IOException { + public void testFullAutowarm() throws Exception { FastLRUCache cache = new FastLRUCache<>(); Map params = new HashMap<>(); params.put("size", "100"); @@ -191,15 +195,15 @@ public void testFullAutowarm() throws IOException { cacheNew.close(); } - public void testSimple() throws IOException { + public void testSimple() throws Exception { FastLRUCache sc = new FastLRUCache(); - sc.initializeMetrics(metricManager, registry, "foo", scope); Map l = new HashMap(); l.put("size", "100"); l.put("initialSize", "10"); l.put("autowarmCount", "25"); CacheRegenerator cr = new NoOpRegenerator(); Object o = sc.init(l, null, cr); + sc.initializeMetrics(metricManager, registry, "foo", scope); sc.setState(SolrCache.State.LIVE); for (int i = 0; i < 101; i++) { sc.put(i + 1, "" + (i + 1)); @@ -216,8 +220,8 @@ public void testSimple() throws IOException { FastLRUCache scNew = new FastLRUCache(); - scNew.initializeMetrics(metricManager, registry, "foo", scope); scNew.init(l, o, cr); + scNew.initializeMetrics(metricManager, registry, "foo", scope); scNew.warm(null, sc); scNew.setState(SolrCache.State.LIVE); sc.close(); @@ -280,7 +284,7 @@ void doPerfTest(int iter, int cacheSize, int maxKey) { int upperWaterMark = (int)(lowerWaterMark * 1.1); Random r = random(); - ConcurrentLRUCache cache = new ConcurrentLRUCache(upperWaterMark, lowerWaterMark, (upperWaterMark+lowerWaterMark)/2, upperWaterMark, false, false, null); + 
ConcurrentLRUCache cache = new ConcurrentLRUCache(upperWaterMark, lowerWaterMark, (upperWaterMark+lowerWaterMark)/2, upperWaterMark, false, false, null, -1); boolean getSize=false; int minSize=0,maxSize=0; for (int i=0; i cache = new FastLRUCache<>(); - cache.initializeMetrics(metricManager, registry, "foo", scope); Map params = new HashMap<>(); params.put("size", "6"); params.put("maxRamMB", "8"); CacheRegenerator cr = new NoOpRegenerator(); Object o = cache.init(params, null, cr); + cache.initializeMetrics(metricManager, registry, "foo", scope); for (int i = 0; i < 6; i++) { cache.put("" + i, new Accountable() { @Override @@ -397,6 +401,41 @@ public long ramBytesUsed() { assertEquals(10, cache.size()); } + public void testMaxIdleTime() throws Exception { + int IDLE_TIME_SEC = 600; + long IDLE_TIME_NS = TimeUnit.NANOSECONDS.convert(IDLE_TIME_SEC, TimeUnit.SECONDS); + CountDownLatch sweepFinished = new CountDownLatch(1); + ConcurrentLRUCache cache = new ConcurrentLRUCache(6, 5, 5, 6, false, false, null, IDLE_TIME_SEC) { + @Override + public void markAndSweep() { + super.markAndSweep(); + sweepFinished.countDown(); + } + }; + long currentTime = TimeSource.NANO_TIME.getEpochTimeNs(); + for (int i = 0; i < 4; i++) { + cache.putCacheEntry(new ConcurrentLRUCache.CacheEntry<>("" + i, new Accountable() { + @Override + public long ramBytesUsed() { + return 1024 * 1024; + } + }, currentTime, 0)); + } + // no evictions yet + assertEquals(4, cache.size()); + assertEquals("markAndSweep spurious run", 1, sweepFinished.getCount()); + cache.putCacheEntry(new ConcurrentLRUCache.CacheEntry<>("4", new Accountable() { + @Override + public long ramBytesUsed() { + return 0; + } + }, currentTime - IDLE_TIME_NS * 2, 0)); + boolean await = sweepFinished.await(10, TimeUnit.SECONDS); + assertTrue("did not evict entries in time", await); + assertEquals(4, cache.size()); + assertNull(cache.get("4")); + } + /*** public void testPerf() { doPerfTest(1000000, 100000, 200000); // big cache, warmup 
@@ -433,7 +472,7 @@ void fillCache(SolrCache sc, int cacheSize, int maxKey) { } - void cachePerfTest(final SolrCache sc, final int nThreads, final int numGets, int cacheSize, final int maxKey) { + double[] cachePerfTest(final SolrCache sc, final int nThreads, final int numGets, int cacheSize, final int maxKey) { Map l = new HashMap(); l.put("size", ""+cacheSize); l.put("initialSize", ""+cacheSize); @@ -474,37 +513,73 @@ public void run() { } } - System.out.println("time=" + timer.getTime() + " impl=" +sc.getClass().getSimpleName() - +" nThreads= " + nThreads + " size="+cacheSize+" maxKey="+maxKey+" gets="+numGets - +" hitRatio="+(1-(((double)puts.get())/numGets))); + double time = timer.getTime(); + double hitRatio = (1-(((double)puts.get())/numGets)); +// System.out.println("time=" + time + " impl=" +sc.getClass().getSimpleName() +// +" nThreads= " + nThreads + " size="+cacheSize+" maxKey="+maxKey+" gets="+numGets +// +" hitRatio="+(1-(((double)puts.get())/numGets))); + return new double[]{time, hitRatio}; } - void perfTestBoth(int nThreads, int numGets, int cacheSize, int maxKey) { - cachePerfTest(new LRUCache(), nThreads, numGets, cacheSize, maxKey); - cachePerfTest(new FastLRUCache(), nThreads, numGets, cacheSize, maxKey); + private int NUM_RUNS = 5; + void perfTestBoth(int maxThreads, int numGets, int cacheSize, int maxKey, + Map> timeStats, + Map> hitStats) { + for (int nThreads = 1 ; nThreads <= maxThreads; nThreads++) { + String testKey = "threads=" + nThreads + ",gets=" + numGets + ",size=" + cacheSize + ",maxKey=" + maxKey; + System.err.println(testKey); + for (int i = 0; i < NUM_RUNS; i++) { + double[] data = cachePerfTest(new LRUCache(), nThreads, numGets, cacheSize, maxKey); + timeStats.computeIfAbsent(testKey, k -> new TreeMap<>()) + .computeIfAbsent("LRUCache", k -> new SummaryStatistics()) + .addValue(data[0]); + hitStats.computeIfAbsent(testKey, k -> new TreeMap<>()) + .computeIfAbsent("LRUCache", k -> new SummaryStatistics()) + .addValue(data[1]); 
+ data = cachePerfTest(new CaffeineCache(), nThreads, numGets, cacheSize, maxKey); + timeStats.computeIfAbsent(testKey, k -> new TreeMap<>()) + .computeIfAbsent("CaffeineCache", k -> new SummaryStatistics()) + .addValue(data[0]); + hitStats.computeIfAbsent(testKey, k -> new TreeMap<>()) + .computeIfAbsent("CaffeineCache", k -> new SummaryStatistics()) + .addValue(data[1]); + data = cachePerfTest(new FastLRUCache(), nThreads, numGets, cacheSize, maxKey); + timeStats.computeIfAbsent(testKey, k -> new TreeMap<>()) + .computeIfAbsent("FastLRUCache", k -> new SummaryStatistics()) + .addValue(data[0]); + hitStats.computeIfAbsent(testKey, k -> new TreeMap<>()) + .computeIfAbsent("FastLRUCache", k -> new SummaryStatistics()) + .addValue(data[1]); + } + } } + int NUM_THREADS = 4; /*** public void testCachePerf() { + Map> timeStats = new TreeMap<>(); + Map> hitStats = new TreeMap<>(); // warmup - perfTestBoth(2, 100000, 100000, 120000); - perfTestBoth(1, 2000000, 100000, 100000); // big cache, 100% hit ratio - perfTestBoth(2, 2000000, 100000, 100000); // big cache, 100% hit ratio - perfTestBoth(1, 2000000, 100000, 120000); // big cache, bigger hit ratio - perfTestBoth(2, 2000000, 100000, 120000); // big cache, bigger hit ratio - perfTestBoth(1, 2000000, 100000, 200000); // big cache, ~50% hit ratio - perfTestBoth(2, 2000000, 100000, 200000); // big cache, ~50% hit ratio - perfTestBoth(1, 2000000, 100000, 1000000); // big cache, ~10% hit ratio - perfTestBoth(2, 2000000, 100000, 1000000); // big cache, ~10% hit ratio - - perfTestBoth(1, 2000000, 1000, 1000); // small cache, ~100% hit ratio - perfTestBoth(2, 2000000, 1000, 1000); // small cache, ~100% hit ratio - perfTestBoth(1, 2000000, 1000, 1200); // small cache, bigger hit ratio - perfTestBoth(2, 2000000, 1000, 1200); // small cache, bigger hit ratio - perfTestBoth(1, 2000000, 1000, 2000); // small cache, ~50% hit ratio - perfTestBoth(2, 2000000, 1000, 2000); // small cache, ~50% hit ratio - perfTestBoth(1, 2000000, 1000, 
10000); // small cache, ~10% hit ratio - perfTestBoth(2, 2000000, 1000, 10000); // small cache, ~10% hit ratio + perfTestBoth(NUM_THREADS, 100000, 100000, 120000, new HashMap<>(), new HashMap()); + + perfTestBoth(NUM_THREADS, 2000000, 100000, 100000, timeStats, hitStats); // big cache, 100% hit ratio + perfTestBoth(NUM_THREADS, 2000000, 100000, 120000, timeStats, hitStats); // big cache, bigger hit ratio + perfTestBoth(NUM_THREADS, 2000000, 100000, 200000, timeStats, hitStats); // big cache, ~50% hit ratio + perfTestBoth(NUM_THREADS, 2000000, 100000, 1000000, timeStats, hitStats); // big cache, ~10% hit ratio + + perfTestBoth(NUM_THREADS, 2000000, 1000, 1000, timeStats, hitStats); // small cache, ~100% hit ratio + perfTestBoth(NUM_THREADS, 2000000, 1000, 1200, timeStats, hitStats); // small cache, bigger hit ratio + perfTestBoth(NUM_THREADS, 2000000, 1000, 2000, timeStats, hitStats); // small cache, ~50% hit ratio + perfTestBoth(NUM_THREADS, 2000000, 1000, 10000, timeStats, hitStats); // small cache, ~10% hit ratio + + System.out.println("\n=====================\n"); + timeStats.forEach((testKey, map) -> { + Map hits = hitStats.get(testKey); + System.out.println("* " + testKey); + map.forEach((type, summary) -> { + System.out.println("\t" + String.format("%14s", type) + "\ttime " + summary.getMean() + "\thitRatio " + hits.get(type).getMean()); + }); + }); } ***/ diff --git a/solr/core/src/test/org/apache/solr/search/TestLFUCache.java b/solr/core/src/test/org/apache/solr/search/TestLFUCache.java index beac366cd4c2..9802a63edabb 100644 --- a/solr/core/src/test/org/apache/solr/search/TestLFUCache.java +++ b/solr/core/src/test/org/apache/solr/search/TestLFUCache.java @@ -22,8 +22,10 @@ import java.util.Locale; import java.util.Map; import java.util.Random; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicLong; import 
java.util.concurrent.atomic.AtomicReference; import org.apache.lucene.index.Term; @@ -33,6 +35,7 @@ import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.util.ExecutorUtil; +import org.apache.solr.common.util.TimeSource; import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.util.ConcurrentLFUCache; import org.apache.solr.util.DefaultSolrThreadFactory; @@ -60,7 +63,7 @@ public static void beforeClass() throws Exception { @Test public void testTimeDecayParams() throws IOException { h.getCore().withSearcher(searcher -> { - LFUCache cacheDecayTrue = (LFUCache) ((SolrCacheHolder) searcher.getCache("lfuCacheDecayTrue")).get(); + LFUCache cacheDecayTrue = (LFUCache) searcher.getCache("lfuCacheDecayTrue"); assertNotNull(cacheDecayTrue); Map stats = cacheDecayTrue.getMetricsMap().getValue(); assertTrue((Boolean) stats.get("timeDecay")); @@ -71,7 +74,7 @@ public void testTimeDecayParams() throws IOException { addCache(cacheDecayTrue, 11, 12, 13, 14, 15); assertCache(cacheDecayTrue, 1, 2, 3, 4, 5, 12, 13, 14, 15); - LFUCache cacheDecayDefault = (LFUCache) ((SolrCacheHolder) searcher.getCache("lfuCacheDecayDefault")).get(); + LFUCache cacheDecayDefault = (LFUCache) searcher.getCache("lfuCacheDecayDefault"); assertNotNull(cacheDecayDefault); stats = cacheDecayDefault.getMetricsMap().getValue(); assertTrue((Boolean) stats.get("timeDecay")); @@ -85,7 +88,7 @@ public void testTimeDecayParams() throws IOException { addCache(cacheDecayDefault, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21); assertCache(cacheDecayDefault, 1, 2, 3, 4, 5, 17, 18, 19, 20, 21); - LFUCache cacheDecayFalse = (LFUCache) ((SolrCacheHolder) searcher.getCache("lfuCacheDecayFalse")).get(); + LFUCache cacheDecayFalse = (LFUCache) searcher.getCache("lfuCacheDecayFalse"); assertNotNull(cacheDecayFalse); stats = cacheDecayFalse.getMetricsMap().getValue(); assertFalse((Boolean) stats.get("timeDecay")); @@ -131,7 +134,7 @@ private void 
assertNotCache(LFUCache cache, int... gets) { @Test - public void testSimple() throws IOException { + public void testSimple() throws Exception { SolrMetricManager metricManager = new SolrMetricManager(); Random r = random(); String registry = TestUtil.randomSimpleString(r, 2, 10); @@ -494,6 +497,35 @@ public void testSetLimits() throws Exception { assertEquals(10, cache.size()); } + @Test + public void testMaxIdleTimeEviction() throws Exception { + int IDLE_TIME_SEC = 5; + long IDLE_TIME_NS = TimeUnit.NANOSECONDS.convert(IDLE_TIME_SEC, TimeUnit.SECONDS); + CountDownLatch sweepFinished = new CountDownLatch(1); + final AtomicLong numSweepsStarted = new AtomicLong(0); + ConcurrentLFUCache cache = new ConcurrentLFUCache(6, 5, 5, 6, false, false, null, false, IDLE_TIME_SEC) { + @Override + public void markAndSweep() { + numSweepsStarted.incrementAndGet(); + super.markAndSweep(); + sweepFinished.countDown(); + } + }; + for (int i = 0; i < 4; i++) { + cache.put("" + i, "foo " + i); + } + // no evictions yet + assertEquals(4, cache.size()); + assertEquals("markAndSweep spurious run", 0, numSweepsStarted.get()); + long currentTime = TimeSource.NANO_TIME.getEpochTimeNs(); + cache.putCacheEntry(new ConcurrentLFUCache.CacheEntry<>("4", "foo5", + currentTime - IDLE_TIME_NS * 2)); + boolean await = sweepFinished.await(10, TimeUnit.SECONDS); + assertTrue("did not evict entries in time", await); + assertEquals(4, cache.size()); + assertNull(cache.get("4")); + } + // From the original LRU cache tests, they're commented out there too because they take a while. 
// void doPerfTest(int iter, int cacheSize, int maxKey) { // long start = System.currentTimeMillis(); diff --git a/solr/core/src/test/org/apache/solr/search/TestLRUCache.java b/solr/core/src/test/org/apache/solr/search/TestLRUCache.java index f9cd0dba842c..297dfa25cd85 100644 --- a/solr/core/src/test/org/apache/solr/search/TestLRUCache.java +++ b/solr/core/src/test/org/apache/solr/search/TestLRUCache.java @@ -16,15 +16,17 @@ */ package org.apache.solr.search; -import java.io.IOException; import java.util.HashMap; import java.util.Map; +import java.util.concurrent.TimeUnit; import org.apache.lucene.util.Accountable; import org.apache.solr.SolrTestCase; import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.TestUtil; +import org.apache.solr.common.util.TimeSource; import org.apache.solr.metrics.SolrMetricManager; +import org.junit.Test; /** * Test for org.apache.solr.search.LRUCache @@ -35,7 +37,7 @@ public class TestLRUCache extends SolrTestCase { String registry = TestUtil.randomSimpleString(random(), 2, 10); String scope = TestUtil.randomSimpleString(random(), 2, 10); - public void testFullAutowarm() throws IOException { + public void testFullAutowarm() throws Exception { LRUCache lruCache = new LRUCache<>(); Map params = new HashMap<>(); params.put("size", "100"); @@ -61,14 +63,14 @@ public void testFullAutowarm() throws IOException { lruCacheNew.close(); } - public void testPercentageAutowarm() throws IOException { + public void testPercentageAutowarm() throws Exception { doTestPercentageAutowarm(100, 50, new int[]{51, 55, 60, 70, 80, 99, 100}, new int[]{1, 2, 3, 5, 10, 20, 30, 40, 50}); doTestPercentageAutowarm(100, 25, new int[]{76, 80, 99, 100}, new int[]{1, 2, 3, 5, 10, 20, 30, 40, 50, 51, 55, 60, 70}); doTestPercentageAutowarm(1000, 10, new int[]{901, 930, 950, 999, 1000}, new int[]{1, 5, 100, 200, 300, 400, 800, 899, 900}); doTestPercentageAutowarm(10, 10, new int[]{10}, new int[]{1, 5, 9, 100, 200, 300, 400, 800, 899, 900}); } - 
private void doTestPercentageAutowarm(int limit, int percentage, int[] hits, int[]misses) { + private void doTestPercentageAutowarm(int limit, int percentage, int[] hits, int[]misses) throws Exception { LRUCache lruCache = new LRUCache<>(); Map params = new HashMap<>(); params.put("size", String.valueOf(limit)); @@ -98,7 +100,7 @@ private void doTestPercentageAutowarm(int limit, int percentage, int[] hits, int } @SuppressWarnings("unchecked") - public void testNoAutowarm() throws IOException { + public void testNoAutowarm() throws Exception { LRUCache lruCache = new LRUCache<>(); lruCache.initializeMetrics(metricManager, registry, "foo", scope); Map params = new HashMap<>(); @@ -145,7 +147,7 @@ public long ramBytesUsed() { } }); assertEquals(1, accountableLRUCache.size()); - assertEquals(baseSize + 512 * 1024 + RamUsageEstimator.sizeOfObject("1") + RamUsageEstimator.LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY, accountableLRUCache.ramBytesUsed()); + assertEquals(baseSize + 512 * 1024 + RamUsageEstimator.sizeOfObject("1") + RamUsageEstimator.LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY + LRUCache.CacheValue.BASE_RAM_BYTES_USED, accountableLRUCache.ramBytesUsed()); accountableLRUCache.put("20", new Accountable() { @Override public long ramBytesUsed() { @@ -153,7 +155,7 @@ public long ramBytesUsed() { } }); assertEquals(1, accountableLRUCache.size()); - assertEquals(baseSize + 512 * 1024 + RamUsageEstimator.sizeOfObject("20") + RamUsageEstimator.LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY, accountableLRUCache.ramBytesUsed()); + assertEquals(baseSize + 512 * 1024 + RamUsageEstimator.sizeOfObject("20") + RamUsageEstimator.LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY + LRUCache.CacheValue.BASE_RAM_BYTES_USED, accountableLRUCache.ramBytesUsed()); Map nl = accountableLRUCache.getMetricsMap().getValue(); assertEquals(1L, nl.get("evictions")); assertEquals(1L, nl.get("evictionsRamUsage")); @@ -167,7 +169,8 @@ public long ramBytesUsed() { assertEquals(1L, nl.get("evictions")); assertEquals(1L, 
nl.get("evictionsRamUsage")); assertEquals(2L, accountableLRUCache.size()); - assertEquals(baseSize + 513 * 1024 + RamUsageEstimator.LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY * 2 + + assertEquals(baseSize + 513 * 1024 + + (RamUsageEstimator.LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY + LRUCache.CacheValue.BASE_RAM_BYTES_USED) * 2 + RamUsageEstimator.sizeOfObject("20") + RamUsageEstimator.sizeOfObject("300"), accountableLRUCache.ramBytesUsed()); accountableLRUCache.clear(); @@ -255,4 +258,39 @@ public long ramBytesUsed() { } assertEquals(10, cache.size()); } + + @Test + public void testMaxIdleTime() throws Exception { + int IDLE_TIME_SEC = 600; + long IDLE_TIME_NS = TimeUnit.NANOSECONDS.convert(IDLE_TIME_SEC, TimeUnit.SECONDS); + LRUCache cache = new LRUCache<>(); + cache.initializeMetrics(metricManager, registry, "foo", scope); + Map params = new HashMap<>(); + params.put("size", "6"); + params.put("maxIdleTime", "" + IDLE_TIME_SEC); + CacheRegenerator cr = new NoOpRegenerator(); + Object o = cache.init(params, null, cr); + cache.setSyntheticEntries(true); + for (int i = 0; i < 4; i++) { + cache.put("" + i, new Accountable() { + @Override + public long ramBytesUsed() { + return 1024 * 1024; + } + }); + } + // no evictions yet + assertEquals(4, cache.size()); + long currentTime = TimeSource.NANO_TIME.getEpochTimeNs(); + cache.putCacheValue("4", new LRUCache.CacheValue<>(new Accountable() { + @Override + public long ramBytesUsed() { + return 0; + } + }, currentTime - IDLE_TIME_NS * 2)); + assertEquals(4, cache.size()); + assertNull(cache.get("4")); + Map stats = cache.getMetricsMap().getValue(); + assertEquals(1, ((Number)stats.get("evictionsIdleTime")).intValue()); + } } diff --git a/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java b/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java index b11ff3ec9bba..77663dfeeade 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java +++ 
b/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java @@ -48,7 +48,7 @@ * Builds a random index of a few simple fields, maintaining an in-memory model of the expected * doc counts so that we can verify the results of range facets w/ nested field facets that need refinement. * - * The focus here is on stressing the casees where the document values fall directonly on the + * The focus here is on stressing the cases where the document values fall direct only on the * range boundaries, and how the various "include" options affects refinement. */ public class RangeFacetCloudTest extends SolrCloudTestCase { @@ -63,8 +63,7 @@ public class RangeFacetCloudTest extends SolrCloudTestCase { private static final int NUM_RANGE_VALUES = 6; private static final int TERM_VALUES_RANDOMIZER = 100; - // TODO: add 'count asc' once SOLR-12343 is fixed - private static final List SORTS = Arrays.asList("count desc", "index asc", "index desc"); + private static final List SORTS = Arrays.asList("count desc", "count asc", "index asc", "index desc"); private static final List> OTHERS = buildListOfFacetRangeOtherOptions(); private static final List BEFORE_AFTER_BETWEEN @@ -136,20 +135,20 @@ public void testInclude_Lower() throws Exception { ("q", "*:*", "rows", "0", "json.facet", // exclude a single low value from our ranges "{ foo:{ type:range, field:"+INT_FIELD+" start:1, end:5, gap:1"+otherStr+include+subFacet+" } }"); - + final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); final List> buckets = (List>) foo.get("buckets"); - + assertEquals("num buckets", 4, buckets.size()); for (int i = 0; i < 4; i++) { int expectedVal = i+1; assertBucket("bucket#" + i, expectedVal, modelVals(expectedVal), subFacetLimit, buckets.get(i)); } - + assertBeforeAfterBetween(other, modelVals(0), modelVals(5), modelVals(1,4), subFacetLimit, foo); - + } catch (AssertionError|RuntimeException ae) 
{ throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } @@ -157,7 +156,7 @@ public void testInclude_Lower() throws Exception { } } } - + public void testInclude_Lower_Gap2() throws Exception { for (boolean doSubFacet : Arrays.asList(false, true)) { final Integer subFacetLimit = pickSubFacetLimit(doSubFacet); @@ -538,10 +537,6 @@ public void testInclude_All_Gap2() throws Exception { } catch (AssertionError|RuntimeException ae) { throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } - - - - } } } @@ -582,6 +577,137 @@ public void testInclude_All_Gap2_hardend() throws Exception { } } + public void testRangeWithInterval() throws Exception { + for (boolean doSubFacet : Arrays.asList(false, true)) { + final Integer subFacetLimit = pickSubFacetLimit(doSubFacet); + final CharSequence subFacet = makeSubFacet(subFacetLimit); + for (boolean incUpper : Arrays.asList(false, true)) { + String incUpperStr = ",inclusive_to:"+incUpper; + final SolrQuery solrQuery = new SolrQuery + ("q", "*:*", "rows", "0", "json.facet", + "{ foo:{ type:range, field:" + INT_FIELD + " ranges:[{from:1, to:2"+ incUpperStr+ "}," + + "{from:2, to:3"+ incUpperStr +"},{from:3, to:4"+ incUpperStr +"},{from:4, to:5"+ incUpperStr+"}]" + + subFacet + " } }"); + + final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); + try { + final NamedList foo = ((NamedList>) rsp.getResponse().get("facets")).get("foo"); + final List> buckets = (List>) foo.get("buckets"); + + assertEquals("num buckets", 4, buckets.size()); + for (int i = 0; i < 4; i++) { + String expectedVal = "[" + (i + 1) + "," + (i + 2) + (incUpper? "]": ")"); + ModelRange modelVals = incUpper? 
modelVals(i+1, i+2) : modelVals(i+1); + assertBucket("bucket#" + i, expectedVal, modelVals, subFacetLimit, buckets.get(i)); + } + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + } + } + } + } + + public void testRangeWithOldIntervalFormat() throws Exception { + for (boolean doSubFacet : Arrays.asList(false, true)) { + final Integer subFacetLimit = pickSubFacetLimit(doSubFacet); + final CharSequence subFacet = makeSubFacet(subFacetLimit); + for (boolean incUpper : Arrays.asList(false, true)) { + String incUpperStr = incUpper? "]\"":")\""; + final SolrQuery solrQuery = new SolrQuery + ("q", "*:*", "rows", "0", "json.facet", + "{ foo:{ type:range, field:" + INT_FIELD + " ranges:[{range:\"[1,2"+ incUpperStr+ "}," + + "{range:\"[2,3"+ incUpperStr +"},{range:\"[3,4"+ incUpperStr +"},{range:\"[4,5"+ incUpperStr+"}]" + + subFacet + " } }"); + + final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); + try { + final NamedList foo = ((NamedList>) rsp.getResponse().get("facets")).get("foo"); + final List> buckets = (List>) foo.get("buckets"); + + assertEquals("num buckets", 4, buckets.size()); + for (int i = 0; i < 4; i++) { + String expectedVal = "[" + (i + 1) + "," + (i + 2) + (incUpper? "]": ")"); + ModelRange modelVals = incUpper? 
modelVals(i+1, i+2) : modelVals(i+1); + assertBucket("bucket#" + i, expectedVal, modelVals, subFacetLimit, buckets.get(i)); + } + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + } + } + } + } + + public void testIntervalWithMincount() throws Exception { + for (boolean doSubFacet : Arrays.asList(false, true)) { + final Integer subFacetLimit = pickSubFacetLimit(doSubFacet); + final CharSequence subFacet = makeSubFacet(subFacetLimit); + + long mincount_to_use = -1; + Object expected_mincount_bucket_val = null; + + // without mincount + SolrQuery solrQuery = new SolrQuery( + "q", "*:*", "rows", "0", "json.facet", + "{ foo:{ type:range, field:" + INT_FIELD + " ranges:[{from:1, to:3},{from:3, to:5}]" + + subFacet + " } }" + ); + + QueryResponse rsp = cluster.getSolrClient().query(solrQuery); + try { + final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + final List> buckets = (List>) foo.get("buckets"); + + assertEquals("num buckets", 2, buckets.size()); + + // upper is not included + assertBucket("bucket#0", "[1,3)", modelVals(1,2), subFacetLimit, buckets.get(0)); + assertBucket("bucket#1", "[3,5)", modelVals(3,4), subFacetLimit, buckets.get(1)); + + // if we've made it this far, then our buckets match the model + // now use our buckets to pick a mincount to use based on the MIN(+1) count seen + long count0 = ((Number)buckets.get(0).get("count")).longValue(); + long count1 = ((Number)buckets.get(1).get("count")).longValue(); + + mincount_to_use = 1 + Math.min(count0, count1); + if (count0 > count1) { + expected_mincount_bucket_val = buckets.get(0).get("val"); + } else if (count1 > count0) { + expected_mincount_bucket_val = buckets.get(1).get("val"); + } + + } catch (AssertionError|RuntimeException ae) { + throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + } + + // with 
mincount + solrQuery = new SolrQuery( + "q", "*:*", "rows", "0", "json.facet", + "{ foo:{ type:range, field:" + INT_FIELD + " ranges:[{from:1, to:3},{from:3, to:5}]" + + ",mincount:" + mincount_to_use + subFacet + " } }" + ); + + rsp = cluster.getSolrClient().query(solrQuery); + try { + final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + final List> buckets = (List>) foo.get("buckets"); + + if (null == expected_mincount_bucket_val) { + assertEquals("num buckets", 0, buckets.size()); + } else { + assertEquals("num buckets", 1, buckets.size()); + final Object actualBucket = buckets.get(0); + if (expected_mincount_bucket_val.equals("[1,3)")) { + assertBucket("bucket#0(0)", "[1,3)", modelVals(1,2), subFacetLimit, actualBucket); + } else { + assertBucket("bucket#0(1)", "[3,5)", modelVals(3,4), subFacetLimit, actualBucket); + } + } + } catch (AssertionError|RuntimeException ae) { + throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + } + } + } + /** * Helper method for validating a single 'bucket' from a Range facet. * @@ -592,7 +718,7 @@ public void testInclude_All_Gap2_hardend() throws Exception { * @param actualBucket the actual bucket returned from a query for all assertions to be conducted against. 
*/ private static void assertBucket(final String label, - final Integer expectedVal, + final Object expectedVal, final ModelRange expectedRangeValues, final Integer subFacetLimitUsed, final Object actualBucket) { @@ -614,7 +740,7 @@ private static void assertBucket(final String label, expectedCount += RANGE_MODEL[i]; toMerge.add(TERM_MODEL[i]); } - + assertEqualsHACK("count", expectedCount, bucket.get("count")); // merge the maps of our range values by summing the (int) values on key collisions @@ -650,7 +776,7 @@ private static void assertBucket(final String label, } /** - * A convinience method for calling {@link #assertBucket} on the before/after/between buckets + * A convenience method for calling {@link #assertBucket} on the before/after/between buckets * of a facet result, based on the {@link FacetRangeOther} specified for this facet. * * @see #assertBucket @@ -686,7 +812,7 @@ private static void assertBeforeAfterBetween(final EnumSet othe private static final class ModelRange { public final int lower; public final int upper; - /** Don't use, use the convinience methods */ + /** Don't use, use the convenience methods */ public ModelRange(int lower, int upper) { if (lower < 0 || upper < 0) { assert(lower < 0 && upper < lower); @@ -771,13 +897,13 @@ private static final String formatFacetRangeOther(EnumSet other String val = other.toString(); if (random().nextBoolean()) { // two valid syntaxes to randomize between: - // - a JSON list of items (conviniently the default toString of EnumSet), - // - a single quoted string containing the comma seperated list + // - a JSON list of items (conveniently the default toString of EnumSet), + // - a single quoted string containing the comma separated list val = val.replaceAll("\\[|\\]","'"); // HACK: work around SOLR-12539... 
// - // when sending a single string containing a comma seperated list of values, JSON Facets 'other' + // when sending a single string containing a comma separated list of values, JSON Facets 'other' // parsing can't handle any leading (or trailing?) whitespace val = val.replaceAll("\\s",""); } diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java index 40dabea228b7..461611cecb2e 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java +++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java @@ -211,7 +211,7 @@ public void testMerge() throws Exception { null, null ); - + // same test, but nested in a terms facet doTestRefine("{top:{type:terms, field:Afield, facet:{x : {type:terms, field:X, limit:2, refine:true} } } }", "{top: {buckets:[{val:'A', count:2, x:{buckets:[{val:x1, count:5},{val:x2, count:3}], more:true} } ] } }", @@ -290,7 +290,39 @@ public void testMerge() throws Exception { // refinement... 
null, null); - + + // same test, but nested in range facet with ranges + doTestRefine("{top:{type:range, field:R, ranges:[{from:0, to:1}], facet:{x : {type:terms, field:X, limit:2, refine:true} } } }", + "{top: {buckets:[{val:\"[0,1)\", count:2, x:{buckets:[{val:x1, count:5},{val:x2, count:3}],more:true} } ] } }", + "{top: {buckets:[{val:\"[0,1)\", count:1, x:{buckets:[{val:x2, count:4},{val:x3, count:2}],more:true} } ] } }", + null, + "=={top: {" + + "_s:[ [\"[0,1)\" , {x:{_l:[x1]}} ] ]" + + " } " + + "}" + ); + + doTestRefine("{top:{type:range, field:R, ranges:[{from:\"*\", to:1}], facet:{x : {type:terms, field:X, limit:2, refine:true} } } }", + "{top: {buckets:[{val:\"[*,1)\", count:2, x:{buckets:[{val:x1, count:5},{val:x2, count:3}],more:true} } ] } }", + "{top: {buckets:[{val:\"[*,1)\", count:1, x:{buckets:[{val:x2, count:4},{val:x3, count:2}],more:true} } ] } }", + null, + "=={top: {" + + "_s:[ [\"[*,1)\" , {x:{_l:[x1]}} ] ]" + + " } " + + "}" + ); + + // a range facet w/o any sub facets shouldn't require any refinement + // other and include ignored for ranges + doTestRefine("{top:{type:range, other:all, field:R, ranges:[{from:0, to:2},{from:2, to:3}] } }" + + // phase #1 + "{top: {buckets:[{val:\"[0,2)\", count:2}, {val:\"[2,3)\", count:2}]," + + " } }", + "{top: {buckets:[{val:\"[0,2)\", count:2}, {val:\"[2,3)\", count:19}]," + + " } }", + // refinement... 
+ null, + null); // for testing partial _p, we need a partial facet within a partial facet doTestRefine("{top:{type:terms, field:Afield, refine:true, limit:1, facet:{x : {type:terms, field:X, limit:1, refine:true} } } }", diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java index b70c8ddd7b21..b3586ee18180 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java +++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java @@ -295,7 +295,7 @@ public void testBehaviorEquivilenceOfUninvertibleFalse() throws Exception { ); } } - + /** * whitebox sanity checks that a shard request range facet that returns "between" or "after" * will cause the correct "actual_end" to be returned @@ -3207,6 +3207,256 @@ public void testDomainErrors() throws Exception { } + @Test + public void testRangeFacetWithRanges() throws Exception { + Client client = Client.localClient(); + client.deleteByQuery("*:*", null); + indexSimple(client); + + final SolrParams p = params("q", "*:*", "rows", "0"); + // with lower and upper include + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i, ranges:[{range:\" [-5,7] \"}]}}"), + "facets=={count:6, price:{buckets:[{val:\"[-5,7]\",count:5}]}}"); + + // with lower include and upper exclude + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{range:\"[-5,7)\"}]}}"), + "facets=={count:6, price:{buckets:[{val:\"[-5,7)\",count:4}]}}"); + + // with lower exclude and upper include + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{range:\"(-5,7]\"}]}}"), + "facets=={count:6, price:{buckets:[{val:\"(-5,7]\",count:3}]}}"); + + // with lower and upper exclude + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{range:\"(-5,7)\"}]}}"), + "facets=={count:6, price:{buckets:[{val:\"(-5,7)\",count:2}]}}"); 
+ + // with other and include, they are not supported + // but wouldn't throw any error as they are not consumed + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{range:\"(-5,7)\"}],include:\"lower\",other:[\"after\"]}}"), + "facets=={count:6, price:{buckets:[{val:\"(-5,7)\",count:2}]}}"); + + // with mincount>0 + client.testJQ( + params(p, "json.facet", "{price:{type : range,field : num_i,mincount:3," + + "ranges:[{range:\"(-5,7)\"},{range:\"(-5,7]\"}]}}" + ), + "facets=={count:6, price:{buckets:[{val:\"(-5,7]\",count:3}]}}"); + + // with multiple ranges + client.testJQ( + params(p, "json.facet", "{price:{type : range,field : num_i," + + "ranges:[{range:\"(-5,7)\"},{range:\"(-5,7]\"}]}}" + ), + "facets=={count:6, price:{buckets:[{val:\"(-5,7)\",count:2},{val:\"(-5,7]\",count:3}]}}"); + + // with * as one of the values + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{range:\"(*,10]\"}]}}"), + "facets=={count:6, price:{buckets:[{val:\"(*,10]\",count:5}]}}"); + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{range:\"[-5,*)\"}]}}"), + "facets=={count:6, price:{buckets:[{val:\"[-5,*)\",count:5}]}}"); + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{range:\"[*,*]\"}]}}"), + "facets=={count:6, price:{buckets:[{val:\"[*,*]\",count:5}]}}"); + } + + @Test + public void testRangeFacetWithRangesInNewFormat() throws Exception { + Client client = Client.localClient(); + client.deleteByQuery("*:*", null); + indexSimple(client); + SolrParams p = params("q", "*:*", "rows", "0"); + + //case without inclusive params + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{from:-5, to:7}]}}"), + "facets=={count:6, price:{buckets:[{val:\"[-5,7)\",count:4}]}}"); + + //case without key param and to included + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{from:-5, 
to:7,inclusive_from:true ,inclusive_to:true}]}}"), + "facets=={count:6, price:{buckets:[{val:\"[-5,7]\",count:5}]}}"); + + //case with all params + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{from:-5, to:7,inclusive_from:true ,inclusive_to:true}]}}"), + "facets=={count:6, price:{buckets:[{val:\"[-5,7]\",count:5}]}}"); + + // from and to excluded + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:false}]}}"), + "facets=={count:6, price:{buckets:[{val:\"(-5,7)\",count:2}]}}"); + + // from excluded and to included + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:true}]}}"), + "facets=={count:6, price:{buckets:[{val:\"(-5,7]\",count:3}]}}"); + + // multiple ranges + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,include:[\"lower\"], outer:\"before\"," + + "ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:true},{from:-5, to:7,inclusive_from:false ,inclusive_to:false}]}}"), + "facets=={count:6, price:{buckets:[{val:\"(-5,7]\",count:3},{val:\"(-5,7)\",count:2}]}}"); + + // with mincount>0 + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,mincount:3" + + "ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:true},{from:-5, to:7,inclusive_from:false ,inclusive_to:false}]}}"), + "facets=={count:6, price:{buckets:[{val:\"(-5,7]\",count:3}]}}"); + + // mix of old and new formats + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i," + + "ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:true},{range:\"(-5,7)\"}]}}"), + "facets=={count:6, price:{buckets:[{val:\"(-5,7]\",count:3},{val:\"(-5,7)\",count:2}]}}"); + + // from==to + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{from:-5, to:-5,inclusive_from:false 
,inclusive_to:true}]}}"), + "facets=={count:6, price:{buckets:[{val:\"(-5,-5]\",count:0}]}}"); + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{from:-5, to:-5,inclusive_from:false ,inclusive_to:false}]}}"), + "facets=={count:6, price:{buckets:[{val:\"(-5,-5)\",count:0}]}}"); + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{from:-5, to:-5,inclusive_from:true ,inclusive_to:false}]}}"), + "facets=={count:6, price:{buckets:[{val:\"[-5,-5)\",count:0}]}}"); + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{from:-5, to:-5,inclusive_from:true ,inclusive_to:true}]}}"), + "facets=={count:6, price:{buckets:[{val:\"[-5,-5]\",count:2}]}}"); + + // with * as one of the values + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{from:\"*\", to:10,inclusive_from:false ,inclusive_to:true}]}}"), + "facets=={count:6, price:{buckets:[{val:\"(*,10]\",count:5}]}}"); + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{from:-5, to:\"*\",inclusive_from:true ,inclusive_to:false}]}}"), + "facets=={count:6, price:{buckets:[{val:\"[-5,*)\",count:5}]}}"); + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{from:-5,inclusive_from:true ,inclusive_to:false}]}}"), + "facets=={count:6, price:{buckets:[{val:\"[-5,*)\",count:5}]}}"); + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{from:\"*\", to:\"*\",inclusive_from:true ,inclusive_to:false}]}}"), + "facets=={count:6, price:{buckets:[{val:\"[*,*)\",count:5}]}}"); + client.testJQ(params(p, "json.facet" + , "{price:{type : range,field : num_i,ranges:[{inclusive_from:true ,inclusive_to:false}]}}"), + "facets=={count:6, price:{buckets:[{val:\"[*,*)\",count:5}]}}"); + } + + @Test + public void testRangeFacetsErrorCases() throws Exception { + Client client = Client.localClient(); + 
client.deleteByQuery("*:*", null); + indexSimple(client); + + SolrParams params = params("q", "*:*", "rows", "0"); + + // invalid format for ranges + SolrException ex = expectThrows(SolrException.class, + () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i,start:-10,end:10,gap:2," + + "ranges:[{key:\"0-200\", to:200}]}}")) + ); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); + assertEquals("Cannot set gap/start/end and ranges params together", ex.getMessage()); + + ex = expectThrows(SolrException.class, + () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + + "ranges:bleh}}")) + ); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); + assertTrue(ex.getMessage().contains("Expected List for ranges but got String")); + + ex = expectThrows(SolrException.class, + () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + + "ranges:[bleh]}}")) + ); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); + assertTrue(ex.getMessage().contains("Expected Map for range but got String")); + + ex = expectThrows(SolrException.class, + () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + + "ranges:[{from:0, to:200, inclusive_to:bleh}]}}")) + ); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); + assertTrue(ex.getMessage().contains("Expected boolean type for param 'inclusive_to' but got String")); + + ex = expectThrows(SolrException.class, + () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + + "ranges:[{from:0, to:200, inclusive_from:bleh}]}}")) + ); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); + assertTrue(ex.getMessage().contains("Expected boolean type for param 'inclusive_from' but got String")); + + ex = expectThrows(SolrException.class, + () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + + "ranges:[{from:bleh, to:200}]}}")) + ); 
+ assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); + assertEquals("Can't parse value bleh for field: num_i", ex.getMessage()); + + ex = expectThrows(SolrException.class, + () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + + "ranges:[{from:0, to:bleh}]}}")) + ); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); + assertEquals("Can't parse value bleh for field: num_i", ex.getMessage()); + + ex = expectThrows(SolrException.class, + () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + + "ranges:[{from:200, to:0}]}}")) + ); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); + assertEquals("'from' is higher than 'to' in range for key: [200,0)", ex.getMessage()); + + // with old format + ex = expectThrows(SolrException.class, + () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + + "ranges:[{range:\"\"}]}}")) + ); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); + assertTrue(ex.getMessage().contains("empty facet range")); + + ex = expectThrows(SolrException.class, + () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + + "ranges:[{range:\"bl\"}]}}")) + ); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); + assertTrue(ex.getMessage().contains("Invalid start character b in facet range bl")); + + ex = expectThrows(SolrException.class, + () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + + "ranges:[{range:\"(bl\"}]}}")) + ); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); + assertTrue(ex.getMessage().contains("Invalid end character l in facet range (bl")); + + ex = expectThrows(SolrException.class, + () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + + "ranges:[{range:\"(bleh,12)\"}]}}")) + ); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); + assertEquals("Can't parse value 
bleh for field: num_i", ex.getMessage()); + + ex = expectThrows(SolrException.class, + () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + + "ranges:[{range:\"(12,bleh)\"}]}}")) + ); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); + assertEquals("Can't parse value bleh for field: num_i", ex.getMessage()); + + ex = expectThrows(SolrException.class, + () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + + "ranges:[{range:\"(200,12)\"}]}}")) + ); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); + assertEquals("'start' is higher than 'end' in range for key: (200,12)", ex.getMessage()); + } + @Test public void testOtherErrorCases() throws Exception { Client client = Client.localClient(); @@ -3273,6 +3523,39 @@ public void testOtherErrorCases() throws Exception { "Expected boolean type for param 'perSeg' but got Long = 2 , path=facet/cat_s", req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,perSeg:2}}"), SolrException.ErrorCode.BAD_REQUEST); + + assertQEx("Should fail as sort is invalid", + "Invalid sort option 'bleh' for field 'cat_s'", + req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,sort:bleh}}"), + SolrException.ErrorCode.BAD_REQUEST); + + assertQEx("Should fail as sort order is invalid", + "Unknown Sort direction 'bleh'", + req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,sort:{count: bleh}}}"), + SolrException.ErrorCode.BAD_REQUEST); + + // test for prelim_sort + assertQEx("Should fail as prelim_sort is invalid", + "Invalid prelim_sort option 'bleh' for field 'cat_s'", + req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,prelim_sort:bleh}}"), + SolrException.ErrorCode.BAD_REQUEST); + + assertQEx("Should fail as prelim_sort map is invalid", + "Invalid prelim_sort option '{bleh=desc}' for field 'cat_s'", + req("q", "*:*", "rows", "0", "json.facet", 
"{cat_s:{type:terms,field:cat_s,prelim_sort:{bleh:desc}}}"), + SolrException.ErrorCode.BAD_REQUEST); + + // with nested facet + assertQEx("Should fail as prelim_sort is invalid", + "Invalid sort option 'bleh' for field 'id'", + req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,sort:bleh,facet:" + + "{bleh:\"unique(cat_s)\",id:{type:terms,field:id,sort:bleh}}}}"), + SolrException.ErrorCode.BAD_REQUEST); + + assertQ("Should pass as sort is proper", + req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,sort:bleh,facet:" + + "{bleh:\"unique(cat_s)\",id:{type:terms,field:id,sort:{bleh:desc},facet:{bleh:\"unique(id)\"}}}}}") + ); } diff --git a/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java b/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java index bcc936d53788..9f34db06c90f 100644 --- a/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java +++ b/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java @@ -19,12 +19,13 @@ import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.JSONTestUtil; import org.apache.solr.SolrTestCaseHS; - +import org.apache.solr.common.SolrException; import org.apache.solr.common.params.CommonParams; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; + @LuceneTestCase.SuppressCodecs({"Lucene3x","Lucene40","Lucene41","Lucene42","Lucene45","Appending"}) public class TestJsonRequest extends SolrTestCaseHS { @@ -79,6 +80,15 @@ public static void doJsonRequest(Client client, boolean isDistrib) throws Except , "response/numFound==2" ); + // invalid value + SolrException ex = expectThrows(SolrException.class, () -> client.testJQ(params("q", "*:*", "json", "5"))); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); + + // this is to verify other json params are not affected + client.testJQ( params("q", "cat_s:A", "json.limit", "1"), + "response/numFound==2" + ); + // test multiple 
json params client.testJQ( params("json","{query:'cat_s:A'}", "json","{filter:'where_s:NY'}") , "response/numFound==1" diff --git a/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java b/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java index e96fe2910929..9b848d1061aa 100644 --- a/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java +++ b/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java @@ -41,6 +41,7 @@ public void distribTearDown() throws Exception { @Test public void test() throws Exception { del("*:*"); + commit(); String aDocId=null; for (int i = 0; i < clients.size(); i++) { int shard = i + 1; diff --git a/solr/core/src/test/org/apache/solr/security/AuditLoggerIntegrationTest.java b/solr/core/src/test/org/apache/solr/security/AuditLoggerIntegrationTest.java index 4e6fad28e300..28bbaa8affe9 100644 --- a/solr/core/src/test/org/apache/solr/security/AuditLoggerIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/security/AuditLoggerIntegrationTest.java @@ -60,7 +60,6 @@ import static org.apache.solr.security.AuditEvent.EventType.COMPLETED; import static org.apache.solr.security.AuditEvent.EventType.ERROR; import static org.apache.solr.security.AuditEvent.EventType.REJECTED; -import static org.apache.solr.security.AuditEvent.EventType.UNAUTHORIZED; import static org.apache.solr.security.AuditEvent.RequestType.ADMIN; import static org.apache.solr.security.AuditEvent.RequestType.SEARCH; @@ -184,11 +183,11 @@ public void auth() throws Exception { CollectionAdminRequest.Create createRequest = CollectionAdminRequest.createCollection("test", 1, 1); createRequest.setBasicAuthCredentials("solr", "wrongPW"); client.request(createRequest); - fail("Call should fail with 403"); + fail("Call should fail with 401"); } catch (SolrException ex) { waitForAuditEventCallbacks(1); CallbackReceiver receiver = testHarness.get().receiver; - assertAuditEvent(receiver.popEvent(), 
UNAUTHORIZED, "/admin/collections", ADMIN, null,403); + assertAuditEvent(receiver.popEvent(), REJECTED, "/admin/collections", ADMIN, null, 401); } } diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java index 975c25262279..a7b5d31c9a8f 100644 --- a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java @@ -58,8 +58,9 @@ import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; -import org.apache.solr.common.util.Utils; import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.Utils; +import org.apache.solr.util.LogLevel; import org.apache.solr.util.SolrCLI; import org.apache.solr.util.TimeOut; import org.junit.After; @@ -96,6 +97,7 @@ public void tearDownCluster() throws Exception { @Test //commented 9-Aug-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 21-May-2018 // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // annotated on: 24-Dec-2018 + @LogLevel("org.apache.solr.security=DEBUG") public void testBasicAuth() throws Exception { boolean isUseV2Api = random().nextBoolean(); String authcPrefix = "/admin/authentication"; @@ -138,9 +140,7 @@ public void testBasicAuth() throws Exception { final SolrRequest genericReq; if (isUseV2Api) { - genericReq = new V2Request.Builder("/cluster/security/authentication") - .withPayload(command) - .withMethod(SolrRequest.METHOD.POST).build(); + genericReq = new V2Request.Builder("/cluster/security/authentication").withMethod(SolrRequest.METHOD.POST).build(); } else { genericReq = new GenericSolrRequest(SolrRequest.METHOD.POST, authcPrefix, new ModifiableSolrParams()); ((GenericSolrRequest)genericReq).setContentWriter(new 
StringPayloadContentWriter(command, CommonParams.JSON_MIME)); @@ -234,7 +234,7 @@ public void testBasicAuth() throws Exception { HttpSolrClient.RemoteSolrException e = expectThrows(HttpSolrClient.RemoteSolrException.class, () -> { new UpdateRequest().deleteByQuery("*:*").process(aNewClient, COLLECTION); }); - assertTrue(e.getMessage().contains("Unauthorized request")); + assertTrue(e.getMessage(), e.getMessage().contains("Authentication failed")); } finally { aNewClient.close(); cluster.stopJettySolrRunner(aNewJetty); diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthOnSingleNodeTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthOnSingleNodeTest.java index 974e940a10c6..bcfe60862b5a 100644 --- a/solr/core/src/test/org/apache/solr/security/BasicAuthOnSingleNodeTest.java +++ b/solr/core/src/test/org/apache/solr/security/BasicAuthOnSingleNodeTest.java @@ -19,11 +19,10 @@ import java.lang.invoke.MethodHandles; -import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.impl.Http2SolrClient; -import org.apache.solr.client.solrj.impl.PreemptiveBasicAuthClientBuilderFactory; import org.apache.solr.client.solrj.request.CollectionAdminRequest; +import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.cloud.SolrCloudAuthTestCase; import org.junit.Before; import org.junit.Test; @@ -35,10 +34,6 @@ public class BasicAuthOnSingleNodeTest extends SolrCloudAuthTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final String COLLECTION = "authCollection"; - static { - System.setProperty("basicauth", "solr:solr"); - } - @Before public void setupCluster() throws Exception { configureCluster(1) @@ -63,13 +58,12 @@ public void setupCluster() throws Exception { public void basicTest() throws Exception { try (Http2SolrClient client = new 
Http2SolrClient.Builder(cluster.getJettySolrRunner(0).getBaseUrl().toString()) .build()){ - PreemptiveBasicAuthClientBuilderFactory factory = new PreemptiveBasicAuthClientBuilderFactory(); - factory.setup(client); // SOLR-13510, this will be failed if the listener (handling inject credential in header) is called in another // thread since SolrRequestInfo will return null in that case. for (int i = 0; i < 30; i++) { - client.query(COLLECTION, new SolrQuery("*:*")); + assertNotNull(new QueryRequest(params("q", "*:*")) + .setBasicAuthCredentials("solr", "solr").process(client, COLLECTION)); } } } diff --git a/solr/core/src/test/org/apache/solr/security/JWTAuthPluginIntegrationTest.java b/solr/core/src/test/org/apache/solr/security/JWTAuthPluginIntegrationTest.java index cb0f655f1c35..20dc667c6ac3 100644 --- a/solr/core/src/test/org/apache/solr/security/JWTAuthPluginIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/security/JWTAuthPluginIntegrationTest.java @@ -24,6 +24,7 @@ import java.net.HttpURLConnection; import java.net.URL; import java.nio.charset.StandardCharsets; +import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; @@ -40,6 +41,7 @@ import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.impl.HttpClientUtil; import org.apache.solr.cloud.SolrCloudAuthTestCase; +import org.apache.solr.common.util.Base64; import org.apache.solr.common.util.Pair; import org.apache.solr.common.util.TimeSource; import org.apache.solr.common.util.Utils; @@ -130,6 +132,20 @@ public void infoRequestWithoutToken() throws Exception { get(baseUrl + "/admin/info/system", null); } + @Test + public void infoRequestValidateXSolrAuthHeaders() throws IOException { + Map headers = getHeaders(baseUrl + "/admin/info/system", null); + assertEquals("401", headers.get("code")); + assertEquals("HTTP/1.1 401 Require authentication", headers.get(null)); + assertEquals("Bearer realm=\"my-solr-jwt\"", 
headers.get("WWW-Authenticate")); + String authData = new String(Base64.base64ToByteArray(headers.get("X-Solr-AuthData")), UTF_8); + assertEquals("{\n" + + " \"scope\":\"solr:admin\",\n" + + " \"redirect_uris\":[],\n" + + " \"authorizationEndpoint\":\"http://acmepaymentscorp/oauth/auz/authorize\",\n" + + " \"client_id\":\"solr-cluster\"}", authData); + } + @Test public void testMetrics() throws Exception { boolean isUseV2Api = random().nextBoolean(); @@ -215,6 +231,20 @@ private Pair get(String url, String token) throws IOException { return new Pair<>(result, code); } + private Map getHeaders(String url, String token) throws IOException { + URL createUrl = new URL(url); + HttpURLConnection conn = (HttpURLConnection) createUrl.openConnection(); + if (token != null) + conn.setRequestProperty("Authorization", "Bearer " + token); + conn.connect(); + int code = conn.getResponseCode(); + Map result = new HashMap<>(); + conn.getHeaderFields().forEach((k,v) -> result.put(k, v.get(0))); + result.put("code", String.valueOf(code)); + conn.disconnect(); + return result; + } + private Pair post(String url, String json, String token) throws IOException { URL createUrl = new URL(url); HttpURLConnection con = (HttpURLConnection) createUrl.openConnection(); diff --git a/solr/core/src/test/org/apache/solr/security/JWTAuthPluginTest.java b/solr/core/src/test/org/apache/solr/security/JWTAuthPluginTest.java index 407c9b0a6fe4..327f20d4a245 100644 --- a/solr/core/src/test/org/apache/solr/security/JWTAuthPluginTest.java +++ b/solr/core/src/test/org/apache/solr/security/JWTAuthPluginTest.java @@ -16,13 +16,11 @@ */ package org.apache.solr.security; -import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.security.Principal; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -30,20 +28,16 @@ import java.util.Map; import 
java.util.Set; -import org.apache.commons.lang3.StringUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.util.Base64; import org.apache.solr.common.util.Utils; -import org.jose4j.jwk.HttpsJwks; -import org.jose4j.jwk.JsonWebKey; import org.jose4j.jwk.RsaJsonWebKey; import org.jose4j.jwk.RsaJwkGenerator; import org.jose4j.jws.AlgorithmIdentifiers; import org.jose4j.jws.JsonWebSignature; import org.jose4j.jwt.JwtClaims; import org.jose4j.keys.BigEndianBigInteger; -import org.jose4j.keys.resolvers.HttpsJwksVerificationKeyResolver; import org.jose4j.lang.JoseException; import org.junit.After; import org.junit.Before; @@ -59,18 +53,33 @@ public class JWTAuthPluginTest extends SolrTestCaseJ4 { private static String testHeader; private static String slimHeader; private JWTAuthPlugin plugin; - private HashMap testJwk; private static RsaJsonWebKey rsaJsonWebKey; private HashMap testConfig; private HashMap minimalConfig; + // Shared with other tests + static HashMap testJwk; - @BeforeClass - public static void beforeAll() throws Exception { + static { // Generate an RSA key pair, which will be used for signing and verification of the JWT, wrapped in a JWK - rsaJsonWebKey = RsaJwkGenerator.generateJwk(2048); - rsaJsonWebKey.setKeyId("k1"); + try { + rsaJsonWebKey = RsaJwkGenerator.generateJwk(2048); + rsaJsonWebKey.setKeyId("k1"); + + testJwk = new HashMap<>(); + testJwk.put("kty", rsaJsonWebKey.getKeyType()); + testJwk.put("e", BigEndianBigInteger.toBase64Url(rsaJsonWebKey.getRsaPublicKey().getPublicExponent())); + testJwk.put("use", rsaJsonWebKey.getUse()); + testJwk.put("kid", rsaJsonWebKey.getKeyId()); + testJwk.put("alg", rsaJsonWebKey.getAlgorithm()); + testJwk.put("n", BigEndianBigInteger.toBase64Url(rsaJsonWebKey.getRsaPublicKey().getModulus())); + } catch (JoseException e) { + fail("Failed static initialization: " + e.getMessage()); + } + } + @BeforeClass + public static void beforeAll() throws 
Exception { JwtClaims claims = generateClaims(); JsonWebSignature jws = new JsonWebSignature(); jws.setPayload(claims.toJson()); @@ -80,7 +89,7 @@ public static void beforeAll() throws Exception { String testJwt = jws.getCompactSerialization(); testHeader = "Bearer" + " " + testJwt; - + claims.unsetClaim("iss"); claims.unsetClaim("aud"); claims.unsetClaim("exp"); @@ -89,7 +98,7 @@ public static void beforeAll() throws Exception { slimHeader = "Bearer" + " " + slimJwt; } - static JwtClaims generateClaims() { + protected static JwtClaims generateClaims() { JwtClaims claims = new JwtClaims(); claims.setIssuer("IDServer"); // who creates the token and signs it claims.setAudience("Solr"); // to whom the token is intended to be sent @@ -112,18 +121,10 @@ static JwtClaims generateClaims() { @Before public void setUp() throws Exception { super.setUp(); + // Create an auth plugin plugin = new JWTAuthPlugin(); - // Create a JWK config for security.json - testJwk = new HashMap<>(); - testJwk.put("kty", rsaJsonWebKey.getKeyType()); - testJwk.put("e", BigEndianBigInteger.toBase64Url(rsaJsonWebKey.getRsaPublicKey().getPublicExponent())); - testJwk.put("use", rsaJsonWebKey.getUse()); - testJwk.put("kid", rsaJsonWebKey.getKeyId()); - testJwk.put("alg", rsaJsonWebKey.getAlgorithm()); - testJwk.put("n", BigEndianBigInteger.toBase64Url(rsaJsonWebKey.getRsaPublicKey().getModulus())); - testConfig = new HashMap<>(); testConfig.put("class", "org.apache.solr.security.JWTAuthPlugin"); testConfig.put("jwk", testJwk); @@ -180,55 +181,35 @@ public void initWithJwk() { } @Test - public void initWithJwkUrl() { + @Deprecated + public void initWithJwkUrlForBackwardsCompat() { HashMap authConf = new HashMap<>(); authConf.put("jwkUrl", "https://127.0.0.1:9999/foo.jwk"); plugin = new JWTAuthPlugin(); plugin.init(authConf); + assertEquals(1, plugin.getIssuerConfigs().size()); + assertEquals(1, plugin.getIssuerConfigs().get(0).getJwksUrls().size()); } - /** - * Simulate a rotate of JWK key in IdP. 
- * Validating of JWK signature will fail since we still use old cached JWK set. - * Using a mock {@link HttpsJwks} we validate that plugin calls refresh() and then passes validation - */ @Test - public void invalidSigRefreshJwk() throws JoseException { - RsaJsonWebKey rsaJsonWebKey2 = RsaJwkGenerator.generateJwk(2048); - rsaJsonWebKey2.setKeyId("k2"); - HashMap testJwkWrong = new HashMap<>(); - testJwkWrong.put("kty", rsaJsonWebKey2.getKeyType()); - testJwkWrong.put("e", BigEndianBigInteger.toBase64Url(rsaJsonWebKey2.getRsaPublicKey().getPublicExponent())); - testJwkWrong.put("use", rsaJsonWebKey2.getUse()); - testJwkWrong.put("kid", rsaJsonWebKey2.getKeyId()); - testJwkWrong.put("alg", rsaJsonWebKey2.getAlgorithm()); - testJwkWrong.put("n", BigEndianBigInteger.toBase64Url(rsaJsonWebKey2.getRsaPublicKey().getModulus())); - JsonWebKey wrongJwk = JsonWebKey.Factory.newJwk(testJwkWrong); - - // Configure our mock plugin with URL as jwk source - JsonWebKey correctJwk = JsonWebKey.Factory.newJwk(testJwk); - plugin = new MockJwksUrlPlugin(wrongJwk, correctJwk); - HashMap pluginConfigJwkUrl = new HashMap<>(); - pluginConfigJwkUrl.put("class", "org.apache.solr.security.JWTAuthPlugin"); - pluginConfigJwkUrl.put("jwkUrl", "dummy"); - plugin.init(pluginConfigJwkUrl); - - // Validate that plugin will call refresh() on invalid signature, then the call succeeds - assertFalse(((MockJwksUrlPlugin)plugin).isRefreshCalled()); - JWTAuthPlugin.JWTAuthenticationResponse resp = plugin.authenticate(testHeader); - assertTrue(resp.isAuthenticated()); - assertTrue(((MockJwksUrlPlugin)plugin).isRefreshCalled()); + public void initWithJwksUrl() { + HashMap authConf = new HashMap<>(); + authConf.put("jwksUrl", "https://127.0.0.1:9999/foo.jwk"); + plugin = new JWTAuthPlugin(); + plugin.init(authConf); + assertEquals(1, plugin.getIssuerConfigs().size()); + assertEquals(1, plugin.getIssuerConfigs().get(0).getJwksUrls().size()); } @Test - public void parseJwkSet() throws Exception { - 
plugin.parseJwkSet(testJwk); - - HashMap testJwks = new HashMap<>(); - List> keys = new ArrayList<>(); - keys.add(testJwk); - testJwks.put("keys", keys); - plugin.parseJwkSet(testJwks); + public void initWithJwkUrlArray() { + HashMap authConf = new HashMap<>(); + authConf.put("jwksUrl", Arrays.asList("https://127.0.0.1:9999/foo.jwk", "https://127.0.0.1:9999/foo2.jwk")); + authConf.put("iss", "myIssuer"); + plugin = new JWTAuthPlugin(); + plugin.init(authConf); + assertEquals(1, plugin.getIssuerConfigs().size()); + assertEquals(2, plugin.getIssuerConfigs().get(0).getJwksUrls().size()); } @Test @@ -308,20 +289,21 @@ public void claimMatch() { @Test public void missingIssAudExp() { + testConfig.put("requireIss", "false"); testConfig.put("requireExp", "false"); - testConfig.put("requireSub", "false"); plugin.init(testConfig); JWTAuthPlugin.JWTAuthenticationResponse resp = plugin.authenticate(slimHeader); - assertTrue(resp.isAuthenticated()); + assertTrue(resp.getErrorMessage(), resp.isAuthenticated()); - // Missing exp header + // Missing exp claim testConfig.put("requireExp", true); plugin.init(testConfig); resp = plugin.authenticate(slimHeader); assertEquals(JWTAuthPlugin.JWTAuthenticationResponse.AuthCode.JWT_VALIDATION_EXCEPTION, resp.getAuthCode()); + testConfig.put("requireExp", false); - // Missing sub header - testConfig.put("requireSub", true); + // Missing issuer claim + testConfig.put("requireIss", true); plugin.init(testConfig); resp = plugin.authenticate(slimHeader); assertEquals(JWTAuthPlugin.JWTAuthenticationResponse.AuthCode.JWT_VALIDATION_EXCEPTION, resp.getAuthCode()); @@ -341,7 +323,7 @@ public void scope() { testConfig.put("scope", "solr:read solr:admin"); plugin.init(testConfig); JWTAuthPlugin.JWTAuthenticationResponse resp = plugin.authenticate(testHeader); - assertTrue(resp.isAuthenticated()); + assertTrue(resp.getErrorMessage(), resp.isAuthenticated()); Principal principal = resp.getPrincipal(); assertTrue(principal instanceof 
VerifiedUserRoles); @@ -378,14 +360,14 @@ public void noHeaderNotBlockUnknown() { @Test public void minimalConfigPassThrough() { - testConfig.put("blockUnknown", false); + minimalConfig.put("blockUnknown", false); plugin.init(minimalConfig); JWTAuthPlugin.JWTAuthenticationResponse resp = plugin.authenticate(null); assertEquals(JWTAuthPlugin.JWTAuthenticationResponse.AuthCode.PASS_THROUGH, resp.getAuthCode()); } @Test - public void wellKnownConfig() { + public void wellKnownConfigNoHeaderPassThrough() { String wellKnownUrl = TEST_PATH().resolve("security").resolve("jwt_well-known-config.json").toAbsolutePath().toUri().toString(); testConfig.put("wellKnownUrl", wellKnownUrl); testConfig.remove("jwk"); @@ -394,42 +376,30 @@ public void wellKnownConfig() { assertEquals(JWTAuthPlugin.JWTAuthenticationResponse.AuthCode.PASS_THROUGH, resp.getAuthCode()); } - @Test(expected = SolrException.class) - public void onlyOneJwkConfig() { - testConfig.put("jwkUrl", "http://127.0.0.1:45678/myJwk"); + @Test + public void defaultRealm() { + String wellKnownUrl = TEST_PATH().resolve("security").resolve("jwt_well-known-config.json").toAbsolutePath().toUri().toString(); + testConfig.put("wellKnownUrl", wellKnownUrl); + testConfig.remove("jwk"); plugin.init(testConfig); + assertEquals("solr-jwt", plugin.realm); } - @Test(expected = SolrException.class) - public void wellKnownConfigNotHttps() { - testConfig.put("wellKnownUrl", "http://127.0.0.1:45678/.well-known/config"); + @Test + public void configureRealm() { + String wellKnownUrl = TEST_PATH().resolve("security").resolve("jwt_well-known-config.json").toAbsolutePath().toUri().toString(); + testConfig.put("wellKnownUrl", wellKnownUrl); + testConfig.remove("jwk"); + testConfig.put("realm", "myRealm"); plugin.init(testConfig); + assertEquals("myRealm", plugin.realm); } @Test(expected = SolrException.class) - public void wellKnownConfigNotReachable() { - testConfig.put("wellKnownUrl", "https://127.0.0.1:45678/.well-known/config"); + public 
void bothJwksUrlAndJwkFails() { + testConfig.put("jwksUrl", "http://127.0.0.1:45678/myJwk"); plugin.init(testConfig); } - - @Test - public void wellKnownConfigFromInputstream() throws IOException { - Path configJson = TEST_PATH().resolve("security").resolve("jwt_well-known-config.json"); - JWTAuthPlugin.WellKnownDiscoveryConfig config = JWTAuthPlugin.WellKnownDiscoveryConfig.parse(Files.newInputStream(configJson)); - assertEquals("https://acmepaymentscorp/oauth/jwks", config.getJwksUrl()); - } - - @Test - public void wellKnownConfigFromString() throws IOException { - Path configJson = TEST_PATH().resolve("security").resolve("jwt_well-known-config.json"); - String configString = StringUtils.join(Files.readAllLines(configJson), "\n"); - JWTAuthPlugin.WellKnownDiscoveryConfig config = JWTAuthPlugin.WellKnownDiscoveryConfig.parse(configString, StandardCharsets.UTF_8); - assertEquals("https://acmepaymentscorp/oauth/jwks", config.getJwksUrl()); - assertEquals("http://acmepaymentscorp", config.getIssuer()); - assertEquals("http://acmepaymentscorp/oauth/auz/authorize", config.getAuthorizationEndpoint()); - assertEquals(Arrays.asList("READ", "WRITE", "DELETE", "openid", "scope", "profile", "email", "address", "phone"), config.getScopesSupported()); - assertEquals(Arrays.asList("code", "code id_token", "code token", "code id_token token", "token", "id_token", "id_token token"), config.getResponseTypesSupported()); - } @Test public void xSolrAuthDataHeader() { @@ -445,48 +415,43 @@ public void xSolrAuthDataHeader() { assertEquals("solr-cluster", parsed.get("client_id")); } - /** - * Mock plugin that simulates a {@link HttpsJwks} with cached JWK that returns - * a different JWK after a call to refresh() - */ - private class MockJwksUrlPlugin extends JWTAuthPlugin { - private final JsonWebKey wrongJwk; - private final JsonWebKey correctJwk; - - boolean isRefreshCalled() { - return refreshCalled; - } - - private boolean refreshCalled; - - MockJwksUrlPlugin(JsonWebKey wrongJwk, 
JsonWebKey correctJwk) { - this.wrongJwk = wrongJwk; - this.correctJwk = correctJwk; - } + @Test + public void initWithTwoIssuers() { + HashMap authConf = new HashMap<>(); + JWTIssuerConfig iss1 = new JWTIssuerConfig("iss1").setIss("1").setAud("aud1") + .setJwksUrl("https://127.0.0.1:9999/foo.jwk"); + JWTIssuerConfig iss2 = new JWTIssuerConfig("iss2").setIss("2").setAud("aud2") + .setJwksUrl(Arrays.asList("https://127.0.0.1:9999/foo.jwk", "https://127.0.0.1:9999/foo2.jwk")); + authConf.put("issuers", Arrays.asList(iss1.asConfig(), iss2.asConfig())); + plugin = new JWTAuthPlugin(); + plugin.init(authConf); + assertEquals(2, plugin.getIssuerConfigs().size()); + assertTrue(plugin.getIssuerConfigs().get(0).usesHttpsJwk()); + assertTrue(plugin.getIssuerConfigs().get(1).usesHttpsJwk()); + JWTIssuerConfig issuer1 = plugin.getIssuerConfigByName("iss1"); + JWTIssuerConfig issuer2 = plugin.getIssuerConfigByName("iss2"); + assertNotNull(issuer1); + assertNotNull(issuer2); + assertEquals(2, issuer2.getJwksUrls().size()); + assertEquals("iss1", plugin.getPrimaryIssuer().getName()); + assertEquals("aud1", issuer1.getAud()); + } - @Override - void setupJwkUrl(String url) { - MockHttpsJwks httpsJkws = new MockHttpsJwks(url); - verificationKeyResolver = new HttpsJwksVerificationKeyResolver(httpsJkws); - } + @Test + public void initWithToplevelAndIssuersCombined() { + HashMap authConf = new HashMap<>(); + JWTIssuerConfig iss1 = new JWTIssuerConfig("iss1").setIss("1").setAud("aud1") + .setJwksUrl("https://127.0.0.1:9999/foo.jwk"); + authConf.put("issuers", Collections.singletonList(iss1.asConfig())); + authConf.put("aud", "aud2"); + authConf.put("jwksUrl", Arrays.asList("https://127.0.0.1:9999/foo.jwk", "https://127.0.0.1:9999/foo2.jwk")); - private class MockHttpsJwks extends HttpsJwks { - MockHttpsJwks(String url) { - super(url); - } - - @Override - public List getJsonWebKeys() { - return refreshCalled ? 
Collections.singletonList(correctJwk) : Collections.singletonList(wrongJwk); - } - - @Override - public void refresh() { - if (refreshCalled) { - fail("Refresh called twice"); - } - refreshCalled = true; - } - } + plugin = new JWTAuthPlugin(); + plugin.init(authConf); + assertEquals(2, plugin.getIssuerConfigs().size()); + assertEquals("PRIMARY", plugin.getPrimaryIssuer().getName()); + assertEquals("aud2", plugin.getPrimaryIssuer().getAud()); + // Top-level (name=PRIMARY) issuer config does not need "iss" for back compat + assertNull(plugin.getPrimaryIssuer().getIss()); } } \ No newline at end of file diff --git a/solr/core/src/test/org/apache/solr/security/JWTIssuerConfigTest.java b/solr/core/src/test/org/apache/solr/security/JWTIssuerConfigTest.java new file mode 100644 index 000000000000..338855248316 --- /dev/null +++ b/solr/core/src/test/org/apache/solr/security/JWTIssuerConfigTest.java @@ -0,0 +1,156 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.solr.security; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; +import org.apache.solr.common.SolrException; +import org.jose4j.jwk.JsonWebKeySet; +import org.junit.Before; +import org.junit.Test; +import org.noggit.JSONUtil; + +import static org.apache.solr.SolrTestCaseJ4.TEST_PATH; +import static org.apache.solr.security.JWTAuthPluginTest.testJwk; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +public class JWTIssuerConfigTest { + private JWTIssuerConfig testIssuer; + private Map testIssuerConfigMap; + private String testIssuerJson; + + @Before + public void setUp() throws Exception { + testIssuer = new JWTIssuerConfig("name") + .setJwksUrl("https://issuer/path") + .setIss("issuer") + .setAud("audience") + .setClientId("clientid") + .setWellKnownUrl("wellknown") + .setAuthorizationEndpoint("https://issuer/authz"); + + testIssuerConfigMap = testIssuer.asConfig(); + + testIssuerJson = "{\n" + + " \"aud\":\"audience\",\n" + + " \"wellKnownUrl\":\"wellknown\",\n" + + " \"clientId\":\"clientid\",\n" + + " \"jwksUrl\":[\"https://issuer/path\"],\n" + + " \"name\":\"name\",\n" + + " \"iss\":\"issuer\",\n" + + " \"authorizationEndpoint\":\"https://issuer/authz\"}"; + } + + @Test + public void parseConfigMap() { + // Do a round-trip from map -> object -> map -> json + JWTIssuerConfig issuerConfig = new JWTIssuerConfig(testIssuerConfigMap); + issuerConfig.isValid(); + assertEquals(testIssuerJson, JSONUtil.toJSON(issuerConfig.asConfig())); + } + + @Test(expected = SolrException.class) + public void parseConfigMapNoName() { + testIssuerConfigMap.remove("name"); // Will fail validation + new JWTIssuerConfig(testIssuerConfigMap).isValid(); + } + + 
@Test + public void parseJwkSet() throws Exception { + HashMap testJwks = new HashMap<>(); + List> keys = new ArrayList<>(); + keys.add(testJwk); + testJwks.put("keys", keys); + JWTIssuerConfig.parseJwkSet(testJwks); + } + + @Test + public void setJwksUrl() { + JWTIssuerConfig conf = new JWTIssuerConfig("myConf"); + conf.setJwksUrl("http://server/path"); + } + + @Test + public void asConfig() { + assertEquals(testIssuerJson, JSONUtil.toJSON(testIssuer.asConfig())); + } + + @Test + public void isValid() { + assertTrue(testIssuer.isValid()); + } + + @Test(expected = SolrException.class) + public void notValidBothJwksAndJwk() { + testIssuer.setJsonWebKeySet(new JsonWebKeySet()); + testIssuer.isValid(); + } + + @Test + public void parseIssuerConfigExplicit() { + HashMap issuerConfigMap = new HashMap<>(); + issuerConfigMap.put("name", "myName"); + issuerConfigMap.put("iss", "myIss"); + issuerConfigMap.put("jwksUrl", "https://host/jwk"); + + JWTIssuerConfig issuerConfig = new JWTIssuerConfig(issuerConfigMap); + assertEquals("myIss", issuerConfig.getIss()); + assertEquals("myName", issuerConfig.getName()); + assertEquals(1, issuerConfig.getJwksUrls().size()); + assertEquals("https://host/jwk", issuerConfig.getJwksUrls().get(0)); + } + + @Test + public void wellKnownConfigFromInputstream() throws IOException { + Path configJson = TEST_PATH().resolve("security").resolve("jwt_well-known-config.json"); + JWTIssuerConfig.WellKnownDiscoveryConfig config = JWTIssuerConfig.WellKnownDiscoveryConfig.parse(Files.newInputStream(configJson)); + assertEquals("https://acmepaymentscorp/oauth/jwks", config.getJwksUrl()); + } + + @Test + public void wellKnownConfigFromString() throws IOException { + Path configJson = TEST_PATH().resolve("security").resolve("jwt_well-known-config.json"); + String configString = StringUtils.join(Files.readAllLines(configJson), "\n"); + JWTIssuerConfig.WellKnownDiscoveryConfig config = JWTIssuerConfig.WellKnownDiscoveryConfig.parse(configString, 
StandardCharsets.UTF_8); + assertEquals("https://acmepaymentscorp/oauth/jwks", config.getJwksUrl()); + assertEquals("http://acmepaymentscorp", config.getIssuer()); + assertEquals("http://acmepaymentscorp/oauth/auz/authorize", config.getAuthorizationEndpoint()); + assertEquals(Arrays.asList("READ", "WRITE", "DELETE", "openid", "scope", "profile", "email", "address", "phone"), config.getScopesSupported()); + assertEquals(Arrays.asList("code", "code id_token", "code token", "code id_token token", "token", "id_token", "id_token token"), config.getResponseTypesSupported()); + } + + @Test(expected = SolrException.class) + public void wellKnownConfigNotHttps() { + JWTIssuerConfig.WellKnownDiscoveryConfig.parse("http://127.0.0.1:45678/.well-known/config"); + } + + @Test(expected = SolrException.class) + public void wellKnownConfigNotReachable() { + JWTIssuerConfig.WellKnownDiscoveryConfig.parse("https://127.0.0.1:45678/.well-known/config"); + } +} \ No newline at end of file diff --git a/solr/core/src/test/org/apache/solr/security/JWTVerificationkeyResolverTest.java b/solr/core/src/test/org/apache/solr/security/JWTVerificationkeyResolverTest.java new file mode 100644 index 000000000000..4b88787b0ea2 --- /dev/null +++ b/solr/core/src/test/org/apache/solr/security/JWTVerificationkeyResolverTest.java @@ -0,0 +1,156 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.security; + +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; + +import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.security.JWTIssuerConfig.HttpsJwksFactory; +import org.jose4j.jwk.HttpsJwks; +import org.jose4j.jwk.JsonWebKey; +import org.jose4j.jwk.RsaJsonWebKey; +import org.jose4j.jwk.RsaJwkGenerator; +import org.jose4j.jws.AlgorithmIdentifiers; +import org.jose4j.jws.JsonWebSignature; +import org.jose4j.lang.JoseException; +import org.jose4j.lang.UnresolvableKeyException; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; + +import static java.util.Arrays.asList; +import static org.mockito.ArgumentMatchers.anyList; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.when; + +/** + * Tests the multi jwks resolver that can fetch keys from multiple JWKs + */ +public class JWTVerificationkeyResolverTest extends SolrTestCaseJ4 { + private JWTVerificationkeyResolver resolver; + + @Rule + public MockitoRule mockitoRule = MockitoJUnit.rule(); + + @Mock + private HttpsJwks firstJwkList; + @Mock + private HttpsJwks secondJwkList; + @Mock + private HttpsJwksFactory httpsJwksFactory; + + private KeyHolder k1; + private KeyHolder k2; + private KeyHolder k3; + private KeyHolder k4; + private KeyHolder k5; + private List keysToReturnFromSecondJwk; + private Iterator refreshSequenceForSecondJwk; + + @Before + public void setUp() throws 
Exception { + super.setUp(); + k1 = new KeyHolder("k1"); + k2 = new KeyHolder("k2"); + k3 = new KeyHolder("k3"); + k4 = new KeyHolder("k4"); + k5 = new KeyHolder("k5"); + + when(firstJwkList.getJsonWebKeys()).thenReturn(asList(k1.getJwk(), k2.getJwk())); + doAnswer(invocation -> { + keysToReturnFromSecondJwk = (List) refreshSequenceForSecondJwk.next(); + System.out.println("Refresh called, next to return is " + keysToReturnFromSecondJwk); + return null; + }).when(secondJwkList).refresh(); + when(secondJwkList.getJsonWebKeys()).then(inv -> { + if (keysToReturnFromSecondJwk == null) + keysToReturnFromSecondJwk = (List) refreshSequenceForSecondJwk.next(); + return keysToReturnFromSecondJwk; + }); + when(httpsJwksFactory.createList(anyList())).thenReturn(asList(firstJwkList, secondJwkList)); + + JWTIssuerConfig issuerConfig = new JWTIssuerConfig("primary").setIss("foo").setJwksUrl(asList("url1", "url2")); + issuerConfig.setHttpsJwksFactory(httpsJwksFactory); + resolver = new JWTVerificationkeyResolver(Arrays.asList(issuerConfig), true); + + assumeWorkingMockito(); + } + + @Test + public void findKeyFromFirstList() throws JoseException { + refreshSequenceForSecondJwk = asList( + asList(k3.getJwk(), k4.getJwk()), + asList(k5.getJwk())).iterator(); + resolver.resolveKey(k1.getJws(), null); + resolver.resolveKey(k2.getJws(), null); + resolver.resolveKey(k3.getJws(), null); + resolver.resolveKey(k4.getJws(), null); + // Key k5 is not in cache, so a refresh will be done, which + resolver.resolveKey(k5.getJws(), null); + } + + @Test(expected = UnresolvableKeyException.class) + public void notFoundKey() throws JoseException { + refreshSequenceForSecondJwk = asList( + asList(k3.getJwk()), + asList(k4.getJwk()), + asList(k5.getJwk())).iterator(); + // Will not find key since first refresh returns k4, and we only try one refresh. 
+ resolver.resolveKey(k5.getJws(), null); + } + + public class KeyHolder { + private final RsaJsonWebKey key; + private final String kid; + + public KeyHolder(String kid) throws JoseException { + key = generateKey(kid); + this.kid = kid; + } + + public RsaJsonWebKey getRsaKey() { + return key; + } + + public JsonWebKey getJwk() throws JoseException { + JsonWebKey jsonKey = JsonWebKey.Factory.newJwk(key.getRsaPublicKey()); + jsonKey.setKeyId(kid); + return jsonKey; + } + + public JsonWebSignature getJws() { + JsonWebSignature jws = new JsonWebSignature(); + jws.setPayload(JWTAuthPluginTest.generateClaims().toJson()); + jws.setKey(getRsaKey().getPrivateKey()); + jws.setKeyIdHeaderValue(getRsaKey().getKeyId()); + jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.RSA_USING_SHA256); + return jws; + } + + private RsaJsonWebKey generateKey(String kid) throws JoseException { + RsaJsonWebKey rsaJsonWebKey = RsaJwkGenerator.generateJwk(2048); + rsaJsonWebKey.setKeyId(kid); + return rsaJsonWebKey; + } + } +} \ No newline at end of file diff --git a/solr/core/src/test/org/apache/solr/store/blob/SharedStorageSplitTest.java b/solr/core/src/test/org/apache/solr/store/blob/SharedStorageSplitTest.java index bd6679809ba1..d69766fdddc3 100644 --- a/solr/core/src/test/org/apache/solr/store/blob/SharedStorageSplitTest.java +++ b/solr/core/src/test/org/apache/solr/store/blob/SharedStorageSplitTest.java @@ -18,8 +18,10 @@ import java.nio.file.Path; import java.util.Collection; -import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import junit.framework.TestCase; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.embedded.JettySolrRunner; @@ -27,17 +29,14 @@ import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.UpdateRequest; import 
org.apache.solr.client.solrj.response.UpdateResponse; -import org.apache.solr.cloud.api.collections.Assign; -import org.apache.solr.cloud.api.collections.SplitByPrefixTest; import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; -import org.apache.solr.common.cloud.Replica.Type; import org.apache.solr.common.cloud.Slice; -import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.store.blob.client.CoreStorageClient; import org.apache.solr.store.shared.SolrCloudSharedStoreTestCase; import org.junit.AfterClass; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; /** @@ -66,8 +65,7 @@ public static void teardownTest() throws Exception { shutdownCluster(); } - void doSplitShard(String collectionName, boolean sharedStorage, int repFactor, int nPrefixes, int nDocsPerPrefix) throws Exception { - + CloudSolrClient createCollection(String collectionName, boolean sharedStorage, int repFactor) throws Exception { if (sharedStorage) { CollectionAdminRequest .createCollection(collectionName, "conf", 1, 0, 0, 0) @@ -86,6 +84,11 @@ void doSplitShard(String collectionName, boolean sharedStorage, int repFactor, i CloudSolrClient client = cluster.getSolrClient(); client.setDefaultCollection(collectionName); + return client; + } + + void doSplitShard(String collectionName, boolean sharedStorage, int repFactor, int nPrefixes, int nDocsPerPrefix) throws Exception { + CloudSolrClient client = createCollection(collectionName, sharedStorage, repFactor); if (random().nextBoolean()) { for (int i = 0; i < nPrefixes; i++) { @@ -174,4 +177,68 @@ public void testSplit() throws Exception { doSplitShard("c2", true, 2, 2, 2); } + + void doLiveSplitShard(String collectionName, boolean sharedStorage, int repFactor) throws Exception { + final CloudSolrClient client = createCollection(collectionName, sharedStorage, repFactor); + + final AtomicBoolean doIndex = new AtomicBoolean(true); + final AtomicInteger docsIndexed = 
new AtomicInteger(); + Thread indexThread = null; + try { + // start indexing client before we initiate a shard split + indexThread = new Thread(() -> { + while (doIndex.get()) { + try { + Thread.sleep(10); // cap indexing rate at 100 docs per second... + int currDoc = docsIndexed.get(); + + // Try all docs in the same update request + UpdateRequest updateReq = new UpdateRequest(); + updateReq.add(sdoc("id", "doc_" + currDoc)); + UpdateResponse ursp = updateReq.commit(client, collectionName); + assertEquals(0, ursp.getStatus()); // for now, don't accept any failures + if (ursp.getStatus() == 0) { + docsIndexed.incrementAndGet(); + } + } catch (Exception e) { + TestCase.fail(e.getMessage()); + break; + } + } + }); + indexThread.start(); + + Thread.sleep(100); // wait for a few docs to be indexed before invoking split + int docCount = docsIndexed.get(); + + CollectionAdminRequest.SplitShard splitShard = CollectionAdminRequest.splitShard(collectionName) + .setShardName("shard1"); + splitShard.process(client); + waitForState("Timed out waiting for sub shards to be active.", + collectionName, activeClusterShape(2, 3*repFactor)); // 2 repFactor for the new split shards, 1 repFactor for old replicas + + // make sure that docs were able to be indexed during the split + assertTrue(docsIndexed.get() > docCount); + + Thread.sleep(100); // wait for a few more docs to be indexed after split + + } finally { + // shut down the indexer + doIndex.set(false); + if (indexThread != null) { + indexThread.join(); + } + } + + assertTrue(docsIndexed.get() > 0); + + checkExpectedDocs(client, repFactor, docsIndexed.get()); + } + + @Test + @Ignore // need future fixes for this + public void testLiveSplit() throws Exception { + doLiveSplitShard("livesplit1", true, 1); + } + } diff --git a/solr/core/src/test/org/apache/solr/store/blockcache/BlockCacheTest.java b/solr/core/src/test/org/apache/solr/store/blockcache/BlockCacheTest.java index 3a44673b3eb7..2ea3bf078fd8 100644 --- 
a/solr/core/src/test/org/apache/solr/store/blockcache/BlockCacheTest.java +++ b/solr/core/src/test/org/apache/solr/store/blockcache/BlockCacheTest.java @@ -44,10 +44,10 @@ public class BlockCacheTest extends SolrTestCase { public void testBlockCache() { int blocksInTest = 2000000; int blockSize = 1024; - + int slabSize = blockSize * 4096; long totalMemory = 2 * slabSize; - + BlockCache blockCache = new BlockCache(new Metrics(), true, totalMemory, slabSize, blockSize); byte[] buffer = new byte[1024]; Random random = random(); @@ -82,7 +82,7 @@ public void testBlockCache() { long t3 = System.nanoTime(); if (blockCache.fetch(blockCacheKey, buffer)) { fetchTime += (System.nanoTime() - t3); - assertTrue(Arrays.equals(testData, buffer)); + assertTrue("buffer content differs", Arrays.equals(testData, buffer)); } } System.out.println("Cache Hits = " + hitsInCache.get()); @@ -101,7 +101,7 @@ private static byte[] testData(Random random, int size, byte[] buf) { // always returns the same thing so we don't actually have to store the bytes redundantly to check them. private static byte getByte(long pos) { // knuth multiplicative hash method, then take top 8 bits - return (byte) ((((int)pos) * (int)(2654435761L)) >> 24); + return (byte) ((((int) pos) * (int) (2654435761L)) >> 24); // just the lower bits of the block number, to aid in debugging... // return (byte)(pos>>10); @@ -117,17 +117,17 @@ public void testBlockCacheConcurrent() throws Exception { final long totalMemory = 2 * slabSize; // 2 slabs of memory, so only half of what is needed for all blocks /*** - final int blocksInTest = 16384; // pick something bigger than 256, since that would lead to a slab size of 64 blocks and the bitset locks would consist of a single word. 
- final int blockSize = 1024; - final int slabSize = blocksInTest * blockSize / 4; - final long totalMemory = 2 * slabSize; // 2 slabs of memory, so only half of what is needed for all blocks - ***/ - - final int nThreads=64; - final int nReads=1000000; - final int readsPerThread=nReads/nThreads; - final int readLastBlockOdds=10; // odds (1 in N) of the next block operation being on the same block as the previous operation... helps flush concurrency issues - final int showErrors=50; // show first 50 validation failures + final int blocksInTest = 16384; // pick something bigger than 256, since that would lead to a slab size of 64 blocks and the bitset locks would consist of a single word. + final int blockSize = 1024; + final int slabSize = blocksInTest * blockSize / 4; + final long totalMemory = 2 * slabSize; // 2 slabs of memory, so only half of what is needed for all blocks + ***/ + + final int nThreads = 64; + final int nReads = 1000000; + final int readsPerThread = nReads / nThreads; + final int readLastBlockOdds = 10; // odds (1 in N) of the next block operation being on the same block as the previous operation... helps flush concurrency issues + final int showErrors = 50; // show first 50 validation failures final BlockCache blockCache = new BlockCache(new Metrics(), true, totalMemory, slabSize, blockSize); @@ -142,7 +142,7 @@ public void testBlockCacheConcurrent() throws Exception { Thread[] threads = new Thread[nThreads]; - for (int i=0; i listener = (k, v, removalCause) -> { - assert v.key == k; + RemovalListener listener = (k, v, removalCause) -> { + removals.incrementAndGet(); + if (v == null) { + if (removalCause != RemovalCause.COLLECTED) { + throw new RuntimeException("Null value for key " + k + ", removalCause=" + removalCause); + } else { + return; + } + } + assertEquals("cache key differs from value's key", (Long) k, (Long) v.key); if (!v.live.compareAndSet(true, false)) { throw new RuntimeException("listener called more than once! 
k=" + k + " v=" + v + " removalCause=" + removalCause); // return; // use this variant if listeners may be called more than once } - removals.incrementAndGet(); }; - com.github.benmanes.caffeine.cache.Cache cache = Caffeine.newBuilder() + com.github.benmanes.caffeine.cache.Cache cache = Caffeine.newBuilder() .removalListener(listener) .maximumSize(maxEntries) .executor(Runnable::run) @@ -279,11 +288,12 @@ public void testCacheConcurrent() throws Exception { final AtomicLong maxObservedSize = new AtomicLong(); Thread[] threads = new Thread[nThreads]; - for (int i=0; i 0 && r.nextInt(odds)==0; + return odds > 0 && r.nextInt(odds) == 0; } long getBlock() { @@ -329,7 +339,7 @@ public void test() { Val v = cache.getIfPresent(k); if (v != null) { hits.incrementAndGet(); - assert k.equals(v.key); + assertEquals("cache key differs from value's key", (Long) k, (Long) v.key); } if (v == null || odds(updateAnywayOdds)) { @@ -358,13 +368,10 @@ public void test() { // Thread.sleep(1000); // need to wait if executor is used for listener? long cacheSize = cache.estimatedSize(); - System.out.println("Done! # of Elements = " + cacheSize + " inserts=" + inserts.get() + " removals=" + removals.get() + " hits=" + hits.get() + " maxObservedSize=" + maxObservedSize); - assert inserts.get() - removals.get() == cacheSize; - assertFalse( failed.get() ); + System.out.println("Done! 
# of Elements = " + cacheSize + " inserts=" + inserts.get() + " removals=" + removals.get() + " hits=" + hits.get() + " maxObservedSize=" + maxObservedSize); + assertEquals("cache size different from (inserts - removal)", cacheSize, inserts.get() - removals.get()); + assertFalse(failed.get()); } - - - } diff --git a/solr/core/src/test/org/apache/solr/update/RootFieldTest.java b/solr/core/src/test/org/apache/solr/update/RootFieldTest.java index 7c0ad2b858fa..8015d19d604a 100644 --- a/solr/core/src/test/org/apache/solr/update/RootFieldTest.java +++ b/solr/core/src/test/org/apache/solr/update/RootFieldTest.java @@ -17,7 +17,7 @@ package org.apache.solr.update; -import org.apache.solr.SolrJettyTestBase; +import org.apache.solr.EmbeddedSolrServerTestBase; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.common.SolrDocument; @@ -32,7 +32,7 @@ import static org.hamcrest.CoreMatchers.is; -public class RootFieldTest extends SolrJettyTestBase { +public class RootFieldTest extends EmbeddedSolrServerTestBase { private static boolean useRootSchema; private static final String MESSAGE = "Update handler should create and process _root_ field " + "unless there is no such a field in schema"; diff --git a/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java b/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java index 4eddb98136e9..6926c6fab943 100644 --- a/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java +++ b/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java @@ -16,6 +16,7 @@ */ package org.apache.solr.update; +import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.net.SocketException; @@ -23,6 +24,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import 
javax.xml.parsers.ParserConfigurationException; import org.apache.solr.BaseDistributedSearchTestCase; @@ -353,6 +355,7 @@ public void newSearcher(SolrIndexSearcher newSearcher, testDeletes(false, false); testDeletes(true, true); testDeletes(true, false); + getRfFromResponseShouldNotCloseTheInputStream(); } private void testDeletes(boolean dbq, boolean withFailures) throws Exception { @@ -531,6 +534,22 @@ private void testReqShouldRetryDBQ() { SolrCmdDistributor.Req req = new SolrCmdDistributor.Req(null, new StdNode(null, "collection1", "shard1", 1), dbqReq, true); assertFalse(req.shouldRetry(err)); } + + public void getRfFromResponseShouldNotCloseTheInputStream() { + UpdateRequest dbqReq = new UpdateRequest(); + dbqReq.deleteByQuery("*:*"); + SolrCmdDistributor.Req req = new SolrCmdDistributor.Req(null, new StdNode(null, "collection1", "shard1", 1), dbqReq, true); + AtomicBoolean isClosed = new AtomicBoolean(false); + ByteArrayInputStream is = new ByteArrayInputStream(new byte[100]) { + @Override + public void close() throws IOException { + isClosed.set(true); + super.close(); + } + }; + req.trackRequestResult(null, is, true); + assertFalse("Underlying stream should not be closed!", isClosed.get()); + } private void testReqShouldRetryMaxRetries() { Error err = getError(new SocketException()); diff --git a/solr/core/src/test/org/apache/solr/update/processor/AbstractAtomicUpdatesMultivalueTestBase.java b/solr/core/src/test/org/apache/solr/update/processor/AbstractAtomicUpdatesMultivalueTestBase.java new file mode 100644 index 000000000000..05fd0e7e962c --- /dev/null +++ b/solr/core/src/test/org/apache/solr/update/processor/AbstractAtomicUpdatesMultivalueTestBase.java @@ -0,0 +1,428 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.update.processor; + +import static org.hamcrest.CoreMatchers.hasItems; +import static org.hamcrest.CoreMatchers.not; + +import java.io.IOException; +import java.time.ZonedDateTime; +import java.util.Arrays; +import java.util.Collection; +import java.util.Date; +import java.util.Optional; +import java.util.UUID; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.apache.curator.shaded.com.google.common.collect.Lists; +import org.apache.solr.EmbeddedSolrServerTestBase; +import org.apache.solr.client.solrj.SolrServerException; +import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer; +import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer.RequestWriterSupplier; +import org.apache.solr.common.util.ByteArrayUtf8CharSequence; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import com.google.common.collect.ImmutableMap; + +public abstract class AbstractAtomicUpdatesMultivalueTestBase extends EmbeddedSolrServerTestBase { + + @BeforeClass + public static void beforeClass() throws Exception { + System.setProperty("enable.update.log","true"); + initCore("solrconfig.xml", "schema.xml"); + } + + @Before + public void before() throws SolrServerException, IOException { + getSolrClient().deleteByQuery("*:*"); + } + + abstract RequestWriterSupplier getRequestWriterSupplier(); + + @Override 
+ public synchronized EmbeddedSolrServer getSolrClient() { + return new EmbeddedSolrServer(h.getCoreContainer(), DEFAULT_CORE_NAME, getRequestWriterSupplier()) { + + @Override + public void close() { + // do not close core container + } + }; + } + + private static void assertQR(final String fieldName, final String queryValue, final int numFound) { + assertQ(req("q", fieldName + ":" + queryValue, "indent", "true"), "//result[@numFound = '" + numFound + "']"); + } + + private void runTestForField(final String fieldName, final Object[] values, final String[] queries, + final Optional> valueConverter) + throws SolrServerException, IOException { + + final Function vc = valueConverter.orElse(o -> o); + + getSolrClient().add(Arrays.asList( + sdoc("id", "20000", fieldName, Arrays.asList(values[0], values[1], values[2])), + sdoc("id", "20001", fieldName, Arrays.asList(values[1], values[2], values[3])))); + getSolrClient().commit(true, true); + + if (queries != null) { + assertQR(fieldName, queries[0], 1); + assertQR(fieldName, queries[1], 2); + assertQR(fieldName, queries[2], 2); + assertQR(fieldName, queries[3], 1); + } + + Collection fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName); + assertEquals(3, fieldValues.size()); + assertThat(fieldValues, hasItems(vc.apply(values[0]), vc.apply(values[1]), vc.apply(values[2]))); + assertThat(fieldValues, not(hasItems(vc.apply(values[3])))); + fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName); + assertEquals(3, fieldValues.size()); + assertThat(fieldValues, hasItems(vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3]))); + assertThat(fieldValues, not(hasItems(vc.apply(values[0])))); + + getSolrClient().add(sdoc("id", "20000", fieldName, ImmutableMap.of("remove", + Lists.newArrayList(values[0])))); + getSolrClient().commit(true, true); + + if (queries != null) { + assertQR(fieldName, queries[0], 0); + assertQR(fieldName, queries[1], 2); + assertQR(fieldName, queries[2], 2); + 
assertQR(fieldName, queries[3], 1); + } + + fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName); + assertEquals(2, fieldValues.size()); + assertThat(fieldValues, hasItems(vc.apply(values[1]), vc.apply(values[2]))); + assertThat(fieldValues, not(hasItems(vc.apply(values[0]), vc.apply(values[3])))); + fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName); + assertEquals(3, fieldValues.size()); + assertThat(fieldValues, hasItems(vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3]))); + assertThat(fieldValues, not(hasItems(vc.apply(values[0])))); + + getSolrClient().add(sdoc("id", "20001", fieldName, ImmutableMap.of("remove", + Lists.newArrayList(values[0], values[1], values[2])))); + getSolrClient().commit(true, true); + + if (queries != null) { + assertQR(fieldName, queries[0], 0); + assertQR(fieldName, queries[1], 1); + assertQR(fieldName, queries[2], 1); + assertQR(fieldName, queries[3], 1); + } + + fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName); + assertEquals(2, fieldValues.size()); + assertThat(fieldValues, hasItems(vc.apply(values[1]), vc.apply(values[2]))); + assertThat(fieldValues, not(hasItems(vc.apply(values[0]), vc.apply(values[3])))); + fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName); + assertEquals(1, fieldValues.size()); + assertThat(fieldValues, hasItems(vc.apply(values[3]))); + assertThat(fieldValues, not(hasItems(vc.apply(values[0]), vc.apply(values[1]), vc.apply(values[2])))); + + getSolrClient().add(Arrays.asList(sdoc("id", "20000", fieldName, ImmutableMap.of("add", + Lists.newArrayList(values[0]), "remove", Lists.newArrayList(values[1], values[2]))), + sdoc("id", "20001", fieldName, + ImmutableMap.of("add", Lists.newArrayList(values[0]), "remove", Lists.newArrayList(values[3]))))); + getSolrClient().commit(true, true); + + if (queries != null) { + assertQR(fieldName, queries[0], 2); + assertQR(fieldName, queries[1], 0); + assertQR(fieldName, 
queries[2], 0); + assertQR(fieldName, queries[3], 0); + } + + fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName); + assertEquals(1, fieldValues.size()); + assertThat(fieldValues, hasItems(vc.apply(values[0]))); + assertThat(fieldValues, not(hasItems(vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3])))); + fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName); + assertEquals(1, fieldValues.size()); + assertThat(fieldValues, hasItems(vc.apply(values[0]))); + assertThat(fieldValues, not(hasItems(vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3])))); + + getSolrClient().add(Arrays.asList(sdoc("id", "20000", fieldName, ImmutableMap.of("set", + Lists.newArrayList(values[0], values[1], values[2], values[3]))), sdoc("id", "20001", fieldName, + ImmutableMap.of("set", + Lists.newArrayList(values[0], values[1], values[2], values[3]))))); + getSolrClient().commit(true, true); + + if (queries != null) { + assertQR(fieldName, queries[0], 2); + assertQR(fieldName, queries[1], 2); + assertQR(fieldName, queries[2], 2); + assertQR(fieldName, queries[3], 2); + } + + fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName); + assertEquals(4, fieldValues.size()); + assertThat(fieldValues, + hasItems(vc.apply(values[0]), vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3]))); + fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName); + assertEquals(4, fieldValues.size()); + assertThat(fieldValues, + hasItems(vc.apply(values[0]), vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3]))); + } + + private String[] toStringArray(final Object[] values) { + return Arrays.stream(values).map(v -> v.toString()).collect(Collectors.toList()).toArray(new String[] {}); + } + + private void runTestForFieldWithQuery(final String fieldName, final Object[] values) + throws SolrServerException, IOException { + runTestForField(fieldName, values, toStringArray(values), Optional.empty()); + } + + 
private void runTestForFieldWithQuery(final String fieldName, final Object[] values, final String[] queries) + throws SolrServerException, IOException { + runTestForField(fieldName, values, queries, Optional.empty()); + } + + private void runTestForFieldWithQuery(final String fieldName, final Object[] values, final String[] queries, + final Function valueConverter) + throws SolrServerException, IOException { + runTestForField(fieldName, values, queries, Optional.of(valueConverter)); + } + + private void runTestForFieldWithoutQuery(final String fieldName, final Object[] values) + throws SolrServerException, IOException { + runTestForField(fieldName, values, null, Optional.empty()); + } + + @Test + @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-13762") + public void testMultivalueBinaryField() throws SolrServerException, IOException { + runTestForFieldWithoutQuery("binaryRemove", + new byte[][] {new byte[] {0}, new byte[] {1}, new byte[] {2}, new byte[] {3}}); + } + + @Test + public void testMultivalueBooleanField() throws SolrServerException, IOException { + + final String fieldName = "booleanRemove"; + + getSolrClient().add(Arrays.asList( + sdoc("id", "20000", fieldName, Lists.newArrayList(true, false)), + sdoc("id", "20001", fieldName, Lists.newArrayList(false, true)))); + getSolrClient().commit(true, true); + + assertQR(fieldName, "true", 2); + assertQR(fieldName, "false", 2); + + Collection fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName); + assertEquals(2, fieldValues.size()); + assertThat(fieldValues, hasItems(true, false)); + fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName); + assertEquals(2, fieldValues.size()); + assertThat(fieldValues, hasItems(true, false)); + + getSolrClient().add(sdoc("id", "20000", fieldName, ImmutableMap.of("remove", + Lists.newArrayList(false)))); + getSolrClient().commit(true, true); + + assertQR(fieldName, "true", 2); + assertQR(fieldName, "false", 1); + + fieldValues 
= getSolrClient().getById("20000").getFieldValues(fieldName); + assertEquals(1, fieldValues.size()); + assertThat(fieldValues, hasItems(true)); + fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName); + assertEquals(2, fieldValues.size()); + assertThat(fieldValues, hasItems(true, false)); + + getSolrClient().add(sdoc("id", "20001", fieldName, ImmutableMap.of("remove", + Lists.newArrayList(true, false)))); + getSolrClient().commit(true, true); + + assertQR(fieldName, "true", 1); + assertQR(fieldName, "false", 0); + + fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName); + assertEquals(1, fieldValues.size()); + assertThat(fieldValues, hasItems(true)); + assertThat(fieldValues, not(hasItems(false))); + fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName); + assertNull(fieldValues); + + getSolrClient().add(Arrays.asList(sdoc("id", "20000", fieldName, ImmutableMap.of("add", + Lists.newArrayList(false, false))))); + getSolrClient().commit(true, true); + + assertQR(fieldName, "true", 1); + assertQR(fieldName, "false", 1); + + fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName); + assertEquals(3, fieldValues.size()); + assertThat(fieldValues, hasItems(true, false)); + fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName); + assertNull(fieldValues); + + getSolrClient().add(Arrays.asList(sdoc("id", "20000", fieldName, ImmutableMap.of("set", + Lists.newArrayList(true, false))), sdoc("id", "20001", fieldName, + ImmutableMap.of("set", + Lists.newArrayList(false, true))))); + getSolrClient().commit(true, true); + + assertQR(fieldName, "true", 2); + assertQR(fieldName, "false", 2); + + fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName); + assertEquals(2, fieldValues.size()); + assertThat(fieldValues, hasItems(true, false)); + fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName); + assertEquals(2, fieldValues.size()); + 
assertThat(fieldValues, hasItems(true, false)); + } + + @Test + public void testMultivalueCollationField() throws SolrServerException, IOException { + runTestForFieldWithQuery("collationRemove", new String[] {"cf1", "cf2", "cf3", "cf4"}); + } + + @Test + public void testMultivalueDatePointField() throws SolrServerException, IOException { + + final String s1 = "1980-01-01T00:00:00Z"; + final Date d1 = Date.from(ZonedDateTime.parse(s1).toInstant()); + final String s2 = "1990-01-01T00:00:00Z"; + final Date d2 = Date.from(ZonedDateTime.parse(s2).toInstant()); + final String s3 = "2000-01-01T00:00:00Z"; + final Date d3 = Date.from(ZonedDateTime.parse(s3).toInstant()); + final String s4 = "2010-01-01T00:00:00Z"; + final Date d4 = Date.from(ZonedDateTime.parse(s4).toInstant()); + + runTestForFieldWithQuery("datePointRemove", new Date[] {d1, d2, d3, d4}, + new String[] {"\"" + s1 + "\"", "\"" + s2 + "\"", "\"" + s3 + "\"", "\"" + s4 + "\""}); + } + + @Test + public void testMultivalueDateRangeField() throws SolrServerException, IOException { + + final String s1 = "1980-01-01T00:00:00Z"; + final String s2 = "1990-01-01T00:00:00Z"; + final String s3 = "2000-01-01T00:00:00Z"; + final String s4 = "2010-01-01T00:00:00Z"; + + runTestForFieldWithQuery("dateRangeRemove", new String[] {s1, s2, s3, s4}, + new String[] {"\"" + s1 + "\"", "\"" + s2 + "\"", "\"" + s3 + "\"", "\"" + s4 + "\""}); + } + + @Test + public void testMultivalueDoublePointField() throws SolrServerException, IOException { + runTestForFieldWithQuery("doublePointRemove", new Double[] {1.0d, 2.0d, 3.0d, 4.0d}); + } + + @Test + public void testMultivalueEnumField() throws SolrServerException, IOException { + runTestForFieldWithQuery("enumRemove_sev_enum", new Object[] {"Low", "Medium", "High", "Critical"}); + } + + @Test + public void testMultivalueEnumFieldWithNumbers() throws SolrServerException, IOException { + final Object[] values = new Object[] {"Low", "Medium", "High", 11}; + 
runTestForFieldWithQuery("enumRemove_sev_enum", values, toStringArray(values), o -> { + if (Integer.valueOf(11).equals(o)) { + return "Critical"; + } else { + return o; + } + }); + } + + @Test + public void testMultivalueExternalFileField() throws SolrServerException, IOException { + runTestForFieldWithoutQuery("externalFileRemove", + new String[] {"file1.txt", "file2.txt", "file3.txt", "file4.txt"}); + } + + @Test + public void testMultivalueFloatPointField() throws SolrServerException, IOException { + runTestForFieldWithQuery("floatPointRemove", new Float[] {1.0f, 2.0f, 3.0f, 4.0f}); + } + + @Test + public void testMultivalueICUCollationField() throws SolrServerException, IOException { + runTestForFieldWithQuery("icuCollationRemove", new String[] {"iuccf1", "icucf2", "icucf3", "icucf4"}); + } + + @Test + public void testMultivalueIntPointField() throws SolrServerException, IOException { + runTestForFieldWithQuery("intPointRemove", new Integer[] {1, 2, 3, 4}); + } + + @Test + public void testMultivalueLatLonPointSpatialField() throws SolrServerException, IOException { + runTestForFieldWithoutQuery("latLonPointSpatialRemove", + new String[] {"1.0,-1.0", "2.0,-2.0", "3.0,-3.0", "4.0,-4.0"}); + } + + @Test + public void testMultivalueLatLonField() throws SolrServerException, IOException { + runTestForFieldWithQuery("latLonRemove", new String[] {"1.0,-1.0", "2.0,-2.0", "3.0,-3.0", "4.0,-4.0"}); + } + + @Test + public void testMultivalueLongPointField() throws SolrServerException, IOException { + runTestForFieldWithQuery("longPointRemove", new Long[] {1l, 2l, 3l, 4l}); + } + + @Test + public void testMultivaluePointField() throws SolrServerException, IOException { + runTestForFieldWithQuery("pointRemove", new String[] {"1,1", "2,2", "3,3", "4,4"}); + } + + @Test + public void testMultivalueRandomSortField() throws SolrServerException, IOException { + runTestForFieldWithQuery("randomSortRemove", new String[] {"rsf1", "rsf2", "rsf3", "rsf4"}); + } + + @Test + public void 
testMultivalueSpatialRecursivePrefixTreeFieldType() throws SolrServerException, IOException { + runTestForFieldWithoutQuery("spatialRecursivePrefixTreeRemove", new String[] {"1,1", "2,2", "3,3", "4,4"}); + } + + @Test + public void testMultivalueStringField() throws SolrServerException, IOException { + runTestForFieldWithQuery("stringRemove", new String[] {"str1", "str2", "str3", "str4"}); + } + + @Test + public void testMultivalueStringFieldUsingCharSequence() throws SolrServerException, IOException { + final ByteArrayUtf8CharSequence[] values = new ByteArrayUtf8CharSequence[] {new ByteArrayUtf8CharSequence("str1"), + new ByteArrayUtf8CharSequence("str2"), + new ByteArrayUtf8CharSequence("str3"), new ByteArrayUtf8CharSequence("str4")}; + runTestForFieldWithQuery("stringRemove", values, toStringArray(values), o -> o.toString()); + } + + @Test + public void testMultivalueTextField() throws SolrServerException, IOException { + runTestForFieldWithQuery("textRemove", new String[] {"text1", "text2", "text3", "text4"}); + } + + @Test + public void testMultivalueUUIDField() throws SolrServerException, IOException { + final String[] values = new String[] {UUID.randomUUID().toString(), UUID.randomUUID().toString(), + UUID.randomUUID().toString(), UUID.randomUUID().toString()}; + runTestForFieldWithQuery("uuidRemove", values); + } + +} diff --git a/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java b/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java index 12404862887b..48c76b778128 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java @@ -75,6 +75,7 @@ public void testRemove() throws Exception { assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '3']"); + assertQ(req("q", "cat:ccc", "indent", "true"), "//result[@numFound = 
'3']"); doc = new SolrInputDocument(); @@ -88,6 +89,7 @@ public void testRemove() throws Exception { assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '2']"); + assertQ(req("q", "cat:ccc", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence doc = new SolrInputDocument(); doc.setField("id", "21"); @@ -142,6 +144,7 @@ public void testRemoveInteger() throws Exception { assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '3']"); + assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); doc = new SolrInputDocument(); @@ -155,6 +158,7 @@ public void testRemoveInteger() throws Exception { assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']"); + assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence doc = new SolrInputDocument(); doc.setField("id", "1021"); @@ -210,6 +214,7 @@ public void testRemoveIntegerInDocSavedWithInteger() throws Exception { assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '3']"); + assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); doc = new SolrInputDocument(); @@ -223,6 +228,7 @@ public void testRemoveIntegerInDocSavedWithInteger() throws Exception { assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']"); + assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence doc = new 
SolrInputDocument(); doc.setField("id", "1021"); @@ -274,6 +280,7 @@ public void testRemoveIntegerUsingStringType() throws Exception { assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '3']"); + assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); doc = new SolrInputDocument(); @@ -287,6 +294,7 @@ public void testRemoveIntegerUsingStringType() throws Exception { assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']"); + assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence doc = new SolrInputDocument(); doc.setField("id", "1021"); @@ -339,6 +347,7 @@ public void testRemoveIntegerUsingLongType() throws Exception { assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '3']"); + assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); doc = new SolrInputDocument(); doc.setField("id", "1001"); @@ -351,6 +360,7 @@ public void testRemoveIntegerUsingLongType() throws Exception { assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']"); + assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence doc = new SolrInputDocument(); doc.setField("id", "1021"); @@ -423,6 +433,7 @@ public void testRemoveIntegerUsingFloatType() throws Exception { assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '3']"); + assertQ(req("q", "intRemove:333", 
"indent", "true"), "//result[@numFound = '3']"); doc = new SolrInputDocument(); @@ -436,6 +447,7 @@ public void testRemoveIntegerUsingFloatType() throws Exception { assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']"); + assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence doc = new SolrInputDocument(); doc.setField("id", "1021"); @@ -489,6 +501,7 @@ public void testRemoveIntegerUsingDoubleType() throws Exception { assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:22222222", "indent", "true"), "//result[@numFound = '3']"); + assertQ(req("q", "intRemove:33333333", "indent", "true"), "//result[@numFound = '3']"); doc = new SolrInputDocument(); @@ -502,6 +515,7 @@ public void testRemoveIntegerUsingDoubleType() throws Exception { assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:22222222", "indent", "true"), "//result[@numFound = '2']"); + assertQ(req("q", "intRemove:33333333", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence doc = new SolrInputDocument(); doc.setField("id", "1021"); @@ -559,6 +573,7 @@ public void testRemoveDateUsingStringType() throws Exception { assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']"); } assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '3']"); + assertQ(req("q", "dateRemove:\"2014-09-03T12:00:00Z\"", "indent", "true"), "//result[@numFound = '3']"); doc = new SolrInputDocument(); doc.setField("id", "10001"); @@ -672,6 +687,7 @@ public void testRemoveDateUsingDateType() throws Exception { assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", 
"dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '2']"); + assertQ(req("q", "dateRemove:\"2014-09-03T12:00:00Z\"", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence doc = new SolrInputDocument(); doc.setField("id", "10021"); @@ -794,6 +810,7 @@ public void testRemoveFloatUsingStringType() throws Exception { assertQ(req("q", "floatRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "floatRemove:\"222.222\"", "indent", "true"), "//result[@numFound = '3']"); + assertQ(req("q", "floatRemove:\"333.333\"", "indent", "true"), "//result[@numFound = '3']"); doc = new SolrInputDocument(); @@ -808,6 +825,7 @@ public void testRemoveFloatUsingStringType() throws Exception { assertQ(req("q", "floatRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "floatRemove:\"222.222\"", "indent", "true"), "//result[@numFound = '2']"); + assertQ(req("q", "floatRemove:\"333.333\"", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence doc = new SolrInputDocument(); doc.setField("id", "10021"); @@ -832,7 +850,7 @@ public void testRemoveFloatUsingStringType() throws Exception { assertQ(req("q", "floatRemove:\"111.111\"", "indent", "true"), "//result[@numFound = '3']"); } - @Test + @Test public void testRemoveregex() throws Exception { SolrInputDocument doc; @@ -862,6 +880,7 @@ public void testRemoveregex() throws Exception { assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '3']"); + assertQ(req("q", "cat:ccc", "indent", "true"), "//result[@numFound = '3']"); doc = new SolrInputDocument(); @@ -875,6 +894,7 @@ public void testRemoveregex() throws Exception { assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '2']"); + assertQ(req("q", "cat:ccc", 
"indent", "true"), "//result[@numFound = '2']"); // removeregex does remove all occurrences doc = new SolrInputDocument(); doc.setField("id", "21"); @@ -899,6 +919,43 @@ public void testRemoveregex() throws Exception { assertQ(req("q", "cat:aaa", "indent", "true"), "//result[@numFound = '3']"); } + @Test + public void testRemoveregexMustMatchWholeValue() throws Exception { + SolrInputDocument doc; + + doc = new SolrInputDocument(); + doc.setField("id", "1"); + doc.setField("cat", new String[]{"aaa", "bbb", "ccc", "ccc", "ddd"}); + assertU(adoc(doc)); + assertU(commit()); + + assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']"); + assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '1']"); + + + doc = new SolrInputDocument(); + doc.setField("id", "1"); + List removeList = new ArrayList<>(); + removeList.add("bb"); + doc.setField("cat", ImmutableMap.of("removeregex", removeList)); //behavior when hitting Solr through ZK + assertU(adoc(doc)); + assertU(commit()); + + assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']"); + assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '1']"); // Was not removed - regex didn't match whole value + + doc = new SolrInputDocument(); + doc.setField("id", "1"); + removeList = new ArrayList<>(); + removeList.add("bbb"); + doc.setField("cat", ImmutableMap.of("removeregex", removeList)); //behavior when hitting Solr through ZK + assertU(adoc(doc)); + assertU(commit()); + + assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']"); + assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '0']"); // Was removed now - regex matches + } + @Test public void testAdd() throws Exception { SolrInputDocument doc = new SolrInputDocument(); @@ -975,6 +1032,55 @@ public void testAddDistinct() throws Exception { assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '2']"); //'cat' field not present, do 'add' atomic operation } + @Test 
+ public void testAddMultiple() throws Exception { + SolrInputDocument doc = new SolrInputDocument(); + doc.setField("id", "3"); + doc.setField("cat", new String[]{"aaa", "ccc"}); + assertU(adoc(doc)); + assertU(commit()); + + assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']"); + assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '0']"); + + + doc = new SolrInputDocument(); + doc.setField("id", "3"); + doc.setField("cat", ImmutableMap.of("add", "bbb")); + assertU(adoc(doc)); + assertU(commit()); + + assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']"); + assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '1']"); + + doc = new SolrInputDocument(); + doc.setField("id", "3"); + doc.setField("cat", ImmutableMap.of("add", "bbb")); + assertU(adoc(doc)); + assertU(commit()); + + assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']"); + assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '1']"); // Should now have 2 occurrences of bbb + + doc = new SolrInputDocument(); + doc.setField("id", "3"); + doc.setField("cat", ImmutableMap.of("remove", "bbb")); + assertU(adoc(doc)); + assertU(commit()); + + assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']"); + assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '1']"); // remove only removed first occurrence + + doc = new SolrInputDocument(); + doc.setField("id", "3"); + doc.setField("cat", ImmutableMap.of("remove", "bbb")); + assertU(adoc(doc)); + assertU(commit()); + + assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']"); + assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '0']"); // remove now removed last occurrence + } + @Test public void testSet() throws Exception { SolrInputDocument doc; diff --git a/solr/core/src/test/org/apache/solr/update/processor/DimensionalRoutedAliasUpdateProcessorTest.java 
b/solr/core/src/test/org/apache/solr/update/processor/DimensionalRoutedAliasUpdateProcessorTest.java index 738235ba332a..3d583aca6dbc 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/DimensionalRoutedAliasUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/DimensionalRoutedAliasUpdateProcessorTest.java @@ -686,7 +686,8 @@ private void testFailedDocument(String category, String timestamp, String errorM final Object errors = resp.getResponseHeader().get("errors"); // Tolerant URP assertTrue(errors != null && errors.toString().contains(errorMsg)); } catch (SolrException e) { - assertTrue(e.getMessage().contains(errorMsg)); + String message = e.getMessage(); + assertTrue("expected message to contain" + errorMsg + " but message was " + message , message.contains(errorMsg)); } numDocsDeletedOrFailed++; } diff --git a/solr/core/src/test-files/runtimecode/MyDocCache.java b/solr/core/src/test/org/apache/solr/update/processor/JavaBinAtomicUpdateMultivalueTest.java similarity index 64% rename from solr/core/src/test-files/runtimecode/MyDocCache.java rename to solr/core/src/test/org/apache/solr/update/processor/JavaBinAtomicUpdateMultivalueTest.java index 406b9508f79c..5f9889e04e56 100644 --- a/solr/core/src/test-files/runtimecode/MyDocCache.java +++ b/solr/core/src/test/org/apache/solr/update/processor/JavaBinAtomicUpdateMultivalueTest.java @@ -14,22 +14,15 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ +package org.apache.solr.update.processor; -package runtimecode; +import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer.RequestWriterSupplier; -import org.apache.lucene.document.Document; -import org.apache.lucene.document.StoredField; -import org.apache.solr.search.LRUCache; +public class JavaBinAtomicUpdateMultivalueTest extends AbstractAtomicUpdatesMultivalueTestBase { -public class MyDocCache extends LRUCache { - - static String fld_name= "my_synthetic_fld_s"; @Override - public V put(K key, V value) { - if(value instanceof Document){ - Document d = (Document) value; - d.add(new StoredField(fld_name, "version_2")); - } - return super.put(key, value); + RequestWriterSupplier getRequestWriterSupplier() { + return RequestWriterSupplier.JavaBin; } + } diff --git a/solr/core/src/test/org/apache/solr/update/processor/RuntimeUrp.java b/solr/core/src/test/org/apache/solr/update/processor/RuntimeUrp.java index 6cee3d9c1d89..889b0bf5786b 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/RuntimeUrp.java +++ b/solr/core/src/test/org/apache/solr/update/processor/RuntimeUrp.java @@ -31,7 +31,7 @@ protected void process(AddUpdateCommand cmd, SolrQueryRequest req, SolrQueryResp List names = new ArrayList<>(); for (UpdateRequestProcessorFactory p : processorChain.getProcessors()) { if (p instanceof UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder.LazyUpdateRequestProcessorFactory) { - p = ((UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder.LazyUpdateRequestProcessorFactory) p).getDelegate(); + p = ((UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder.LazyUpdateRequestProcessorFactory) p).delegate; } names.add(p.getClass().getSimpleName()); } diff --git a/solr/core/src/test/org/apache/solr/update/processor/TimeRoutedAliasUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/TimeRoutedAliasUpdateProcessorTest.java index e0dc8e0e27ae..1cb55420dcd2 100644 --- 
a/solr/core/src/test/org/apache/solr/update/processor/TimeRoutedAliasUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/TimeRoutedAliasUpdateProcessorTest.java @@ -20,6 +20,8 @@ import java.io.IOException; import java.lang.invoke.MethodHandles; import java.time.Instant; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeParseException; import java.time.temporal.ChronoUnit; import java.util.Arrays; import java.util.Collections; @@ -50,7 +52,12 @@ import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.Utils; import org.apache.solr.update.UpdateCommand; +import org.apache.solr.util.DateMathParser; import org.apache.solr.util.LogLevel; +import org.apache.zookeeper.KeeperException; +import org.apache.zookeeper.WatchedEvent; +import org.apache.zookeeper.Watcher; +import org.apache.zookeeper.data.Stat; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -59,6 +66,7 @@ import static org.apache.solr.client.solrj.RoutedAliasTypes.TIME; import static org.apache.solr.cloud.api.collections.RoutedAlias.ROUTED_ALIAS_NAME_CORE_PROP; +import static org.apache.solr.cloud.api.collections.TimeRoutedAlias.ROUTER_START; import static org.apache.solr.common.cloud.ZkStateReader.COLLECTIONS_ZKNODE; import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROPS_ZKNODE; @@ -706,6 +714,68 @@ public void testParse() { TimeRoutedAlias.parseInstantFromCollectionName(alias, alias + TRA + "2017-10-02")); } + @Test + public void testDateMathInStart() throws Exception { + ClusterStateProvider clusterStateProvider = solrClient.getClusterStateProvider(); + Class aClass = clusterStateProvider.getClass(); + System.out.println("CSPROVIDER:" + aClass); + + // This test prevents recurrence of SOLR-13760 + + String configName = getSaferTestName(); + createConfigSet(configName); + CountDownLatch aliasUpdate = new CountDownLatch(1); + monitorAlias(aliasUpdate); + + // each collection has 4 
shards with 3 replicas for 12 possible destinations + // 4 of which are leaders, and 8 of which should fail this test. + final int numShards = 1 + random().nextInt(4); + final int numReplicas = 1 + random().nextInt(3); + CollectionAdminRequest.createTimeRoutedAlias(alias, "2019-09-14T03:00:00Z/DAY", "+1DAY", getTimeField(), + CollectionAdminRequest.createCollection("_unused_", configName, numShards, numReplicas) + .setMaxShardsPerNode(numReplicas)) + .process(solrClient); + + aliasUpdate.await(); + if (BaseHttpClusterStateProvider.class.isAssignableFrom(aClass)) { + ((BaseHttpClusterStateProvider)clusterStateProvider).resolveAlias(getAlias(), true); + } + aliasUpdate = new CountDownLatch(1); + monitorAlias(aliasUpdate); + + ModifiableSolrParams params = params(); + String nowDay = DateTimeFormatter.ISO_INSTANT.format(DateMathParser.parseMath(new Date(), "2019-09-14T01:00:00Z").toInstant()); + assertUpdateResponse(add(alias, Arrays.asList( + sdoc("id", "1", "timestamp_dt", nowDay)), // should not cause preemptive creation of 10-28 now + params)); + + // this process should have lead to the modification of the start time for the alias, converting it into + // a parsable date, removing the DateMath + + // what we test next happens in a separate thread, so we have to give it some time to happen + aliasUpdate.await(); + if (BaseHttpClusterStateProvider.class.isAssignableFrom(aClass)) { + ((BaseHttpClusterStateProvider)clusterStateProvider).resolveAlias(getAlias(), true); + } + + String hopeFullyModified = clusterStateProvider.getAliasProperties(getAlias()).get(ROUTER_START); + try { + Instant.parse(hopeFullyModified); + } catch (DateTimeParseException e) { + fail(ROUTER_START + " should not have any date math by this point and parse as an instant. 
Using "+ aClass +" Found:" + hopeFullyModified); + } + } + + private void monitorAlias(CountDownLatch aliasUpdate) throws KeeperException, InterruptedException { + Stat stat = new Stat(); + zkClient().getData("/aliases.json", new Watcher() { + @Override + public void process(WatchedEvent watchedEvent) { + aliasUpdate.countDown(); + } + }, stat, true); + } + /** * Need to ensure that the existing TRA's gracefully handle, old, new and mixtures thereof. TRA's with * an autoDeleteAge setting will gracefully convert to the new format over time. diff --git a/solr/core/src/test/org/apache/solr/update/processor/XMLAtomicUpdateMultivalueTest.java b/solr/core/src/test/org/apache/solr/update/processor/XMLAtomicUpdateMultivalueTest.java new file mode 100644 index 000000000000..1a5f62be1551 --- /dev/null +++ b/solr/core/src/test/org/apache/solr/update/processor/XMLAtomicUpdateMultivalueTest.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.update.processor; + +import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer.RequestWriterSupplier; + +public class XMLAtomicUpdateMultivalueTest extends AbstractAtomicUpdatesMultivalueTestBase { + + @Override + RequestWriterSupplier getRequestWriterSupplier() { + return RequestWriterSupplier.XML; + } + +} diff --git a/solr/core/src/test/org/apache/solr/util/TestExportTool.java b/solr/core/src/test/org/apache/solr/util/TestExportTool.java index fdfb3c09b2e7..9e637f977442 100644 --- a/solr/core/src/test/org/apache/solr/util/TestExportTool.java +++ b/solr/core/src/test/org/apache/solr/util/TestExportTool.java @@ -36,7 +36,6 @@ import org.apache.solr.client.solrj.request.JavaBinUpdateRequestCodec; import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.client.solrj.response.QueryResponse; -import org.apache.solr.cloud.MiniSolrCloudCluster; import org.apache.solr.cloud.SolrCloudTestCase; import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; @@ -49,7 +48,7 @@ public class TestExportTool extends SolrCloudTestCase { public void testBasic() throws Exception { String COLLECTION_NAME = "globalLoaderColl"; - MiniSolrCloudCluster cluster = configureCluster(4) + configureCluster(4) .addConfig("conf", configset("cloud-minimal")) .configure(); @@ -122,7 +121,7 @@ public void testBasic() throws Exception { @Nightly public void testVeryLargeCluster() throws Exception { String COLLECTION_NAME = "veryLargeColl"; - MiniSolrCloudCluster cluster = configureCluster(4) + configureCluster(4) .addConfig("conf", configset("cloud-minimal")) .configure(); diff --git a/solr/licenses/asciidoctor-ant-1.6.0-alpha.5.jar.sha1 b/solr/licenses/asciidoctor-ant-1.6.0-alpha.5.jar.sha1 deleted file mode 100644 index 0da9ca2b120e..000000000000 --- a/solr/licenses/asciidoctor-ant-1.6.0-alpha.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -741c5e5afd8a2c7d415feb7b9a8d6fe8a6cca57c diff --git 
a/solr/licenses/asciidoctor-ant-1.6.2.jar.sha1 b/solr/licenses/asciidoctor-ant-1.6.2.jar.sha1 new file mode 100644 index 000000000000..558a01f58399 --- /dev/null +++ b/solr/licenses/asciidoctor-ant-1.6.2.jar.sha1 @@ -0,0 +1 @@ +c5ba599e3918e7a3316e6bf110cadd5aeb2a026b diff --git a/solr/licenses/caffeine-2.4.0.jar.sha1 b/solr/licenses/caffeine-2.4.0.jar.sha1 deleted file mode 100644 index 9c317d927349..000000000000 --- a/solr/licenses/caffeine-2.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5aa8bbb851b1ad403cc140094ba4a25998369efe diff --git a/solr/licenses/caffeine-2.8.0.jar.sha1 b/solr/licenses/caffeine-2.8.0.jar.sha1 new file mode 100644 index 000000000000..ce291c474255 --- /dev/null +++ b/solr/licenses/caffeine-2.8.0.jar.sha1 @@ -0,0 +1 @@ +6000774d7f8412ced005a704188ced78beeed2bb diff --git a/solr/licenses/commons-beanutils-1.9.3.jar.sha1 b/solr/licenses/commons-beanutils-1.9.3.jar.sha1 deleted file mode 100644 index da389e5971cd..000000000000 --- a/solr/licenses/commons-beanutils-1.9.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c845703de334ddc6b4b3cd26835458cb1cba1f3d diff --git a/solr/licenses/commons-beanutils-NOTICE.txt b/solr/licenses/commons-beanutils-NOTICE.txt deleted file mode 100644 index c6c8ce997896..000000000000 --- a/solr/licenses/commons-beanutils-NOTICE.txt +++ /dev/null @@ -1,5 +0,0 @@ -Apache Commons BeanUtils -Copyright 2000-2018 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). 
diff --git a/solr/licenses/jackson-annotations-2.9.8.jar.sha1 b/solr/licenses/jackson-annotations-2.9.8.jar.sha1 deleted file mode 100644 index 64b57a832ff2..000000000000 --- a/solr/licenses/jackson-annotations-2.9.8.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ba7f0e6f8f1b28d251eeff2a5604bed34c53ff35 diff --git a/solr/licenses/jackson-annotations-2.9.9.jar.sha1 b/solr/licenses/jackson-annotations-2.9.9.jar.sha1 new file mode 100644 index 000000000000..7cf1b18f110b --- /dev/null +++ b/solr/licenses/jackson-annotations-2.9.9.jar.sha1 @@ -0,0 +1 @@ +2ea299c145207161c212e28abbc8f513fa245940 diff --git a/solr/licenses/jackson-core-2.9.8.jar.sha1 b/solr/licenses/jackson-core-2.9.8.jar.sha1 deleted file mode 100644 index 7634344bc1a1..000000000000 --- a/solr/licenses/jackson-core-2.9.8.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0f5a654e4675769c716e5b387830d19b501ca191 diff --git a/solr/licenses/jackson-core-2.9.9.jar.sha1 b/solr/licenses/jackson-core-2.9.9.jar.sha1 new file mode 100644 index 000000000000..d81f13017a51 --- /dev/null +++ b/solr/licenses/jackson-core-2.9.9.jar.sha1 @@ -0,0 +1 @@ +bfff5af9fb8347d26bbb7959cb9b4fe9a2b0ca5e diff --git a/solr/licenses/jackson-databind-2.9.8.jar.sha1 b/solr/licenses/jackson-databind-2.9.8.jar.sha1 deleted file mode 100644 index 3319cf30280c..000000000000 --- a/solr/licenses/jackson-databind-2.9.8.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -11283f21cc480aa86c4df7a0a3243ec508372ed2 diff --git a/solr/licenses/jackson-databind-2.9.9.3.jar.sha1 b/solr/licenses/jackson-databind-2.9.9.3.jar.sha1 new file mode 100644 index 000000000000..6b26e158ca0e --- /dev/null +++ b/solr/licenses/jackson-databind-2.9.9.3.jar.sha1 @@ -0,0 +1 @@ +68ddd453458765757fd3ffca9437f9a42d91003e diff --git a/solr/licenses/jackson-dataformat-smile-2.9.8.jar.sha1 b/solr/licenses/jackson-dataformat-smile-2.9.8.jar.sha1 deleted file mode 100644 index a4787c06b70d..000000000000 --- a/solr/licenses/jackson-dataformat-smile-2.9.8.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-dbb47a052ac2b249ae004ce32e1e0c8bd8ee526c diff --git a/solr/licenses/jackson-dataformat-smile-2.9.9.jar.sha1 b/solr/licenses/jackson-dataformat-smile-2.9.9.jar.sha1 new file mode 100644 index 000000000000..7d77811c0019 --- /dev/null +++ b/solr/licenses/jackson-dataformat-smile-2.9.9.jar.sha1 @@ -0,0 +1 @@ +85749406c69b08945d6059db679cc66990340ebc diff --git a/solr/licenses/netty-all-4.0.52.Final.jar.sha1 b/solr/licenses/netty-all-4.0.52.Final.jar.sha1 deleted file mode 100644 index c95a6c896c52..000000000000 --- a/solr/licenses/netty-all-4.0.52.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6adde4fa5e7b8ff8a25500a66b369a110a047862 diff --git a/solr/licenses/netty-all-4.1.29.Final.jar.sha1 b/solr/licenses/netty-all-4.1.29.Final.jar.sha1 new file mode 100644 index 000000000000..596bbafd6a4d --- /dev/null +++ b/solr/licenses/netty-all-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +efce189397bfce0a561b25512696de8d241070b5 diff --git a/solr/licenses/netty-buffer-4.1.29.Final.jar.sha1 b/solr/licenses/netty-buffer-4.1.29.Final.jar.sha1 new file mode 100644 index 000000000000..f8bc6b0513af --- /dev/null +++ b/solr/licenses/netty-buffer-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +c3809f72e4b535b343b7dfa3c0c8210dad2fa5ea diff --git a/solr/licenses/commons-beanutils-LICENSE-ASL.txt b/solr/licenses/netty-buffer-LICENSE-ASL.txt similarity index 100% rename from solr/licenses/commons-beanutils-LICENSE-ASL.txt rename to solr/licenses/netty-buffer-LICENSE-ASL.txt diff --git a/solr/licenses/netty-buffer-NOTICE.txt b/solr/licenses/netty-buffer-NOTICE.txt new file mode 100644 index 000000000000..f973663670b4 --- /dev/null +++ b/solr/licenses/netty-buffer-NOTICE.txt @@ -0,0 +1,223 @@ + + The Netty Project + ================= + +Please visit the Netty web site for more information: + + * http://netty.io/ + +Copyright 2014 The Netty Project + +The Netty Project licenses this file to you under the Apache License, +version 2.0 (the "License"); you may not use this file except in compliance +with the License. 
You may obtain a copy of the License at: + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations +under the License. + +Also, please refer to each LICENSE..txt file, which is located in +the 'license' directory of the distribution file, for the license terms of the +components that this product depends on. + +------------------------------------------------------------------------------- +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified portion of 'Webbit', an event based +WebSocket and HTTP server, which can be obtained at: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product contains a modified portion of 'SLF4J', a simple logging +facade for Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * http://www.slf4j.org/ + +This product contains a modified portion of 'Apache Harmony', an open source +Java SE, which can be obtained at: + + * NOTICE: + * license/NOTICE.harmony.txt + * LICENSE: + * license/LICENSE.harmony.txt (Apache License 2.0) + * 
HOMEPAGE: + * http://archive.apache.org/dist/harmony/ + +This product contains a modified portion of 'jbzip2', a Java bzip2 compression +and decompression library written by Matthew J. Francis. It can be obtained at: + + * LICENSE: + * license/LICENSE.jbzip2.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jbzip2/ + +This product contains a modified portion of 'libdivsufsort', a C API library to construct +the suffix array and the Burrows-Wheeler transformed string for any input string of +a constant-size alphabet written by Yuta Mori. It can be obtained at: + + * LICENSE: + * license/LICENSE.libdivsufsort.txt (MIT License) + * HOMEPAGE: + * https://github.com/y-256/libdivsufsort + +This product contains a modified portion of Nitsan Wakart's 'JCTools', Java Concurrency Tools for the JVM, + which can be obtained at: + + * LICENSE: + * license/LICENSE.jctools.txt (ASL2 License) + * HOMEPAGE: + * https://github.com/JCTools/JCTools + +This product optionally depends on 'JZlib', a re-implementation of zlib in +pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product optionally depends on 'Compress-LZF', a Java library for encoding and +decoding data in LZF format, written by Tatu Saloranta. It can be obtained at: + + * LICENSE: + * license/LICENSE.compress-lzf.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/ning/compress + +This product optionally depends on 'lz4', a LZ4 Java compression +and decompression library written by Adrien Grand. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.lz4.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jpountz/lz4-java + +This product optionally depends on 'lzma-java', a LZMA Java compression +and decompression library, which can be obtained at: + + * LICENSE: + * license/LICENSE.lzma-java.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jponge/lzma-java + +This product contains a modified portion of 'jfastlz', a Java port of FastLZ compression +and decompression library written by William Kinney. It can be obtained at: + + * LICENSE: + * license/LICENSE.jfastlz.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jfastlz/ + +This product contains a modified portion of and optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/protobuf + +This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * http://www.bouncycastle.org/ + +This product optionally depends on 'Snappy', a compression library produced +by Google Inc, which can be obtained at: + + * LICENSE: + * license/LICENSE.snappy.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/snappy + +This product optionally depends on 'JBoss Marshalling', an alternative Java +serialization API, which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-marshalling.txt (GNU LGPL 2.1) + * HOMEPAGE: + * http://www.jboss.org/jbossmarshalling + +This product optionally depends on 'Caliper', Google's micro- +benchmarking framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.caliper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/google/caliper + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * http://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, which +can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * http://logging.apache.org/log4j/ + +This product optionally depends on 'Aalto XML', an ultra-high performance +non-blocking XML processor, which can be obtained at: + + * LICENSE: + * license/LICENSE.aalto-xml.txt (Apache License 2.0) + * HOMEPAGE: + * http://wiki.fasterxml.com/AaltoHome + +This product contains a modified version of 'HPACK', a Java implementation of +the HTTP/2 HPACK algorithm written by Twitter. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.hpack.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/twitter/hpack + +This product contains a modified portion of 'Apache Commons Lang', a Java library +provides utilities for the java.lang API, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-lang.txt (Apache License 2.0) + * HOMEPAGE: + * https://commons.apache.org/proper/commons-lang/ + + +This product contains the Maven wrapper scripts from 'Maven Wrapper', that provides an easy way to ensure a user has everything necessary to run the Maven build. + + * LICENSE: + * license/LICENSE.mvn-wrapper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/takari/maven-wrapper diff --git a/solr/licenses/netty-codec-4.1.29.Final.jar.sha1 b/solr/licenses/netty-codec-4.1.29.Final.jar.sha1 new file mode 100644 index 000000000000..cf3be3554533 --- /dev/null +++ b/solr/licenses/netty-codec-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +1651bc2e279216773c234cafe402d68d2a5adc90 diff --git a/solr/licenses/netty-codec-LICENSE-ASL.txt b/solr/licenses/netty-codec-LICENSE-ASL.txt new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/solr/licenses/netty-codec-LICENSE-ASL.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/solr/licenses/netty-codec-NOTICE.txt b/solr/licenses/netty-codec-NOTICE.txt new file mode 100644 index 000000000000..f973663670b4 --- /dev/null +++ b/solr/licenses/netty-codec-NOTICE.txt @@ -0,0 +1,223 @@ + + The Netty Project + ================= + +Please visit the Netty web site for more information: + + * http://netty.io/ + +Copyright 2014 The Netty Project + +The Netty Project licenses this file to you under the Apache License, +version 2.0 (the "License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at: + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations +under the License. + +Also, please refer to each LICENSE..txt file, which is located in +the 'license' directory of the distribution file, for the license terms of the +components that this product depends on. + +------------------------------------------------------------------------------- +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. 
Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified portion of 'Webbit', an event based +WebSocket and HTTP server, which can be obtained at: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product contains a modified portion of 'SLF4J', a simple logging +facade for Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * http://www.slf4j.org/ + +This product contains a modified portion of 'Apache Harmony', an open source +Java SE, which can be obtained at: + + * NOTICE: + * license/NOTICE.harmony.txt + * LICENSE: + * license/LICENSE.harmony.txt (Apache License 2.0) + * HOMEPAGE: + * http://archive.apache.org/dist/harmony/ + +This product contains a modified portion of 'jbzip2', a Java bzip2 compression +and decompression library written by Matthew J. Francis. It can be obtained at: + + * LICENSE: + * license/LICENSE.jbzip2.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jbzip2/ + +This product contains a modified portion of 'libdivsufsort', a C API library to construct +the suffix array and the Burrows-Wheeler transformed string for any input string of +a constant-size alphabet written by Yuta Mori. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.libdivsufsort.txt (MIT License) + * HOMEPAGE: + * https://github.com/y-256/libdivsufsort + +This product contains a modified portion of Nitsan Wakart's 'JCTools', Java Concurrency Tools for the JVM, + which can be obtained at: + + * LICENSE: + * license/LICENSE.jctools.txt (ASL2 License) + * HOMEPAGE: + * https://github.com/JCTools/JCTools + +This product optionally depends on 'JZlib', a re-implementation of zlib in +pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product optionally depends on 'Compress-LZF', a Java library for encoding and +decoding data in LZF format, written by Tatu Saloranta. It can be obtained at: + + * LICENSE: + * license/LICENSE.compress-lzf.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/ning/compress + +This product optionally depends on 'lz4', a LZ4 Java compression +and decompression library written by Adrien Grand. It can be obtained at: + + * LICENSE: + * license/LICENSE.lz4.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jpountz/lz4-java + +This product optionally depends on 'lzma-java', a LZMA Java compression +and decompression library, which can be obtained at: + + * LICENSE: + * license/LICENSE.lzma-java.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jponge/lzma-java + +This product contains a modified portion of 'jfastlz', a Java port of FastLZ compression +and decompression library written by William Kinney. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.jfastlz.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jfastlz/ + +This product contains a modified portion of and optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/protobuf + +This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * http://www.bouncycastle.org/ + +This product optionally depends on 'Snappy', a compression library produced +by Google Inc, which can be obtained at: + + * LICENSE: + * license/LICENSE.snappy.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/snappy + +This product optionally depends on 'JBoss Marshalling', an alternative Java +serialization API, which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-marshalling.txt (GNU LGPL 2.1) + * HOMEPAGE: + * http://www.jboss.org/jbossmarshalling + +This product optionally depends on 'Caliper', Google's micro- +benchmarking framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.caliper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/google/caliper + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * http://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, which +can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * http://logging.apache.org/log4j/ + +This product optionally depends on 'Aalto XML', an ultra-high performance 
+non-blocking XML processor, which can be obtained at: + + * LICENSE: + * license/LICENSE.aalto-xml.txt (Apache License 2.0) + * HOMEPAGE: + * http://wiki.fasterxml.com/AaltoHome + +This product contains a modified version of 'HPACK', a Java implementation of +the HTTP/2 HPACK algorithm written by Twitter. It can be obtained at: + + * LICENSE: + * license/LICENSE.hpack.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/twitter/hpack + +This product contains a modified portion of 'Apache Commons Lang', a Java library +provides utilities for the java.lang API, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-lang.txt (Apache License 2.0) + * HOMEPAGE: + * https://commons.apache.org/proper/commons-lang/ + + +This product contains the Maven wrapper scripts from 'Maven Wrapper', that provides an easy way to ensure a user has everything necessary to run the Maven build. + + * LICENSE: + * license/LICENSE.mvn-wrapper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/takari/maven-wrapper diff --git a/solr/licenses/netty-common-4.1.29.Final.jar.sha1 b/solr/licenses/netty-common-4.1.29.Final.jar.sha1 new file mode 100644 index 000000000000..af74679e33f7 --- /dev/null +++ b/solr/licenses/netty-common-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +a5d6a735ed07d8f197daa48db7f097cfc971ee5e diff --git a/solr/licenses/netty-common-LICENSE-ASL.txt b/solr/licenses/netty-common-LICENSE-ASL.txt new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/solr/licenses/netty-common-LICENSE-ASL.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/solr/licenses/netty-common-NOTICE.txt b/solr/licenses/netty-common-NOTICE.txt new file mode 100644 index 000000000000..f973663670b4 --- /dev/null +++ b/solr/licenses/netty-common-NOTICE.txt @@ -0,0 +1,223 @@ + + The Netty Project + ================= + +Please visit the Netty web site for more information: + + * http://netty.io/ + +Copyright 2014 The Netty Project + +The Netty Project licenses this file to you under the Apache License, +version 2.0 (the "License"); you may not use this file except in compliance +with the License. 
You may obtain a copy of the License at: + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations +under the License. + +Also, please refer to each LICENSE..txt file, which is located in +the 'license' directory of the distribution file, for the license terms of the +components that this product depends on. + +------------------------------------------------------------------------------- +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified portion of 'Webbit', an event based +WebSocket and HTTP server, which can be obtained at: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product contains a modified portion of 'SLF4J', a simple logging +facade for Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * http://www.slf4j.org/ + +This product contains a modified portion of 'Apache Harmony', an open source +Java SE, which can be obtained at: + + * NOTICE: + * license/NOTICE.harmony.txt + * LICENSE: + * license/LICENSE.harmony.txt (Apache License 2.0) + * 
HOMEPAGE: + * http://archive.apache.org/dist/harmony/ + +This product contains a modified portion of 'jbzip2', a Java bzip2 compression +and decompression library written by Matthew J. Francis. It can be obtained at: + + * LICENSE: + * license/LICENSE.jbzip2.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jbzip2/ + +This product contains a modified portion of 'libdivsufsort', a C API library to construct +the suffix array and the Burrows-Wheeler transformed string for any input string of +a constant-size alphabet written by Yuta Mori. It can be obtained at: + + * LICENSE: + * license/LICENSE.libdivsufsort.txt (MIT License) + * HOMEPAGE: + * https://github.com/y-256/libdivsufsort + +This product contains a modified portion of Nitsan Wakart's 'JCTools', Java Concurrency Tools for the JVM, + which can be obtained at: + + * LICENSE: + * license/LICENSE.jctools.txt (ASL2 License) + * HOMEPAGE: + * https://github.com/JCTools/JCTools + +This product optionally depends on 'JZlib', a re-implementation of zlib in +pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product optionally depends on 'Compress-LZF', a Java library for encoding and +decoding data in LZF format, written by Tatu Saloranta. It can be obtained at: + + * LICENSE: + * license/LICENSE.compress-lzf.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/ning/compress + +This product optionally depends on 'lz4', a LZ4 Java compression +and decompression library written by Adrien Grand. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.lz4.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jpountz/lz4-java + +This product optionally depends on 'lzma-java', a LZMA Java compression +and decompression library, which can be obtained at: + + * LICENSE: + * license/LICENSE.lzma-java.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jponge/lzma-java + +This product contains a modified portion of 'jfastlz', a Java port of FastLZ compression +and decompression library written by William Kinney. It can be obtained at: + + * LICENSE: + * license/LICENSE.jfastlz.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jfastlz/ + +This product contains a modified portion of and optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/protobuf + +This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * http://www.bouncycastle.org/ + +This product optionally depends on 'Snappy', a compression library produced +by Google Inc, which can be obtained at: + + * LICENSE: + * license/LICENSE.snappy.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/snappy + +This product optionally depends on 'JBoss Marshalling', an alternative Java +serialization API, which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-marshalling.txt (GNU LGPL 2.1) + * HOMEPAGE: + * http://www.jboss.org/jbossmarshalling + +This product optionally depends on 'Caliper', Google's micro- +benchmarking framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.caliper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/google/caliper + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * http://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, which +can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * http://logging.apache.org/log4j/ + +This product optionally depends on 'Aalto XML', an ultra-high performance +non-blocking XML processor, which can be obtained at: + + * LICENSE: + * license/LICENSE.aalto-xml.txt (Apache License 2.0) + * HOMEPAGE: + * http://wiki.fasterxml.com/AaltoHome + +This product contains a modified version of 'HPACK', a Java implementation of +the HTTP/2 HPACK algorithm written by Twitter. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.hpack.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/twitter/hpack + +This product contains a modified portion of 'Apache Commons Lang', a Java library +provides utilities for the java.lang API, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-lang.txt (Apache License 2.0) + * HOMEPAGE: + * https://commons.apache.org/proper/commons-lang/ + + +This product contains the Maven wrapper scripts from 'Maven Wrapper', that provides an easy way to ensure a user has everything necessary to run the Maven build. + + * LICENSE: + * license/LICENSE.mvn-wrapper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/takari/maven-wrapper diff --git a/solr/licenses/netty-handler-4.1.29.Final.jar.sha1 b/solr/licenses/netty-handler-4.1.29.Final.jar.sha1 new file mode 100644 index 000000000000..643a697e97b6 --- /dev/null +++ b/solr/licenses/netty-handler-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +1acf1d94799296a2517533ec75ce7e155e9c4ea7 diff --git a/solr/licenses/netty-handler-LICENSE-ASL.txt b/solr/licenses/netty-handler-LICENSE-ASL.txt new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/solr/licenses/netty-handler-LICENSE-ASL.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/solr/licenses/netty-handler-NOTICE.txt b/solr/licenses/netty-handler-NOTICE.txt new file mode 100644 index 000000000000..f973663670b4 --- /dev/null +++ b/solr/licenses/netty-handler-NOTICE.txt @@ -0,0 +1,223 @@ + + The Netty Project + ================= + +Please visit the Netty web site for more information: + + * http://netty.io/ + +Copyright 2014 The Netty Project + +The Netty Project licenses this file to you under the Apache License, +version 2.0 (the "License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at: + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations +under the License. + +Also, please refer to each LICENSE..txt file, which is located in +the 'license' directory of the distribution file, for the license terms of the +components that this product depends on. + +------------------------------------------------------------------------------- +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. 
Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified portion of 'Webbit', an event based +WebSocket and HTTP server, which can be obtained at: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product contains a modified portion of 'SLF4J', a simple logging +facade for Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * http://www.slf4j.org/ + +This product contains a modified portion of 'Apache Harmony', an open source +Java SE, which can be obtained at: + + * NOTICE: + * license/NOTICE.harmony.txt + * LICENSE: + * license/LICENSE.harmony.txt (Apache License 2.0) + * HOMEPAGE: + * http://archive.apache.org/dist/harmony/ + +This product contains a modified portion of 'jbzip2', a Java bzip2 compression +and decompression library written by Matthew J. Francis. It can be obtained at: + + * LICENSE: + * license/LICENSE.jbzip2.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jbzip2/ + +This product contains a modified portion of 'libdivsufsort', a C API library to construct +the suffix array and the Burrows-Wheeler transformed string for any input string of +a constant-size alphabet written by Yuta Mori. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.libdivsufsort.txt (MIT License) + * HOMEPAGE: + * https://github.com/y-256/libdivsufsort + +This product contains a modified portion of Nitsan Wakart's 'JCTools', Java Concurrency Tools for the JVM, + which can be obtained at: + + * LICENSE: + * license/LICENSE.jctools.txt (ASL2 License) + * HOMEPAGE: + * https://github.com/JCTools/JCTools + +This product optionally depends on 'JZlib', a re-implementation of zlib in +pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product optionally depends on 'Compress-LZF', a Java library for encoding and +decoding data in LZF format, written by Tatu Saloranta. It can be obtained at: + + * LICENSE: + * license/LICENSE.compress-lzf.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/ning/compress + +This product optionally depends on 'lz4', a LZ4 Java compression +and decompression library written by Adrien Grand. It can be obtained at: + + * LICENSE: + * license/LICENSE.lz4.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jpountz/lz4-java + +This product optionally depends on 'lzma-java', a LZMA Java compression +and decompression library, which can be obtained at: + + * LICENSE: + * license/LICENSE.lzma-java.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jponge/lzma-java + +This product contains a modified portion of 'jfastlz', a Java port of FastLZ compression +and decompression library written by William Kinney. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.jfastlz.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jfastlz/ + +This product contains a modified portion of and optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/protobuf + +This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * http://www.bouncycastle.org/ + +This product optionally depends on 'Snappy', a compression library produced +by Google Inc, which can be obtained at: + + * LICENSE: + * license/LICENSE.snappy.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/snappy + +This product optionally depends on 'JBoss Marshalling', an alternative Java +serialization API, which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-marshalling.txt (GNU LGPL 2.1) + * HOMEPAGE: + * http://www.jboss.org/jbossmarshalling + +This product optionally depends on 'Caliper', Google's micro- +benchmarking framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.caliper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/google/caliper + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * http://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, which +can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * http://logging.apache.org/log4j/ + +This product optionally depends on 'Aalto XML', an ultra-high performance 
+non-blocking XML processor, which can be obtained at: + + * LICENSE: + * license/LICENSE.aalto-xml.txt (Apache License 2.0) + * HOMEPAGE: + * http://wiki.fasterxml.com/AaltoHome + +This product contains a modified version of 'HPACK', a Java implementation of +the HTTP/2 HPACK algorithm written by Twitter. It can be obtained at: + + * LICENSE: + * license/LICENSE.hpack.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/twitter/hpack + +This product contains a modified portion of 'Apache Commons Lang', a Java library +provides utilities for the java.lang API, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-lang.txt (Apache License 2.0) + * HOMEPAGE: + * https://commons.apache.org/proper/commons-lang/ + + +This product contains the Maven wrapper scripts from 'Maven Wrapper', that provides an easy way to ensure a user has everything necessary to run the Maven build. + + * LICENSE: + * license/LICENSE.mvn-wrapper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/takari/maven-wrapper diff --git a/solr/licenses/netty-resolver-4.1.29.Final.jar.sha1 b/solr/licenses/netty-resolver-4.1.29.Final.jar.sha1 new file mode 100644 index 000000000000..8571f08ea6ab --- /dev/null +++ b/solr/licenses/netty-resolver-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +bbec1dc913732e4773893c14d795b15d6c1e878e diff --git a/solr/licenses/netty-resolver-LICENSE-ASL.txt b/solr/licenses/netty-resolver-LICENSE-ASL.txt new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/solr/licenses/netty-resolver-LICENSE-ASL.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/solr/licenses/netty-resolver-NOTICE.txt b/solr/licenses/netty-resolver-NOTICE.txt new file mode 100644 index 000000000000..f973663670b4 --- /dev/null +++ b/solr/licenses/netty-resolver-NOTICE.txt @@ -0,0 +1,223 @@ + + The Netty Project + ================= + +Please visit the Netty web site for more information: + + * http://netty.io/ + +Copyright 2014 The Netty Project + +The Netty Project licenses this file to you under the Apache License, +version 2.0 (the "License"); you may not use this file except in compliance +with the License. 
You may obtain a copy of the License at: + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations +under the License. + +Also, please refer to each LICENSE..txt file, which is located in +the 'license' directory of the distribution file, for the license terms of the +components that this product depends on. + +------------------------------------------------------------------------------- +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified portion of 'Webbit', an event based +WebSocket and HTTP server, which can be obtained at: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product contains a modified portion of 'SLF4J', a simple logging +facade for Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * http://www.slf4j.org/ + +This product contains a modified portion of 'Apache Harmony', an open source +Java SE, which can be obtained at: + + * NOTICE: + * license/NOTICE.harmony.txt + * LICENSE: + * license/LICENSE.harmony.txt (Apache License 2.0) + * 
HOMEPAGE: + * http://archive.apache.org/dist/harmony/ + +This product contains a modified portion of 'jbzip2', a Java bzip2 compression +and decompression library written by Matthew J. Francis. It can be obtained at: + + * LICENSE: + * license/LICENSE.jbzip2.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jbzip2/ + +This product contains a modified portion of 'libdivsufsort', a C API library to construct +the suffix array and the Burrows-Wheeler transformed string for any input string of +a constant-size alphabet written by Yuta Mori. It can be obtained at: + + * LICENSE: + * license/LICENSE.libdivsufsort.txt (MIT License) + * HOMEPAGE: + * https://github.com/y-256/libdivsufsort + +This product contains a modified portion of Nitsan Wakart's 'JCTools', Java Concurrency Tools for the JVM, + which can be obtained at: + + * LICENSE: + * license/LICENSE.jctools.txt (ASL2 License) + * HOMEPAGE: + * https://github.com/JCTools/JCTools + +This product optionally depends on 'JZlib', a re-implementation of zlib in +pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product optionally depends on 'Compress-LZF', a Java library for encoding and +decoding data in LZF format, written by Tatu Saloranta. It can be obtained at: + + * LICENSE: + * license/LICENSE.compress-lzf.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/ning/compress + +This product optionally depends on 'lz4', a LZ4 Java compression +and decompression library written by Adrien Grand. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.lz4.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jpountz/lz4-java + +This product optionally depends on 'lzma-java', a LZMA Java compression +and decompression library, which can be obtained at: + + * LICENSE: + * license/LICENSE.lzma-java.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jponge/lzma-java + +This product contains a modified portion of 'jfastlz', a Java port of FastLZ compression +and decompression library written by William Kinney. It can be obtained at: + + * LICENSE: + * license/LICENSE.jfastlz.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jfastlz/ + +This product contains a modified portion of and optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/protobuf + +This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * http://www.bouncycastle.org/ + +This product optionally depends on 'Snappy', a compression library produced +by Google Inc, which can be obtained at: + + * LICENSE: + * license/LICENSE.snappy.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/snappy + +This product optionally depends on 'JBoss Marshalling', an alternative Java +serialization API, which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-marshalling.txt (GNU LGPL 2.1) + * HOMEPAGE: + * http://www.jboss.org/jbossmarshalling + +This product optionally depends on 'Caliper', Google's micro- +benchmarking framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.caliper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/google/caliper + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * http://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, which +can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * http://logging.apache.org/log4j/ + +This product optionally depends on 'Aalto XML', an ultra-high performance +non-blocking XML processor, which can be obtained at: + + * LICENSE: + * license/LICENSE.aalto-xml.txt (Apache License 2.0) + * HOMEPAGE: + * http://wiki.fasterxml.com/AaltoHome + +This product contains a modified version of 'HPACK', a Java implementation of +the HTTP/2 HPACK algorithm written by Twitter. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.hpack.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/twitter/hpack + +This product contains a modified portion of 'Apache Commons Lang', a Java library +provides utilities for the java.lang API, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-lang.txt (Apache License 2.0) + * HOMEPAGE: + * https://commons.apache.org/proper/commons-lang/ + + +This product contains the Maven wrapper scripts from 'Maven Wrapper', that provides an easy way to ensure a user has everything necessary to run the Maven build. + + * LICENSE: + * license/LICENSE.mvn-wrapper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/takari/maven-wrapper diff --git a/solr/licenses/netty-transport-4.1.29.Final.jar.sha1 b/solr/licenses/netty-transport-4.1.29.Final.jar.sha1 new file mode 100644 index 000000000000..6515f42636e6 --- /dev/null +++ b/solr/licenses/netty-transport-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +c190b90f70e2ae8a48c068afad709e8728fcaa39 diff --git a/solr/licenses/netty-transport-LICENSE-ASL.txt b/solr/licenses/netty-transport-LICENSE-ASL.txt new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/solr/licenses/netty-transport-LICENSE-ASL.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/solr/licenses/netty-transport-NOTICE.txt b/solr/licenses/netty-transport-NOTICE.txt new file mode 100644 index 000000000000..f973663670b4 --- /dev/null +++ b/solr/licenses/netty-transport-NOTICE.txt @@ -0,0 +1,223 @@ + + The Netty Project + ================= + +Please visit the Netty web site for more information: + + * http://netty.io/ + +Copyright 2014 The Netty Project + +The Netty Project licenses this file to you under the Apache License, +version 2.0 (the "License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at: + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations +under the License. + +Also, please refer to each LICENSE..txt file, which is located in +the 'license' directory of the distribution file, for the license terms of the +components that this product depends on. + +------------------------------------------------------------------------------- +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. 
Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified portion of 'Webbit', an event based +WebSocket and HTTP server, which can be obtained at: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product contains a modified portion of 'SLF4J', a simple logging +facade for Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * http://www.slf4j.org/ + +This product contains a modified portion of 'Apache Harmony', an open source +Java SE, which can be obtained at: + + * NOTICE: + * license/NOTICE.harmony.txt + * LICENSE: + * license/LICENSE.harmony.txt (Apache License 2.0) + * HOMEPAGE: + * http://archive.apache.org/dist/harmony/ + +This product contains a modified portion of 'jbzip2', a Java bzip2 compression +and decompression library written by Matthew J. Francis. It can be obtained at: + + * LICENSE: + * license/LICENSE.jbzip2.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jbzip2/ + +This product contains a modified portion of 'libdivsufsort', a C API library to construct +the suffix array and the Burrows-Wheeler transformed string for any input string of +a constant-size alphabet written by Yuta Mori. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.libdivsufsort.txt (MIT License) + * HOMEPAGE: + * https://github.com/y-256/libdivsufsort + +This product contains a modified portion of Nitsan Wakart's 'JCTools', Java Concurrency Tools for the JVM, + which can be obtained at: + + * LICENSE: + * license/LICENSE.jctools.txt (ASL2 License) + * HOMEPAGE: + * https://github.com/JCTools/JCTools + +This product optionally depends on 'JZlib', a re-implementation of zlib in +pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product optionally depends on 'Compress-LZF', a Java library for encoding and +decoding data in LZF format, written by Tatu Saloranta. It can be obtained at: + + * LICENSE: + * license/LICENSE.compress-lzf.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/ning/compress + +This product optionally depends on 'lz4', a LZ4 Java compression +and decompression library written by Adrien Grand. It can be obtained at: + + * LICENSE: + * license/LICENSE.lz4.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jpountz/lz4-java + +This product optionally depends on 'lzma-java', a LZMA Java compression +and decompression library, which can be obtained at: + + * LICENSE: + * license/LICENSE.lzma-java.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jponge/lzma-java + +This product contains a modified portion of 'jfastlz', a Java port of FastLZ compression +and decompression library written by William Kinney. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.jfastlz.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jfastlz/ + +This product contains a modified portion of and optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/protobuf + +This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * http://www.bouncycastle.org/ + +This product optionally depends on 'Snappy', a compression library produced +by Google Inc, which can be obtained at: + + * LICENSE: + * license/LICENSE.snappy.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/snappy + +This product optionally depends on 'JBoss Marshalling', an alternative Java +serialization API, which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-marshalling.txt (GNU LGPL 2.1) + * HOMEPAGE: + * http://www.jboss.org/jbossmarshalling + +This product optionally depends on 'Caliper', Google's micro- +benchmarking framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.caliper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/google/caliper + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * http://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, which +can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * http://logging.apache.org/log4j/ + +This product optionally depends on 'Aalto XML', an ultra-high performance 
+non-blocking XML processor, which can be obtained at: + + * LICENSE: + * license/LICENSE.aalto-xml.txt (Apache License 2.0) + * HOMEPAGE: + * http://wiki.fasterxml.com/AaltoHome + +This product contains a modified version of 'HPACK', a Java implementation of +the HTTP/2 HPACK algorithm written by Twitter. It can be obtained at: + + * LICENSE: + * license/LICENSE.hpack.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/twitter/hpack + +This product contains a modified portion of 'Apache Commons Lang', a Java library +provides utilities for the java.lang API, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-lang.txt (Apache License 2.0) + * HOMEPAGE: + * https://commons.apache.org/proper/commons-lang/ + + +This product contains the Maven wrapper scripts from 'Maven Wrapper', that provides an easy way to ensure a user has everything necessary to run the Maven build. + + * LICENSE: + * license/LICENSE.mvn-wrapper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/takari/maven-wrapper diff --git a/solr/licenses/netty-transport-native-epoll-4.1.29.Final.jar.sha1 b/solr/licenses/netty-transport-native-epoll-4.1.29.Final.jar.sha1 new file mode 100644 index 000000000000..6b7f77762213 --- /dev/null +++ b/solr/licenses/netty-transport-native-epoll-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +baca9c954b45c495c7caf328abfac49679d90ce8 diff --git a/solr/licenses/netty-transport-native-epoll-LICENSE-ASL.txt b/solr/licenses/netty-transport-native-epoll-LICENSE-ASL.txt new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/solr/licenses/netty-transport-native-epoll-LICENSE-ASL.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/solr/licenses/netty-transport-native-epoll-NOTICE.txt b/solr/licenses/netty-transport-native-epoll-NOTICE.txt new file mode 100644 index 000000000000..f973663670b4 --- /dev/null +++ b/solr/licenses/netty-transport-native-epoll-NOTICE.txt @@ -0,0 +1,223 @@ + + The Netty Project + ================= + +Please visit the Netty web site for more information: + + * http://netty.io/ + +Copyright 2014 The Netty Project + +The Netty Project licenses this file to you under the Apache License, +version 2.0 (the "License"); you may not use this file except in compliance +with the License. 
You may obtain a copy of the License at: + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations +under the License. + +Also, please refer to each LICENSE..txt file, which is located in +the 'license' directory of the distribution file, for the license terms of the +components that this product depends on. + +------------------------------------------------------------------------------- +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified portion of 'Webbit', an event based +WebSocket and HTTP server, which can be obtained at: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product contains a modified portion of 'SLF4J', a simple logging +facade for Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * http://www.slf4j.org/ + +This product contains a modified portion of 'Apache Harmony', an open source +Java SE, which can be obtained at: + + * NOTICE: + * license/NOTICE.harmony.txt + * LICENSE: + * license/LICENSE.harmony.txt (Apache License 2.0) + * 
HOMEPAGE: + * http://archive.apache.org/dist/harmony/ + +This product contains a modified portion of 'jbzip2', a Java bzip2 compression +and decompression library written by Matthew J. Francis. It can be obtained at: + + * LICENSE: + * license/LICENSE.jbzip2.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jbzip2/ + +This product contains a modified portion of 'libdivsufsort', a C API library to construct +the suffix array and the Burrows-Wheeler transformed string for any input string of +a constant-size alphabet written by Yuta Mori. It can be obtained at: + + * LICENSE: + * license/LICENSE.libdivsufsort.txt (MIT License) + * HOMEPAGE: + * https://github.com/y-256/libdivsufsort + +This product contains a modified portion of Nitsan Wakart's 'JCTools', Java Concurrency Tools for the JVM, + which can be obtained at: + + * LICENSE: + * license/LICENSE.jctools.txt (ASL2 License) + * HOMEPAGE: + * https://github.com/JCTools/JCTools + +This product optionally depends on 'JZlib', a re-implementation of zlib in +pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product optionally depends on 'Compress-LZF', a Java library for encoding and +decoding data in LZF format, written by Tatu Saloranta. It can be obtained at: + + * LICENSE: + * license/LICENSE.compress-lzf.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/ning/compress + +This product optionally depends on 'lz4', a LZ4 Java compression +and decompression library written by Adrien Grand. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.lz4.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jpountz/lz4-java + +This product optionally depends on 'lzma-java', a LZMA Java compression +and decompression library, which can be obtained at: + + * LICENSE: + * license/LICENSE.lzma-java.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jponge/lzma-java + +This product contains a modified portion of 'jfastlz', a Java port of FastLZ compression +and decompression library written by William Kinney. It can be obtained at: + + * LICENSE: + * license/LICENSE.jfastlz.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jfastlz/ + +This product contains a modified portion of and optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/protobuf + +This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * http://www.bouncycastle.org/ + +This product optionally depends on 'Snappy', a compression library produced +by Google Inc, which can be obtained at: + + * LICENSE: + * license/LICENSE.snappy.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/snappy + +This product optionally depends on 'JBoss Marshalling', an alternative Java +serialization API, which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-marshalling.txt (GNU LGPL 2.1) + * HOMEPAGE: + * http://www.jboss.org/jbossmarshalling + +This product optionally depends on 'Caliper', Google's micro- +benchmarking framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.caliper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/google/caliper + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * http://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, which +can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * http://logging.apache.org/log4j/ + +This product optionally depends on 'Aalto XML', an ultra-high performance +non-blocking XML processor, which can be obtained at: + + * LICENSE: + * license/LICENSE.aalto-xml.txt (Apache License 2.0) + * HOMEPAGE: + * http://wiki.fasterxml.com/AaltoHome + +This product contains a modified version of 'HPACK', a Java implementation of +the HTTP/2 HPACK algorithm written by Twitter. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.hpack.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/twitter/hpack + +This product contains a modified portion of 'Apache Commons Lang', a Java library +provides utilities for the java.lang API, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-lang.txt (Apache License 2.0) + * HOMEPAGE: + * https://commons.apache.org/proper/commons-lang/ + + +This product contains the Maven wrapper scripts from 'Maven Wrapper', that provides an easy way to ensure a user has everything necessary to run the Maven build. + + * LICENSE: + * license/LICENSE.mvn-wrapper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/takari/maven-wrapper diff --git a/solr/licenses/netty-transport-native-unix-common-4.1.29.Final.jar.sha1 b/solr/licenses/netty-transport-native-unix-common-4.1.29.Final.jar.sha1 new file mode 100644 index 000000000000..45303f8efba8 --- /dev/null +++ b/solr/licenses/netty-transport-native-unix-common-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +9c9f88ca57873b3c00489ae74fb80bb0cd02d986 diff --git a/solr/licenses/netty-transport-native-unix-common-LICENSE-ASL.txt b/solr/licenses/netty-transport-native-unix-common-LICENSE-ASL.txt new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/solr/licenses/netty-transport-native-unix-common-LICENSE-ASL.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/solr/licenses/netty-transport-native-unix-common-NOTICE.txt b/solr/licenses/netty-transport-native-unix-common-NOTICE.txt new file mode 100644 index 000000000000..f973663670b4 --- /dev/null +++ b/solr/licenses/netty-transport-native-unix-common-NOTICE.txt @@ -0,0 +1,223 @@ + + The Netty Project + ================= + +Please visit the Netty web site for more information: + + * http://netty.io/ + +Copyright 2014 The Netty Project + +The Netty Project licenses this file to you under the Apache License, +version 2.0 (the "License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at: + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations +under the License. + +Also, please refer to each LICENSE..txt file, which is located in +the 'license' directory of the distribution file, for the license terms of the +components that this product depends on. 
+ +------------------------------------------------------------------------------- +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified portion of 'Webbit', an event based +WebSocket and HTTP server, which can be obtained at: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product contains a modified portion of 'SLF4J', a simple logging +facade for Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * http://www.slf4j.org/ + +This product contains a modified portion of 'Apache Harmony', an open source +Java SE, which can be obtained at: + + * NOTICE: + * license/NOTICE.harmony.txt + * LICENSE: + * license/LICENSE.harmony.txt (Apache License 2.0) + * HOMEPAGE: + * http://archive.apache.org/dist/harmony/ + +This product contains a modified portion of 'jbzip2', a Java bzip2 compression +and decompression library written by Matthew J. Francis. It can be obtained at: + + * LICENSE: + * license/LICENSE.jbzip2.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jbzip2/ + +This product contains a modified portion of 'libdivsufsort', a C API library to construct +the suffix array and the Burrows-Wheeler transformed string for any input string of +a constant-size alphabet written by Yuta Mori. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.libdivsufsort.txt (MIT License) + * HOMEPAGE: + * https://github.com/y-256/libdivsufsort + +This product contains a modified portion of Nitsan Wakart's 'JCTools', Java Concurrency Tools for the JVM, + which can be obtained at: + + * LICENSE: + * license/LICENSE.jctools.txt (ASL2 License) + * HOMEPAGE: + * https://github.com/JCTools/JCTools + +This product optionally depends on 'JZlib', a re-implementation of zlib in +pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product optionally depends on 'Compress-LZF', a Java library for encoding and +decoding data in LZF format, written by Tatu Saloranta. It can be obtained at: + + * LICENSE: + * license/LICENSE.compress-lzf.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/ning/compress + +This product optionally depends on 'lz4', a LZ4 Java compression +and decompression library written by Adrien Grand. It can be obtained at: + + * LICENSE: + * license/LICENSE.lz4.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jpountz/lz4-java + +This product optionally depends on 'lzma-java', a LZMA Java compression +and decompression library, which can be obtained at: + + * LICENSE: + * license/LICENSE.lzma-java.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jponge/lzma-java + +This product contains a modified portion of 'jfastlz', a Java port of FastLZ compression +and decompression library written by William Kinney. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.jfastlz.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jfastlz/ + +This product contains a modified portion of and optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/protobuf + +This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * http://www.bouncycastle.org/ + +This product optionally depends on 'Snappy', a compression library produced +by Google Inc, which can be obtained at: + + * LICENSE: + * license/LICENSE.snappy.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/snappy + +This product optionally depends on 'JBoss Marshalling', an alternative Java +serialization API, which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-marshalling.txt (GNU LGPL 2.1) + * HOMEPAGE: + * http://www.jboss.org/jbossmarshalling + +This product optionally depends on 'Caliper', Google's micro- +benchmarking framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.caliper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/google/caliper + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * http://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, which +can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * http://logging.apache.org/log4j/ + +This product optionally depends on 'Aalto XML', an ultra-high performance 
+non-blocking XML processor, which can be obtained at: + + * LICENSE: + * license/LICENSE.aalto-xml.txt (Apache License 2.0) + * HOMEPAGE: + * http://wiki.fasterxml.com/AaltoHome + +This product contains a modified version of 'HPACK', a Java implementation of +the HTTP/2 HPACK algorithm written by Twitter. It can be obtained at: + + * LICENSE: + * license/LICENSE.hpack.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/twitter/hpack + +This product contains a modified portion of 'Apache Commons Lang', a Java library +provides utilities for the java.lang API, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-lang.txt (Apache License 2.0) + * HOMEPAGE: + * https://commons.apache.org/proper/commons-lang/ + + +This product contains the Maven wrapper scripts from 'Maven Wrapper', that provides an easy way to ensure a user has everything necessary to run the Maven build. + + * LICENSE: + * license/LICENSE.mvn-wrapper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/takari/maven-wrapper diff --git a/solr/licenses/simple-xml-2.7.1.jar.sha1 b/solr/licenses/simple-xml-2.7.1.jar.sha1 deleted file mode 100644 index d790fb404a1a..000000000000 --- a/solr/licenses/simple-xml-2.7.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dd91fb744c2ff921407475cb29a1e3fee397d411 diff --git a/solr/licenses/simple-xml-NOTICE.txt b/solr/licenses/simple-xml-NOTICE.txt deleted file mode 100644 index 6f139d6e8d6e..000000000000 --- a/solr/licenses/simple-xml-NOTICE.txt +++ /dev/null @@ -1,2 +0,0 @@ -This product includes software developed by -the SimpleXML project (http://simple.sourceforge.net). 
diff --git a/solr/licenses/simple-xml-safe-2.7.1.jar.sha1 b/solr/licenses/simple-xml-safe-2.7.1.jar.sha1 new file mode 100644 index 000000000000..75e4299c12cc --- /dev/null +++ b/solr/licenses/simple-xml-safe-2.7.1.jar.sha1 @@ -0,0 +1 @@ +045fda5ac6087bc82a209d8cdb73f8d0dbdcfc7b diff --git a/solr/licenses/simple-xml-LICENSE-ASL.txt b/solr/licenses/simple-xml-safe-LICENSE-ASL.txt similarity index 100% rename from solr/licenses/simple-xml-LICENSE-ASL.txt rename to solr/licenses/simple-xml-safe-LICENSE-ASL.txt diff --git a/solr/licenses/simple-xml-safe-NOTICE.txt b/solr/licenses/simple-xml-safe-NOTICE.txt new file mode 100644 index 000000000000..154ac0a6b49b --- /dev/null +++ b/solr/licenses/simple-xml-safe-NOTICE.txt @@ -0,0 +1,2 @@ +This product includes software developed by +the SimpleXML project (http://simple.sourceforge.net). \ No newline at end of file diff --git a/solr/server/etc/jetty-ssl.xml b/solr/server/etc/jetty-ssl.xml index 9ff5accf4022..367064131ba3 100644 --- a/solr/server/etc/jetty-ssl.xml +++ b/solr/server/etc/jetty-ssl.xml @@ -17,6 +17,7 @@ + diff --git a/solr/solr-ref-guide/README.adoc b/solr/solr-ref-guide/README.adoc index d485b9962287..61afa4f4d409 100644 --- a/solr/solr-ref-guide/README.adoc +++ b/solr/solr-ref-guide/README.adoc @@ -23,17 +23,21 @@ Raw content is stored in Asciidoc (`.adoc`) formatted files in the `src/` direct == Prerequisites for Building These files are processed with AsciiDoctor in 2 different ways: -* Via Jekyll to build an HTML browsable version of the Ref Guide. -** Prerequisites: `Ruby` (v2.1 or higher) and the following gems must be installed: -*** `jekyll`: v3.5, not v4.x. Use `gem install jekyll --force --version 3.5.0` to force install of Jekyll 3.5.0. -*** `asciidoctor`: v1.5.6.2, not 1.5.7 or higher. Use `gem install asciidoctor --force --version 1.5.6.2`. NOTE: You must do this before installing `jekyll-asciidoc` or you'll get a version of Asciidoctor that we can't use yet. 
-*** `jekyll-asciidoc`: v2.1 or higher. Use `gem install jekyll-asciidoc` to install. -*** `pygments.rb`: v1.1.2 or higher. Use `gem install pygments.rb` to install. -// The following is only necessary until we are on Asciidoctor 1.5.8 or higher. -// See https://github.com/asciidoctor/asciidoctor/issues/2928 for details of the problem with Slim 4.x and higher. -*** `slim`: v3.0.1 or higher, only to 3.0.9. Do *NOT* use Slim 4.x. Use `gem install slim --force --version 3.0.9` to install. -* Via `asciidoctor-ant` to build the officially released PDF version of the Ref Guide. -** Prerequisites: None beyond those required to use the main Lucene/Solr build: Java, and Ant. +* HTML version, using Jekyll: +** `Ruby` (v2.3 or higher) +** The following gems must be installed: +*** `jekyll`: v3.5, not v4.x. +Use `gem install jekyll --force --version 3.5.0` to force install of v3.5.0. +*** `jekyll-asciidoc`: v2.1 or higher; latest version (3.0.0) is fine. +Use `gem install jekyll-asciidoc` to install. +*** `slim`: v3.0 or higher; latest version (4.0.1) is fine. +Use `gem install slim` to install. +*** `tilt`: v1.0 or higher; latest version (2.0.10) is fine. +Use `gem install tilt` to install. +*** `concurrent-ruby`: v1.0 or higher; latest version (1.1.5) is fine. +Use `gem install concurrent-ruby` to install. +* PDF version, via `asciidoctor-ant`: +** None beyond those required to use the main Lucene/Solr build: Java, and Ant. == Building the Guide For details on building the ref guide, see `ant -p`. 
diff --git a/solr/solr-ref-guide/src/_config.yml.template b/solr/solr-ref-guide/src/_config.yml.template index 7c29200fdeb2..5fc023a6064a 100755 --- a/solr/solr-ref-guide/src/_config.yml.template +++ b/solr/solr-ref-guide/src/_config.yml.template @@ -94,7 +94,7 @@ asciidoctor: <<: *solr-attributes-ref attribute-missing: "warn" icons: "font" - source-highlighter: "pygments" - pygments-css: "style" + source-highlighter: "rouge" + rouge-theme: "thankful-eyes" # NOTE: do *NOT* use an self-empty div tag (ie:
) here - it will break jquery section-toc: "
" diff --git a/solr/solr-ref-guide/src/adding-custom-plugins-in-solrcloud-mode.adoc b/solr/solr-ref-guide/src/adding-custom-plugins-in-solrcloud-mode.adoc index 5738c6f26d48..95756f7bb7e2 100644 --- a/solr/solr-ref-guide/src/adding-custom-plugins-in-solrcloud-mode.adoc +++ b/solr/solr-ref-guide/src/adding-custom-plugins-in-solrcloud-mode.adoc @@ -128,11 +128,11 @@ Step 1: Download a jar from github to the current directory ---- curl -o runtimelibs.jar -LO https://github.com/apache/lucene-solr/blob/master/solr/core/src/test-files/runtimecode/runtimelibs.jar.bin?raw=true ---- -Step 2: Get the `sha256` hash of the jar +Step 2: Get the `sha512` hash of the jar [source,bash] ---- - openssl dgst -sha256 runtimelibs.jar + openssl dgst -sha512 runtimelibs.jar ---- Step 3 : Start solr with runtime lib enabled @@ -154,19 +154,19 @@ Step 5: Add the jar to your collection `gettingstarted` [source,bash] ---- curl http://localhost:8983/solr/gettingstarted/config -H 'Content-type:application/json' -d '{ - "add-package": { "name" : "my-pkg", + "add-runtimelib": { "name" : "testjar", "url":"http://localhost:8000/runtimelibs.jar" , - "sha256" : "d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420"} + "sha512" : "d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420"} }' ---- -Step 6 : Create a new request handler '/test' for the collection 'gettingstarted' from the jar we just added +Step 6: Create a new request handler '/test' for the collection 'gettingstarted' from the jar we just added [source,bash] ---- curl http://localhost:8983/solr/gettingstarted/config -H 'Content-type:application/json' -d '{ "create-requesthandler": { "name" : "/test", - "class": "org.apache.solr.core.RuntimeLibReqHandler", "package" : "my-pkg" } + 'class': 'org.apache.solr.core.RuntimeLibReqHandler', 'runtimeLib' : true} }' ---- @@ -193,23 +193,24 @@ 
output: "loader":"org.apache.solr.core.MemClassLoader"} ---- -=== Updating remote jars +=== Updating Remote Jars Example: -* Host the new jar to a new url. eg: http://localhost:8000/runtimelibs_v2.jar -* get the `sha256` hash of the new jar -* run the update-runtime lib command +* Host the new jar to a new url, e.g., http://localhost:8000/runtimelibs_v2.jar +* Get the `sha512` hash of the new jar. +* Run the `update-runtimelib` command. [source,bash] ---- curl http://localhost:8983/solr/gettingstarted/config -H 'Content-type:application/json' -d '{ - "update-package": { "name" : "my-pkg", + "update-runtimelib": { "name" : "testjar", "url":"http://localhost:8000/runtimelibs_v2.jar" , - "sha256" : ""} + "sha512" : ""} }' ---- -NOTE: Always upload your jar to a new url as the Solr cluster is still referring to the old jar. If the existing jar is modified it can cause errors as the hash may not match + +NOTE: Always upload your jar to a new url as the Solr cluster is still referring to the old jar. If the existing jar is modified it can cause errors as the hash may not match. == Securing Runtime Libraries diff --git a/solr/solr-ref-guide/src/aliases.adoc b/solr/solr-ref-guide/src/aliases.adoc index b4f5e1e63016..c595f2dfbb22 100644 --- a/solr/solr-ref-guide/src/aliases.adoc +++ b/solr/solr-ref-guide/src/aliases.adoc @@ -254,7 +254,7 @@ field during indexing is impractical, or the TRA behavior is desired across mult Dimensional Routed aliases may be used. This feature has been designed to handle an arbitrary number and combination of category and time dimensions in any order, but users are cautioned to carefully consider the total number of collections that will result from such configurations. Collection counts -in the high hundreds or low 1000's begin to pose significant challenges with zookeeper. +in the high hundreds or low 1000's begin to pose significant challenges with ZooKeeper. NOTE: DRA's are a new feature and presently only 2 dimensions are supported. 
More dimensions will be supported in the future (see https://issues.apache.org/jira/browse/SOLR-13628 for progress) @@ -270,9 +270,9 @@ with 30 minute intervals): Note that the initial collection will be a throw away place holder for any DRA containing a category based dimension. Name generation for each sub-part of a collection name is identical to the corresponding potion of the component -dimension type. (e.g. a category value generating __CRA__ or __TRA__ would still produce an error) +dimension type. (e.g., a category value generating __CRA__ or __TRA__ would still produce an error) -WARNING: The prior warning about reindexing documents with different route value applies to every dimensio of +WARNING: The prior warning about reindexing documents with different route value applies to every dimension of a DRA. DRA's are inappropriate for documents where categories or timestamps used in routing will change (this of course applies to other route values in future RA types too). diff --git a/solr/solr-ref-guide/src/authentication-and-authorization-plugins.adoc b/solr/solr-ref-guide/src/authentication-and-authorization-plugins.adoc index 0880b9062b1c..2760c3471f00 100644 --- a/solr/solr-ref-guide/src/authentication-and-authorization-plugins.adoc +++ b/solr/solr-ref-guide/src/authentication-and-authorization-plugins.adoc @@ -159,7 +159,9 @@ include::securing-solr.adoc[tag=list-of-authorization-plugins] [#configuring-audit-logging] == Audit Logging -<> plugins helps you keep an audit trail of events happening in your Solr cluster. Audit logging may e.g. ship data to an external audit service. A custom plugin can be implemented by extending the `AuditLoggerPlugin` class. +<> plugins help you keep an audit trail of events happening in your Solr cluster. +Audit logging may e.g., ship data to an external audit service. +A custom plugin can be implemented by extending the `AuditLoggerPlugin` class. 
== Authenticating in the Admin UI @@ -169,14 +171,14 @@ When authentication is required the Admin UI will presented you with a login dia * <> * <> - + If your plugin of choice is not supported, the Admin UI will still let you perform unrestricted operations, while for restricted operations you will need to interact with Solr by sending HTTP requests instead of through the graphical user interface of the Admin UI. All operations supported by Admin UI can be performed through Solr's RESTful APIs. == Securing Inter-Node Requests -There are a lot of requests that originate from the Solr nodes itself. For example, requests from overseer to nodes, recovery threads, etc. We call these 'inter-node' request. Solr has a special built-in `PKIAuthenticationPlugin` (see below) that will always be available to secure inter-node traffic. +There are a lot of requests that originate from the Solr nodes itself. For example, requests from overseer to nodes, recovery threads, etc. We call these 'inter-node' request. Solr has a special built-in `PKIAuthenticationPlugin` (see below) that will always be available to secure inter-node traffic. -Each Authentication plugin may also decide to secure inter-node requests on its own. They may do this through the so-called `HttpClientBuilder` mechanism, or they may alternatively choose on a per-request basis whether to delegate to PKI or not by overriding a `interceptInternodeRequest()` method from the base class, where any HTTP headers can be set. +Each Authentication plugin may also decide to secure inter-node requests on its own. They may do this through the so-called `HttpClientBuilder` mechanism, or they may alternatively choose on a per-request basis whether to delegate to PKI or not by overriding a `interceptInternodeRequest()` method from the base class, where any HTTP headers can be set. 
=== PKIAuthenticationPlugin diff --git a/solr/solr-ref-guide/src/basic-authentication-plugin.adoc b/solr/solr-ref-guide/src/basic-authentication-plugin.adoc index 10449eeb36a5..6638269494fb 100644 --- a/solr/solr-ref-guide/src/basic-authentication-plugin.adoc +++ b/solr/solr-ref-guide/src/basic-authentication-plugin.adoc @@ -71,7 +71,7 @@ If you are using SolrCloud, you must upload `security.json` to ZooKeeper. You ca bin/solr zk cp file:path_to_local_security.json zk:/security.json -z localhost:9983 ---- -NOTE: If you have defined `ZK_HOST` in `solr.in.sh`/`solr.in.cmd` (see <>) you can omit `-z ` from the above command. +NOTE: If you have defined `ZK_HOST` in `solr.in.sh`/`solr.in.cmd` (see <>) you can omit `-z ` from the above command. === Caveats diff --git a/solr/solr-ref-guide/src/cluster-node-management.adoc b/solr/solr-ref-guide/src/cluster-node-management.adoc index ede5660c8d35..2ac7e66ee82d 100644 --- a/solr/solr-ref-guide/src/cluster-node-management.adoc +++ b/solr/solr-ref-guide/src/cluster-node-management.adoc @@ -132,7 +132,7 @@ Add, edit or delete a cluster-wide property. === CLUSTERPROP Parameters `name`:: -The name of the property. Supported properties names are `autoAddReplicas`, `legacyCloud` , `location`, `maxCoresPerNode` and `urlScheme`. Other properties can be set +The name of the property. Supported properties names are `autoAddReplicas`, `legacyCloud`, `location`, `maxCoresPerNode` and `urlScheme`. Other properties can be set (for example, if you need them for custom plugins) but they must begin with the prefix `ext.`. Unknown properties that don't begin with `ext.` will be rejected. 
`val`:: diff --git a/solr/solr-ref-guide/src/common-query-parameters.adoc b/solr/solr-ref-guide/src/common-query-parameters.adoc index aeb77df3c017..0486ff1a41ae 100644 --- a/solr/solr-ref-guide/src/common-query-parameters.adoc +++ b/solr/solr-ref-guide/src/common-query-parameters.adoc @@ -102,7 +102,7 @@ fq=+popularity:[10 TO *] +section:0 ---- * The document sets from each filter query are cached independently. Thus, concerning the previous examples: use a single `fq` containing two mandatory clauses if those clauses appear together often, and use two separate `fq` parameters if they are relatively independent. (To learn about tuning cache sizes and making sure a filter cache actually exists, see <>.) -* It is also possible to use <> inside the `fq` to cache clauses individually and - among other things - to achieve union of cached filter queries. +* It is also possible to use <> inside the `fq` to cache clauses individually and - among other things - to achieve union of cached filter queries. * As with all parameters: special characters in an URL need to be properly escaped and encoded as hex values. Online tools are available to help you with URL-encoding. For example: http://meyerweb.com/eric/tools/dencoder/. diff --git a/solr/solr-ref-guide/src/distributed-requests.adoc b/solr/solr-ref-guide/src/distributed-requests.adoc index 259aa8ff4b1e..dde0fea64964 100644 --- a/solr/solr-ref-guide/src/distributed-requests.adoc +++ b/solr/solr-ref-guide/src/distributed-requests.adoc @@ -74,18 +74,18 @@ http://localhost:8983/solr/gettingstarted/select?q=*:*&shards=shard1,localhost:7 For finer-grained control, you can directly configure and tune aspects of the concurrency and thread-pooling used within distributed search in Solr. The default configuration favors throughput over latency. -This is done by defining a `shardHandler` in the configuration for your search handler. +This is done by defining a `shardHandlerFactory` in the configuration for your search handler. 
-To add a `shardHandler` to the standard search handler, provide a configuration in `solrconfig.xml`, as in this example: +To add a `shardHandlerFactory` to the standard search handler, provide a configuration in `solrconfig.xml`, as in this example: [source,xml] ---- - + 1000 5000 - + ---- diff --git a/solr/solr-ref-guide/src/enabling-ssl.adoc b/solr/solr-ref-guide/src/enabling-ssl.adoc index 5edff5c856d0..93cf2ea9c2b2 100644 --- a/solr/solr-ref-guide/src/enabling-ssl.adoc +++ b/solr/solr-ref-guide/src/enabling-ssl.adoc @@ -90,6 +90,8 @@ SOLR_SSL_TRUST_STORE_PASSWORD=secret SOLR_SSL_NEED_CLIENT_AUTH=false # Enable clients to authenticate (but not require) SOLR_SSL_WANT_CLIENT_AUTH=false +# Verify client's hostname during SSL handshake +SOLR_SSL_CLIENT_HOSTNAME_VERIFICATION=false # SSL Certificates contain host/ip "peer name" information that is validated by default. Setting # this to false can be useful to disable these checks when re-using a certificate on many hosts SOLR_SSL_CHECK_PEER_NAME=true @@ -101,7 +103,7 @@ SOLR_SSL_TRUST_STORE_TYPE=JKS When you start Solr, the `bin/solr` script includes the settings in `bin/solr.in.sh` and will pass these SSL-related system properties to the JVM. .Client Authentication Settings -WARNING: Enable either SOLR_SSL_NEED_CLIENT_AUTH or SOLR_SSL_WANT_CLIENT_AUTH but not both at the same time. They are mutually exclusive and Jetty will select one of them which may not be what you expect. +WARNING: Enable either SOLR_SSL_NEED_CLIENT_AUTH or SOLR_SSL_WANT_CLIENT_AUTH but not both at the same time. They are mutually exclusive and Jetty will select one of them which may not be what you expect. SOLR_SSL_CLIENT_HOSTNAME_VERIFICATION should be set to true if you only want requests from authenticated host-names to be accepted. 
Similarly, when you start Solr on Windows, the `bin\solr.cmd` script includes the settings in `bin\solr.in.cmd` - uncomment and update the set of properties beginning with `SOLR_SSL_*` to pass these SSL-related system properties to the JVM: @@ -121,6 +123,8 @@ REM Require clients to authenticate set SOLR_SSL_NEED_CLIENT_AUTH=false REM Enable clients to authenticate (but not require) set SOLR_SSL_WANT_CLIENT_AUTH=false +REM Verify client hostname during SSL handshake +set SOLR_SSL_CLIENT_HOSTNAME_VERIFICATION=false REM SSL Certificates contain host/ip "peer name" information that is validated by default. Setting REM this to false can be useful to disable these checks when re-using a certificate on many hosts set SOLR_SSL_CHECK_PEER_NAME=true @@ -243,7 +247,7 @@ If you have set up your ZooKeeper cluster to use a <>) you can omit `-z ` from all of the `bin/solr`/`bin\solr.cmd` commands below. +NOTE: If you have defined `ZK_HOST` in `solr.in.sh`/`solr.in.cmd` (see <>) you can omit `-z ` from all of the `bin/solr`/`bin\solr.cmd` commands below. ==== Create Solr Home Directories for Two Nodes diff --git a/solr/solr-ref-guide/src/how-solrcloud-works.adoc b/solr/solr-ref-guide/src/how-solrcloud-works.adoc index 7f4db3bb69ef..721a6963b4cf 100644 --- a/solr/solr-ref-guide/src/how-solrcloud-works.adoc +++ b/solr/solr-ref-guide/src/how-solrcloud-works.adoc @@ -46,3 +46,9 @@ A SolrCloud cluster consists of some "logical" concepts layered on top of some " * The number of Replicas that each Shard has determines: ** The level of redundancy built into the Collection and how fault tolerant the Cluster can be in the event that some Nodes become unavailable. ** The theoretical limit in the number concurrent search requests that can be processed under heavy load. + +WARNING: Make sure the DNS resolution in your cluster is stable, ie. +for each live host belonging to a Cluster the host name always corresponds to the +same specific IP and physical node. 
For example, in clusters deployed on AWS this would +require setting `preserve_hostname: true` in `/etc/cloud/cloud.cfg`. Changing DNS resolution +of live nodes may lead to unexpected errors. See SOLR-13159 for more details. \ No newline at end of file diff --git a/solr/solr-ref-guide/src/json-facet-api.adoc b/solr/solr-ref-guide/src/json-facet-api.adoc index bb07c2e22811..7f667f41524b 100644 --- a/solr/solr-ref-guide/src/json-facet-api.adoc +++ b/solr/solr-ref-guide/src/json-facet-api.adoc @@ -131,7 +131,6 @@ include::{example-source-dir}JsonRequestApiTest.java[tag=solrj-json-metrics-face The response to the facet request above will start with documents matching the root domain (docs containing "memory" with inStock:true) followed by the requested statistics in a `facets` block: -[...] [source,java] ---- "facets" : { @@ -407,8 +406,98 @@ By default, the ranges used to compute range faceting between `start` and `end` * "all" shorthand for lower, upper, edge, outer |facet |Aggregations, metrics, or nested facets that will be calculated for every returned bucket +|ranges a|List of arbitrary range when specified calculates facet on given ranges rather than `start`, `gap` and `end`. With `start`, `end` and `gap` the width of the range or bucket is always fixed. If range faceting needs to computed on varying range width then, `ranges` should be specified. + +* Specifying `start`, `end` or `gap` along with `ranges` is disallowed and request would fail. +* When `ranges` are specified in the range facet, `hardend`, `include` and `other` parameters are ignored. + +Refer <> +|=== + +==== Arbitrary Range + +An arbitrary range consists of from and to values over which range bucket is computed. This range can be specified in two syntax. + +[width="100%",cols="10%,90%",options="header",] +|=== +|Parameter |Description +|from |The lower bound of the range. When not specified defaults to `*`. +|to |The upper bound of the range. When not specified defaults to `*`. 
+|inclusive_from |A boolean, which if true means that the lower bound `from` is included. This defaults to `true`. +|inclusive_to |A boolean, which if true means that the upper bound `to` is included. This defaults to `false`. +|range a|The range is specified as string. This is semantically similar to `facet.interval` + +* When `range` is specified, all the above parameters (`from`, `to`, etc.) in the range are ignored +* `range` always starts with `(` or `[` and ends with `)` or `]` +** `(` - exclude lower bound +** `[` - include lower bound +** `)` - exclude upper bound +** `]` - include upper bound + +For example, for the range `(5,10]`, 5 is excluded and 10 is included |=== + +===== other with ranges + +`other` parameter is ignored when `ranges` is specified but there are ways to achieve same behavior with `ranges`. + +* `before` - This is equivalent to `[*,some_val)` or just specifying `to` value +* `after` - This is equivalent to `(some_val, *]` or just specifying `from` value +* `between` - This is equivalent to specifying `start`, `end` as `from` and `to` respectively + +===== include with ranges + +`include` parameter is ignored when `ranges` is specified but there are ways to achieve same behavior with `ranges`. `lower`, `upper`, `outer`, `edge` all can be achieved using combination of `inclusive_to` and `inclusive_from`. + +Range facet with `ranges` + +[source,bash] +---- +curl http://localhost:8983/solr/techproducts/query -d ' +{ + "query": "*:*", + "facet": { + "prices": { + "type": "range", + "field": "price", + "ranges": [ + { + "from": 0, + "to": 20, + "inclusive_from": true, + "inclusive_to": false + }, + { + "range": "[40,100)" + } + ] + } + } +}' +---- + +The output from the range facet above would look a bit like: + +[source,json] +---- +{ + "prices": { + "buckets": [ + { + "val": "[0,20)", + "count": 5 + }, + { + "val": "[40,100)", + "count": 2 + } + ] + } +} +---- + +NOTE: When `range` is specified, its value in the request is used as key in the response.
In the other case, key is generated using `from`, `to`, `inclusive_to` and `inclusive_from`. Currently, custom `key` is not supported. + === Heatmap Facet The `heatmap` facet generates a 2D grid of facet counts for documents having spatial data in each grid cell. diff --git a/solr/solr-ref-guide/src/jwt-authentication-plugin.adoc b/solr/solr-ref-guide/src/jwt-authentication-plugin.adoc index 4993149b19c9..b463efb774b1 100644 --- a/solr/solr-ref-guide/src/jwt-authentication-plugin.adoc +++ b/solr/solr-ref-guide/src/jwt-authentication-plugin.adoc @@ -33,36 +33,49 @@ The simplest possible `security.json` for registering the plugin without configu } ---- -The plugin will NOT block anonymous traffic in this mode, since the default for `blockUnknown` is false. It is then possible to start configuring the plugin using REST API calls, which is described below. +The plugin will NOT block anonymous traffic in this mode, since the default for `blockUnknown` is false. It is then possible to start configuring the plugin using REST API calls, which is described in section <>. == Configuration Parameters //TODO: standard is not to put parameters in tables but use labeled lists instead -[%header,format=csv,separator=;] +[%header,format=csv,separator=;,cols="25%,50%,25%"] |=== Key ; Description ; Default blockUnknown ; Set to `true` in order to block requests from users without a token ; `false` -wellKnownUrl ; URL to an https://openid.net/specs/openid-connect-discovery-1_0.html[OpenID Connect Discovery] endpoint ; (no default) -clientId ; Client identifier for use with OpenID Connect ; (no default value) Required to authenticate with Admin UI realm ; Name of the authentication realm to echo back in HTTP 401 responses. Will also be displayed in Admin UI login page ; 'solr-jwt' scope ; Whitespace separated list of valid scopes. If configured, the JWT access token MUST contain a `scope` claim with at least one of the listed scopes. 
Example: `solr:read solr:admin` ; -jwkUrl ; An https URL to a https://tools.ietf.org/html/rfc7517[JWK] keys file. ; Auto configured if `wellKnownUrl` is provided -jwk ; As an alternative to `jwkUrl` you may provide a JSON object here containing the public key(s) of the issuer. ; -iss ; Validates that the `iss` (issuer) claim equals this string ; Auto configured if `wellKnownUrl` is provided -aud ; Validates that the `aud` (audience) claim equals this string ; If `clientId` is configured, require `aud` to match it -requireSub ; Makes `sub` (subject) claim mandatory ; `true` -requireExp ; Makes `exp` (expiry time) claim mandatory ; `true` +requireIss ; Fails requests that lacks an `iss` (issuer) claim ; `true` +requireExp ; Fails requests that lacks an `exp` (expiry time) claim ; `true` algWhitelist ; JSON array with algorithms to accept: `HS256`, `HS384`, `HS512`, `RS256`, `RS384`, `RS512`, `ES256`, `ES384`, `ES512`, `PS256`, `PS384`, `PS512`, `none ; Default is to allow all algorithms jwkCacheDur ; Duration of JWK cache in seconds ; `3600` (1 hour) principalClaim ; What claim id to pull principal from ; `sub` -claimsMatch ; JSON object of claims (key) that must match a regular expression (value). Example: `{ "foo" : "A|B" }` will require the `foo` claim to be either "A" or "B". ; (none) +claimsMatch ; JSON object of claims (key) that must match a regular expression (value). Example: `{ "foo" : "A|B" }` will require the `foo` claim to be either "A" or "B". ; adminUiScope ; Define what scope is requested when logging in from Admin UI ; If not defined, the first scope from `scope` parameter is used -authorizationEndpoint; The URL for the Id Provider's authorization endpoint ; Auto configured if `wellKnownUrl` is provided redirectUris ; Valid location(s) for redirect after external authentication. Takes a string or array of strings. 
Must be the base URL of Solr, e.g., https://solr1.example.com:8983/solr/ and must match the list of redirect URIs registered with the Identity Provider beforehand. ; Defaults to empty list, i.e., any node is assumed to be a valid redirect target. +issuers ; List of issuers (Identity providers) to support. See section <> for configuration syntax ; |=== +=== Issuer Configuration + +This plugin supports one or more token issuers (IdPs). Issuers are configured as a list of JSON objects under the `issuers` configuration key. The first issuer in the list is the "Primary Issuer", which is the one used for logging in to the Admin UI. + +[%header,format=csv,separator=;,cols="25%,50%,25%"] +|=== +Key ; Description ; Default +name ; A unique name of the issuer. Used to manipulate list through API. ; +wellKnownUrl ; URL to an https://openid.net/specs/openid-connect-discovery-1_0.html[OpenID Connect Discovery] endpoint ; +clientId ; Client identifier for use with OpenID Connect. Required to authenticate with Admin UI. Needed for primary issuer only ; +jwksUrl ; A URL to a https://tools.ietf.org/html/rfc7517#section-5[JWKs] endpoint. Must use https protocol. Optionally an array of URLs in which case all public keys from all URLs will be consulted when validating signatures. ; Auto configured if `wellKnownUrl` is provided +jwk ; As an alternative to `jwksUrl` you may provide a static JSON object containing the public key(s) of the issuer. The format is either JWK or JWK Set, see https://tools.ietf.org/html/rfc7517#appendix-A[RFC7517] for examples. ; +iss ; Unique issuer id as configured on the IdP. Incoming tokens must have a matching `iss` claim. Also used to resolve issuer when multiple issuers configured. 
; Auto configured if `wellKnownUrl` is provided +aud ; Validates that the `aud` (audience) claim equals this string ; Uses `clientId` if configured +authorizationEndpoint; The URL for the Id Provider's authorization endpoint ; Auto configured if `wellKnownUrl` is provided +|=== + +TIP: For backwards compatibility, all the configuration keys for the primary issuer may be configured as top-level keys, except `name`. + == More Configuration Examples -=== With JWK URL +=== With JWKS URL To start enforcing authentication for all users, requiring a valid JWT in the `Authorization` header, you need to configure the plugin with one or more https://tools.ietf.org/html/rfc7517[JSON Web Key]s (JWK). This is a JSON document containing the key used to sign/encrypt the JWT. It could be a symmetric or asymmetric key. The JWK can either be fetched (and cached) from an external HTTPS endpoint or specified directly in `security.json`. Below is an example of the former: [source,json] @@ -71,11 +84,13 @@ To start enforcing authentication for all users, requiring a valid JWT in the `A "authentication": { "class": "solr.JWTAuthPlugin", "blockUnknown": true, - "jwkUrl": "https://my.key.server/jwk.json" + "jwksUrl": "https://my.key.server/jwk.json" } } ---- +TIP: The configuration key `jwkUrl` is also supported as an alternative to `jwksUrl` for backwards compatibility with early versions of the plugin. + === With Admin UI Support The next example shows configuring using https://openid.net/specs/openid-connect-discovery-1_0.html[OpenID Connect Discovery] with a well-known URI for automatic configuration of many common settings, including ability to use the Admin UI with an OpenID Connect enabled Identity Provider. @@ -92,10 +107,10 @@ The next example shows configuring using https://openid.net/specs/openid-connect } ---- -In this case, `jwkUrl`, `iss` and `authorizationEndpoint` will be automatically configured from the fetched configuration. 
+In this case, `jwksUrl`, `iss` and `authorizationEndpoint` will be automatically configured from the fetched configuration. === Complex Example -Let's look at a more complex configuration, this time with a static embedded JWK: +Let's look at a more complex configuration, this time with two issuers configured, where one uses a static embedded JWK: [source,json] ---- @@ -103,19 +118,29 @@ Let's look at a more complex configuration, this time with a static embedded JWK "authentication": { "class": "solr.JWTAuthPlugin", <1> "blockUnknown": true, <2> - "jwk": { <3> - "e": "AQAB", - "kid": "k1", - "kty": "RSA", - "n": "3ZF6wBGPMsLzsS1KLghxaVpZtXD3nTLzDm0c974i9-KNU_1rhhBeiVfS64VfEQmP8SA470jEy7yWcvnz9GvG-YAlm9iOwVF7jLl2awdws0ocFjdSPT3SjPQKzOeMO7G9XqNTkrvoFCn1YAi26fbhhcqkwZDoeTcHQdRN32frzccuPhZrwImApIedroKLlKWv2IvPDnz2Bpe2WWVc2HdoWYqEVD3p_BEy8f-RTSHK3_8kDDF9yAwI9jx7CK1_C-eYxXltm-6rpS5NGyFm0UNTZMxVU28Tl7LX8Vb6CikyCQ9YRCtk_CvpKWmEuKEp9I28KHQNmGkDYT90nt3vjbCXxw" - }, - "clientId": "solr-client-12345", <4> - "iss": "https://example.com/idp", <5> - "aud": "https://example.com/solr", <6> - "principalClaim": "solruid", <7> - "claimsMatch": { "foo" : "A|B", "dept" : "IT" }, <8> - "scope": "solr:read solr:write solr:admin", <9> - "algWhitelist" : [ "RS256", "RS384", "RS512" ] <10> + "principalClaim": "solruid", <3> + "claimsMatch": { "foo" : "A|B", "dept" : "IT" }, <4> + "scope": "solr:read solr:write solr:admin", <5> + "algWhitelist" : [ "RS256", "RS384", "RS512" ], <6> + "issuers": [ <7> + { + "name": "example1-static", <8> + "jwk": { <9> + "e": "AQAB", + "kid": "k1", + "kty": "RSA", + "n": "3ZF6w....vjbCXxw" + }, + "clientId": "solr-client-12345", <10> + "iss": "https://example.com/idp", <11> + "aud": "https://example.com/solr" <12> + }, + { + "name": "example2", + "wellKnownUrl": "https://example2.com/.well-known/oidc", <13> + "aud": "https://example2.com/solr" + } + ] } } ---- @@ -124,19 +149,22 @@ Let's comment on this config: <1> Plugin class <2> Make sure to block anyone 
without a valid token -<3> Here we pass the JWK inline instead of referring to a URL with `jwkUrl` -<4> Set the client id registered with Identity Provider -<5> The issuer claim must match "https://example.com/idp" -<6> The audience claim must match "https://example.com/solr" -<7> Fetch the user id from another claim than the default `sub` -<8> Require that the `roles` claim is one of "A" or "B" and that the `dept` claim is "IT" -<9> Require one of the scopes `solr:read`, `solr:write` or `solr:admin` -<10> Only accept RSA algorithms for signatures +<3> Fetch the user id from another claim than the default `sub` +<4> Require that the `roles` claim is one of "A" or "B" and that the `dept` claim is "IT" +<5> Require one of the scopes `solr:read`, `solr:write` or `solr:admin` +<6> Only accept RSA algorithms for signatures +<7> Array of issuer configurations +<8> Each issuer object should have a unique name +<9> Here we pass the JWK inline instead of referring to a URL with `jwksUrl` +<10> Set the client id registered with Identity Provider +<11> Configure the issuer id. Will be used for validating tokens. A token's 'iss' claim must match one of the configured issuer IDs. +<12> Configure the audience claim. A token's 'aud' claim must match 'aud' for one of the configured issuers. +<13> This issuer is auto configured through discovery, so 'iss' and JWK settings are not required == Editing JWT Authentication Plugin Configuration -All properties mentioned above can be set or changed using the Config Edit API. You can thus start with a simple configuration with only `class` configured and then configure the rest using the API. +All properties mentioned above, except the 'issuers' array, can be set or changed using the Config Edit API. You can thus start with a simple configuration with only `class` configured and then configure the rest using the API. 
=== Set a Configuration Property @@ -167,7 +195,9 @@ curl http://localhost:8983/api/cluster/security/authentication -H 'Content-type: ==== -- -Insert a valid JWT access token in compact serialization format (`xxx.yyy.zzz` above) to authenticate with Solr once the plugin is active. +Insert a valid JWT access token in compact serialization format (`xxx.yyy.zzz` above) to authenticate with Solr once the plugin is active, or leave `blockUnknown=false` until configuration is complete and then switch it to `true` to start enforcing. + +NOTE: There is currently no support for adding multiple token issuers through the REST API, but you can configure one issuer through the API by using the 'issuer' properties as top-level properties. == Using Clients with JWT Auth @@ -188,7 +218,7 @@ curl -H "Authorization: Bearer xxxxxx.xxxxxx.xxxxxx" http://localhost:8983/solr/ === Admin UI -When this plugin is enabled, users will be redirected to a login page in the Admin UI once they attempt to do a restricted action. The page has a button that users will click and be redirected to the Identity Provider's login page. Once authenticated, the user will be redirected back to Solr Admin UI to the last known location. The session will last as long as the JWT token expiry time and is valid for one Solr server only. That means you have to login again when navigating to another Solr node. There is also a logout menu in the left column where user can explicitly log out. +When this plugin is enabled, users will be redirected to a login page in the Admin UI once they attempt to do a restricted action. The page has a button that users will click and be redirected to the Identity Provider's login page. If more than one issuer (IdP) is configured, the first in the list will be used for Admin UI. Once authenticated, the user will be redirected back to Solr Admin UI to the last known location. The session will last as long as the JWT token expiry time and is valid for one Solr server only.
That means you have to login again when navigating to another Solr node. There is also a logout menu in the left column where user can explicitly log out. == Using the Solr Control Script with JWT Auth diff --git a/solr/solr-ref-guide/src/kerberos-authentication-plugin.adoc b/solr/solr-ref-guide/src/kerberos-authentication-plugin.adoc index ed0b3cb900ad..90682be06ebd 100644 --- a/solr/solr-ref-guide/src/kerberos-authentication-plugin.adoc +++ b/solr/solr-ref-guide/src/kerberos-authentication-plugin.adoc @@ -298,7 +298,7 @@ Once the configuration is complete, you can start Solr with the `bin/solr` scrip bin/solr -c -z server1:2181,server2:2181,server3:2181/solr ---- -NOTE: If you have defined `ZK_HOST` in `solr.in.sh`/`solr.in.cmd` (see <>) you can omit `-z ` from the above command. +NOTE: If you have defined `ZK_HOST` in `solr.in.sh`/`solr.in.cmd` (see <>) you can omit `-z ` from the above command. === Test the Configuration diff --git a/solr/solr-ref-guide/src/metrics-reporting.adoc b/solr/solr-ref-guide/src/metrics-reporting.adoc index dafa172bcdbf..ecefda77c118 100644 --- a/solr/solr-ref-guide/src/metrics-reporting.adoc +++ b/solr/solr-ref-guide/src/metrics-reporting.adoc @@ -87,7 +87,7 @@ The metrics available in your system can be customized by modifying the `> for more information about the `solr.xml` file, where to find it, and how to edit it. -=== The Element +=== The Element This section of `solr.xml` allows you to define the system properties which are considered system-sensitive and should not be exposed via the Metrics API. @@ -106,11 +106,11 @@ If this section is not defined, the following default configuration is used whic ---- -=== The Element +=== The Element Reporters consume the metrics data generated by Solr. See the section <> below for more details on how to configure custom reporters. -=== The Element +=== The Element Suppliers help Solr generate metrics data. 
The `` section of `solr.xml` allows you to define your own implementations of metrics and configure parameters for them. diff --git a/solr/solr-ref-guide/src/monitoring-solr-with-prometheus-and-grafana.adoc b/solr/solr-ref-guide/src/monitoring-solr-with-prometheus-and-grafana.adoc index 08edd1146d5c..6c4b7739ebad 100644 --- a/solr/solr-ref-guide/src/monitoring-solr-with-prometheus-and-grafana.adoc +++ b/solr/solr-ref-guide/src/monitoring-solr-with-prometheus-and-grafana.adoc @@ -108,12 +108,28 @@ The number of seconds between collecting metrics from Solr. The `solr-exporter` The Solr's metrics exposed by `solr-exporter` can be seen at: `\http://localhost:9983/solr/admin/metrics`. -=== Getting metrics from a secure Solr(Cloud) +=== Environment Variable Options -Your Solr(Cloud) might be secured by measures described in <>. The security configuration can be injected into `solr-exporter` using environment variables in a fashion similar to other clients using <>. This is possible because the main script picks up two external environment variables and passes them on to the Java process: +The start commands provided with the Prometheus Exporter support the use of custom java options through the following environment variables: -* `JAVA_OPTS` allows to add extra JVM options -* `CLASSPATH_PREFIX` allows to add extra libraries +`JAVA_HEAP`:: +Sets the initial (`Xms`) and max (`Xmx`) Java heap size. The default is `512m`. + +`JAVA_MEM`:: +Custom java memory settings (e.g., `-Xms1g -Xmx2g`). This is ignored if `JAVA_HEAP` is provided. + +`GC_TUNE`:: +Custom Java garbage collection settings. The default is `-XX:+UseG1GC`. + +`JAVA_OPTS`:: +Extra JVM options. + +`CLASSPATH_PREFIX`:: +Location of extra libraries to load when starting the `solr-exporter`. + +=== Getting Metrics from a Secured SolrCloud + +Your SolrCloud might be secured by measures described in <>. 
The security configuration can be injected into `solr-exporter` using environment variables in a fashion similar to other clients using <>. This is possible because the main script picks up <> and passes them on to the Java process. Example for a SolrCloud instance secured by <>, <> and <>: diff --git a/solr/solr-ref-guide/src/ping.adoc b/solr/solr-ref-guide/src/ping.adoc index c1de95c837bb..ed4e7ce481be 100644 --- a/solr/solr-ref-guide/src/ping.adoc +++ b/solr/solr-ref-guide/src/ping.adoc @@ -69,7 +69,7 @@ This command will ping all replicas of the given collection name for a response: Both API calls have the same output. A status=OK indicates that the nodes are responding. -*SolrJ Example* +*SolrJ Example with SolrPing* [source,java] ---- @@ -78,3 +78,12 @@ ping.getParams().add("distrib", "true"); //To make it a distributed request agai rsp = ping.process(solrClient, collectionName); int status = rsp.getStatus(); ---- + +*SolrJ Example with SolrClient* + +[source,java] +---- +SolrClient client = new HttpSolrClient.Builder(solrUrl).build(); +SolrPingResponse pingResponse = client.ping(collectionName); +int status = pingResponse.getStatus(); +---- diff --git a/solr/solr-ref-guide/src/query-settings-in-solrconfig.adoc b/solr/solr-ref-guide/src/query-settings-in-solrconfig.adoc index 20e83cc54ced..8554234de93e 100644 --- a/solr/solr-ref-guide/src/query-settings-in-solrconfig.adoc +++ b/solr/solr-ref-guide/src/query-settings-in-solrconfig.adoc @@ -29,25 +29,33 @@ These settings are all configured in child elements of the `` element in == Caches -Solr caches are associated with a specific instance of an Index Searcher, a specific view of an index that doesn't change during the lifetime of that searcher. As long as that Index Searcher is being used, any items in its cache will be valid and available for reuse. 
Caching in Solr differs from caching in many other applications in that cached Solr objects do not expire after a time interval; instead, they remain valid for the lifetime of the Index Searcher. +Solr caches are associated with a specific instance of an Index Searcher, a specific view of an index that doesn't change during the lifetime of that searcher. As long as that Index Searcher is being used, any items in its cache will be valid and available for reuse. By default cached Solr objects do not expire after a time interval; instead, they remain valid for the lifetime of the Index Searcher. Idle time-based expiration can be enabled by using `maxIdleTime` option. When a new searcher is opened, the current searcher continues servicing requests while the new one auto-warms its cache. The new searcher uses the current searcher's cache to pre-populate its own. When the new searcher is ready, it is registered as the current searcher and begins handling all new search requests. The old searcher will be closed once it has finished servicing all its requests. -In Solr, there are three cache implementations: `solr.search.LRUCache`, `solr.search.FastLRUCache,` and `solr.search.LFUCache`. +=== Cache Implementations + +In Solr, the following cache implementations are available: recommended `solr.search.CaffeineCache`, and legacy implementations: `solr.search.LRUCache`, `solr.search.FastLRUCache,` and `solr.search.LFUCache`. + +The `CaffeineCache` is an implementation backed by the https://github.com/ben-manes/caffeine[Caffeine caching library]. By default it uses a Window TinyLFU (W-TinyLFU) eviction policy, which allows the eviction based on both frequency and recency of use in O(1) time with a small footprint. Generally this cache implementation is recommended over other legacy caches as it usually offers lower memory footprint, higher hit ratio and better multi-threaded performance over legacy caches. The acronym LRU stands for Least Recently Used. 
When an LRU cache fills up, the entry with the oldest last-accessed timestamp is evicted to make room for the new entry. The net effect is that entries that are accessed frequently tend to stay in the cache, while those that are not accessed frequently tend to drop out and will be re-fetched from the index if needed again. The `FastLRUCache`, which was introduced in Solr 1.4, is designed to be lock-free, so it is well suited for caches which are hit several times in a request. -Both `LRUCache` and `FastLRUCache` use an auto-warm count that supports both integers and percentages which get evaluated relative to the current size of the cache when warming happens. +`CaffeineCache`, `LRUCache` and `FastLRUCache` use an auto-warm count that supports both integers and percentages which get evaluated relative to the current size of the cache when warming happens. The `LFUCache` refers to the Least Frequently Used cache. This works in a way similar to the LRU cache, except that when the cache fills up, the entry that has been used the least is evicted. The Statistics page in the Solr Admin UI will display information about the performance of all the active caches. This information can help you fine-tune the sizes of the various caches appropriately for your particular application. When a Searcher terminates, a summary of its cache usage is also written to the log. -Each cache has settings to define its initial size (`initialSize`), maximum size (`size`) and number of items to use for during warming (`autowarmCount`). The LRU and FastLRU cache implementations can take a percentage instead of an absolute value for `autowarmCount`. +Each cache has settings to define its initial size (`initialSize`), maximum size (`size`) and number of items to use for during warming (`autowarmCount`). The Caffeine, LRU and FastLRU cache implementations can take a percentage instead of an absolute value for `autowarmCount`. 
+ +Each cache implementation also supports a `maxIdleTime` attribute that controls the automatic eviction of entries that haven't been used for a while. This attribute is expressed in seconds, with the default value of `0` meaning no entries are automatically evicted due to exceeded idle time. Smaller values of this attribute will cause older entries to be evicted quickly, which will reduce cache memory usage but may instead cause thrashing due to a repeating eviction-lookup-miss-insertion cycle of the same entries. Larger values will cause entries to stay around longer, waiting to be reused, at the cost of increased memory usage. Reasonable values, depending on the query volume and patterns, may lie somewhere between 60-3600. Please note that this condition is evaluated synchronously and before other eviction conditions on every entry insertion. + +`CaffeineCache`, `LRUCache` and `FastLRUCache` support a `maxRamMB` attribute that limits the maximum amount of memory a cache may consume. When both `size` and `maxRamMB` limits are specified the behavior will differ among implementations: in `CaffeineCache` the `maxRamMB` limit will take precedence and the `size` limit will be ignored, while in `LRUCache` and `FastLRUCache` both limits will be observed, with entries being evicted whenever any of the limits is reached. -FastLRUCache and LFUCache support `showItems` attribute. This is the number of cache items to display in the stats page for the cache. It is for debugging. +`FastLRUCache` and `LFUCache` support `showItems` attribute. This is the number of cache items to display in the stats page for the cache. It is for debugging. Details of each cache are described below. 
diff --git a/solr/solr-ref-guide/src/resource-and-plugin-loading.adoc b/solr/solr-ref-guide/src/resource-and-plugin-loading.adoc index 04dc84078df6..6efd13532903 100644 --- a/solr/solr-ref-guide/src/resource-and-plugin-loading.adoc +++ b/solr/solr-ref-guide/src/resource-and-plugin-loading.adoc @@ -46,7 +46,7 @@ CAUTION: By default, ZooKeeper's file size limit is 1MB. If your files are large Under standalone Solr, when looking up a plugin or resource to be loaded, Solr's resource loader will first look under the `/conf/` directory. If the plugin or resource is not found, the configured plugin and resource file paths are searched - see the section <> below. -On core load, Solr's resource loader constructs a list of paths (subdirectories and jars), first under <>, and then under directories pointed to by <` directives in SolrConfig>>. +On core load, Solr's resource loader constructs a list of paths (subdirectories and jars), first under <>, and then under directories pointed to by <` directives in SolrConfig>>. When looking up a resource or plugin to be loaded, the paths on the list are searched in the order they were added. diff --git a/solr/solr-ref-guide/src/rule-based-authorization-plugin.adoc b/solr/solr-ref-guide/src/rule-based-authorization-plugin.adoc index f237542057fc..99d09874795a 100644 --- a/solr/solr-ref-guide/src/rule-based-authorization-plugin.adoc +++ b/solr/solr-ref-guide/src/rule-based-authorization-plugin.adoc @@ -16,65 +16,154 @@ // specific language governing permissions and limitations // under the License. -Solr allows configuring roles to control user access to the system. - -This is accomplished through rule-based permission definitions which are assigned to users. The roles are fully customizable, and provide the ability to limit access to specific collections, request handlers, request parameters, and request methods. 
- -The roles can be used with any of the authentication plugins or with a custom authentication plugin if you have created one. You will only need to ensure that you configure the role-to-user mappings with the proper user IDs that your authentication system provides. - -Once defined through the API, roles are stored in `security.json`. +Solr's authentication plugins control whether users can access Solr in a binary fashion. A user is either authenticated, or they aren't. For more fine-grained access control, Solr's Rule-Based Authorization Plugin (hereafter, "RBAP") can be used. [CAUTION] ==== Solr's Admin UI interacts with Solr using its regular APIs. When rule-based authorization is in use, logged-in users not authorized to access the full range of these APIs may see some sections of the UI that appear blank or "broken". For best results, the Admin UI should only be accessed by users with full API access. ==== -== Enable the Authorization Plugin +== Rule-Based Auth Concepts + +"Users", "roles" and "permissions" play a central role in configuring authorization correctly. + +In Rule-Based Authorization, administrators define a series of roles based on the permissions they want those roles to confer. Users are then assigned one or more roles. + +=== Users + +The users that RBAP sees come from whatever authentication plugin has been configured. RBAP is compatible with all of the authentication plugins that Solr ships with out of the box. It is also compatible with any custom authentication plugins users might write, provided that the plugin sets a user principal on the HttpServletRequest it receives. The user value seen by RBAP in each case depends on the authentication plugin being used: the Kerberos principal if the <> is being used, the "sub" JWT claim if the <> is being used, etc. + +=== Roles + +Roles help bridge the gap between users and permissions. 
Users are assigned one or more roles, and permissions are then given to each of these roles in `security.json` + +=== Permissions + +Permissions control which roles (and consequently, which users) have access to particular chunks of Solr's API. Each permission has two main components: a description of the APIs this permission applies to, and a list of the roles that should be allowed to access this set of APIs. + +Administrators can use permissions from a list of predefined options or define their own custom permissions, and are free to mix and match both. -The plugin must be enabled in `security.json`. This file and where to put it in your system is described in detail in the section <>. +== Configuring the Rule-Based Authorization Plugin -This file has two parts, the `authentication` part and the `authorization` part. The `authentication` part stores information about the class being used for authentication. +Like all of Solr's security plugins, configuration for RBAP lives in a file or ZooKeeper node with the name `security.json`. See <> for more information on how to set up `security.json` in your cluster. + +Solr offers an <> for making changes to RBAP configuration. Authorized administrators should use this to make changes under most circumstances. Users may also make edits to `security.json` directly if it is stored in ZooKeeper, but this is an expert-level feature and is discouraged in most circumstances. The API simplifies some aspects of configuration, and provides error feedback that isn't provided when editing ZooKeeper directly. + +=== Configuration Syntax + +RBAP configuration consists of a small number of required configuration properties. Each of these lives under the `authorization` top level property in `security.json` + +class:: The authorization plugin to use. For RBAP, this value will always be `solr.RuleBasedAuthorizationPlugin` +user-role:: A mapping of individual users to the roles they belong to.
The value of this property is a JSON map, where each property name is a user, and each property value is either the name of a single role or a JSON array of multiple roles that the specified user belongs to. For example: ++ +[source,json] +---- +"user-role": { + "user1": "role1", + "user2": ["role1", "role2"] +} +---- +permissions:: A JSON array of permission rules used to restrict access to sections of Solr's API. For example: ++ +[source,json] +---- +"permissions": [ + { "name": "read", "collection": "techproducts", "role": ["admin", "dev"] }, + { "name": "all", "role": "admin"} +] +---- ++ +The syntax for individual permissions is more involved and is treated in greater detail <>. -The `authorization` part is not related to Basic authentication, but is a separate authorization plugin designed to support fine-grained user access control. When creating `security.json` you can add the permissions to the file, or you can use the Authorization API described below to add them as needed. +=== Complete Example -This example `security.json` shows how the <> can work with this authorization plugin: +The example below shows how the configuration properties above can be used to achieve a typical (if simple) RBAP use-case. 
[source,json] ---- { -"authentication":{ - "class":"solr.BasicAuthPlugin", <1> - "blockUnknown": true, <2> - "credentials":{"solr":"IV0EHq1OnNrj6gvRCwvFwTrZ1+z1oBbnQdiVC3otuq0= Ndd7LKvVBAaZIF0QAVi1ekCfAJXr1GGfLtRUXhgrF8c="} <3> -}, -"authorization":{ - "class":"solr.RuleBasedAuthorizationPlugin", <4> - "permissions":[{"name":"security-edit", - "role":"admin"}], <5> - "user-role":{"solr":"admin"} <6> -}} + "authentication": { + "class": "solr.BasicAuthPlugin", <1> + "blockUnknown": true, + "credentials": { + "admin-user": "IV0EHq1OnNrj6gvRCwvFwTrZ1+z1oBbnQdiVC3otuq0= Ndd7LKvVBAaZIF0QAVi1ekCfAJXr1GGfLtRUXhgrF8c=", + "dev-user": "IV0EHq1OnNrj6gvRCwvFwTrZ1+z1oBbnQdiVC3otuq0= Ndd7LKvVBAaZIF0QAVi1ekCfAJXr1GGfLtRUXhgrF8c=" + } + }, + "authorization": { + "class": "solr.RuleBasedAuthorizationPlugin", <2> + "user-role": { <3> + "admin-user": "admin", + "dev-user": "dev" + }, + "permissions": [ <4> + { "name": "dev-private-collection", "collection": "dev-private", "role": "dev"}, + { "name": "security-read", "role": "admin"}, + { "name": "security-edit", "role": "admin"} + ] + } +} ---- -There are several things defined in this example: +<1> Solr is using the Basic Authentication plugin for authentication. This configuration establishes two users: `admin-user` and `dev-user`. +<2> The `authorization` property begins the authorization configuration. Solr will use RBAP for authorization. +<3> Two roles are defined: `admin` and `dev`. Each user belongs to one role: `admin-user` is an `admin`, and `dev-user` is a `dev`. +<4> Three permissions restrict access to Solr. The first permission (a "custom" permission) indicates that only the `dev` role can read from a special collection with the name `dev-private`. The last two permissions ("predefined" permissions) indicate that only the `admin` role is permitted to use Solr's security APIs. See below for more information on permission syntax. + +Altogether, this example carves out two restricted areas. 
Only `admin-user` can access Solr's Authentication and Authorization APIs, and only `dev-user` can access their `dev-private` collection. All other APIs are left open, and can be accessed by both users. + +== Permissions -<1> Basic authentication plugin is enabled. -<2> All requests w/o credentials will be rejected with a 401 error. Set `'blockUnknown'` to false (or remove it altogether) if you wish to let unauthenticated requests to go through. However, if a particular resource is protected by a rule, they are rejected anyway with a 401 error. -<3> A user named 'solr', with a password has been defined. -<4> Rule-based authorization plugin is enabled. -<5> The 'admin' role has been defined, and it has permission to edit security settings. -<6> The 'solr' user has been defined to the 'admin' role. +Solr's Rule-Based Authorization plugin supports a flexible and powerful permission syntax. RBAP supports two types of permissions, each with a slightly different syntax. -== Permission Attributes +=== Custom Permissions -Each role is comprised of one or more permissions which define what the user is allowed to do. Each permission is made up of several attributes that define the allowed activity. There are some pre-defined permissions which cannot be modified. +Administrators can write their own custom permissions that can match requests based on the collection, request handler, HTTP method, particular request parameters, etc. + +Each custom permission is a JSON object under the `permissions` property, with one or more of the properties below: + +name:: An optional identifier for the permission. For custom permissions, this is used only as a clue to administrators about what this permission does. Even so, care must be taken when setting this property to avoid colliding with one of Solr's predefined permissions, whose names are semantically meaningful. 
If this name matches a predefined permission, Solr ignores any other properties set and uses the semantics of the predefined permission instead. +collection:: An optional property identifying which collection(s) this permission applies to. The value can either be a single collection name, or a JSON array containing multiple collections. The wildcard `\*` can be used to indicate that this rule applies to all collections. Similarly the special value `null` can be used to indicate that this permission governs Solr's collection-agnostic ("admin") APIs. If not specified, this property defaults to `"*"`. ++ +[NOTE] +==== +The collection property can only be used to match _collections_. It currently cannot be used to match aliases. Aliases are resolved before Solr's security plugins are invoked; a `collection` property given an alias will never match because RBAP will be comparing an alias name to already-resolved collection names. Instead, set a `collection` property that contains all collections in the alias concerned (or the `*` wildcard). +==== +path:: An optional property identifying which paths this permission applies to. The value can either be a single path string, or a JSON array containing multiple strings. For APIs accessing collections, path values should start after the collection name, and often just look like the request handler (e.g., `"/select"`). For collection-agnostic ("admin") APIs, path values should start at the `"/admin` path segment. The wildcard `\*` can be used to indicate that this permission applies to all paths. If not specified, this property defaults to `null`. +method:: An optional property identifying which HTTP methods this permission applies to. Options include `HEAD`, `POST`, `PUT`, `GET`, `DELETE`, and the wildcard `\*`. Multiple values can also be specified using a JSON array. If not specified, this property defaults to `*`. +params:: An optional property identifying which query parameters this permission applies to. 
The value is a JSON object containing the names and values of request parameters that must be matched for this permission to apply. ++ +For example, this property could be used to limit the actions a role is allowed to perform with the Collections API. If the role should only be allowed to perform the LIST or CLUSTERSTATUS requests, you would define this as follows: ++ +[source,json] +---- +"params": { + "action": ["LIST", "CLUSTERSTATUS"] +} +---- ++ +The request parameter value can be a simple string or a regular expression. Use the prefix `REGEX:` to use a regular expression match instead of simpler string matching ++ +If the commands LIST and CLUSTERSTATUS are case insensitive, the example above can be written as follows: ++ +[source,json] +---- +"params": { + "action": ["REGEX:(?i)LIST", "REGEX:(?i)CLUSTERSTATUS"] +} +---- ++ +If not specified, the permission is independent of any parameters. +role:: A required property identifying which role (or roles) are allowed access to the APIs controlled by this permission. Multiple values can be specified using a JSON array. The wildcard `*` can be used to indicate that all roles can access the described functionality. -The permissions are consulted in order they appear in `security.json`. The first permission that matches is applied for each user, so the strictest permissions should be at the top of the list. Permissions order can be controlled with a parameter of the Authorization API, as described below. === Predefined Permissions -There are several permissions that are pre-defined. These have fixed default values, which cannot be modified, and new attributes cannot be added. To use these attributes, simply define a role that includes this permission, and then assign a user to that role. +Custom permissions give administrators flexibility in configuring fine-grained access control. 
But in an effort to make configuration as simple as possible, RBAP also offers a handful of predefined permissions, which cover many common use-cases. + +Administrators invoke a predefined permission by choosing a `name` property that matches one of Solr's predefined permission options (listed below). Solr has its own definition for each of these permissions, and uses this information when checking whether a predefined permission matches an incoming request. This trades flexibility for simplicity: predefined permissions do not support the `path`, `params`, or `method` properties which custom permissions allow. -The pre-defined permissions are: +The predefined permission names (and their effects) are: * *security-edit:* this permission is allowed to edit the security configuration, meaning any update action that modifies `security.json` through the APIs will be allowed. * *security-read*: this permission is allowed to read the security configuration, meaning any action that reads `security.json` settings through the APIs will be allowed. @@ -129,67 +218,60 @@ The pre-defined permissions are: * *read*: this permission is allowed to perform any read action on any collection. This includes querying using search handlers (using <>) such as `/select`, `/get`, `/browse`, `/tvrh`, `/terms`, `/clustering`, `/elevate`, `/export`, `/spell`, `/clustering`, and `/sql`. This applies to all collections by default ( `collection:"*"` ). * *all*: Any requests coming to Solr. -== Authorization API - -=== Authorization API Endpoint - -`/admin/authorization`: takes a set of commands to create permissions, map permissions to roles, and map roles to users. - -=== Manage Permissions +=== Permission Ordering and Resolution -Three commands control managing permissions: +The permission syntax discussed above doesn't do anything to prevent multiple permissions from overlapping and applying to the same Solr APIs. 
In cases where multiple permissions match an incoming request, Solr chooses the first matching permission and ignores all others - even if those other permissions would match the incoming request! -* `set-permission`: create a new permission, overwrite an existing permission definition, or assign a pre-defined permission to a role. -* `update-permission`: update some attributes of an existing permission definition. -* `delete-permission`: remove a permission definition. +Since Solr only uses the first matching permission it finds, it's important for administrators to understand what ordering Solr uses when processing the permission list. -Permissions need to be created if they are not on the list of pre-defined permissions above. +The ordering Solr uses is complex. Solr tries to check first any permissions which are specific or relevant to the incoming request, only moving on to more general permissions if none of the more-specific ones match. In effect, this means that different requests may check the same permissions in very different orders. -Several properties can be used to define your custom permission. +If the incoming request is collection-agnostic (doesn't apply to a particular collection), Solr checks permissions in the following order: -`name`:: -The name of the permission. This is required only if it is a predefined permission. +. Permissions with a `collection` value of `null` and a `path` value matching the request's request handler +. Permissions with a `collection` value of `null` and a `path` value of `*` +. Permissions with a `collection` value of `null` and a `path` value of `null`
However, when the path is one that is non-collection-specific, such as the Collections API, the collection value must be `null`. The default value is `*`, or all collections. +If the incoming request is to a collection, Solr checks permissions in the following order: -`path`:: -A request handler name, such as `/update` or `/select`. A wild card is supported, to allow for all paths as appropriate (such as, `/update/*`). +. Permissions with `collection` and `path` values matching the request specifically (not a wildcard match) +. Permissions with `collection` matching the request specifically, and a `path` value of `*` +. Permissions with `collection` matching the request specifically, and a `path` value of `null` +. Permissions with `path` matching the request specifically, and a `collection` value of `*` +. Permissions with both `collection` and `path` values of `*`. +. Permissions with a `collection` value of `*` and a `path` value of `null` -`method`:: HTTP methods that are allowed for this permission. You could allow only GET requests, or have a role that allows PUT and POST requests. The method values that are allowed for this property are GET, POST, PUT,DELETE and HEAD. +As an example, consider the permissions below: -`params`:: -The names and values of request parameters. This property can be omitted if all request parameters are to be matched, but will restrict access only to the values provided if defined. -+ -For example, this property could be used to limit the actions a role is allowed to perform with the Collections API. If the role should only be allowed to perform the LIST or CLUSTERSTATUS requests, you would define this as follows: -+ -[source,json] ----- -{"params": { - "action": ["LIST", "CLUSTERSTATUS"] - } -} ----- -+ -The value of the parameter can be a simple string or it could be a regular expression. 
Use the prefix `REGEX:` to use a regular expression match instead of a string identity match -+ -If the commands LIST and CLUSTERSTATUS are case insensitive, the above example should be as follows -+ [source,json] ---- -{"params": { - "action": ["REGEX:(?i)LIST", "REGEX:(?i)CLUSTERSTATUS"] - } -} +{"name": "read", "role": "dev"}, <1> +{"name": "coll-read", "path": "/select", "role": "*"}, <2> +{"name": "techproducts-read", "collection": "techproducts", "role": "other", "path": "/select"}, <3> +{"name": "all", "role": "admin"} <4> ---- -`before`:: -This property allows ordering of permissions. The value of this property is the index of the permission that this new permission should be placed before in `security.json`. The index is automatically assigned in the order they are created. +All of the permissions in this list match `/select` queries. But different permissions will be used depending on the collection being queried. + +For a query to the `techproducts` collection, permission 3 will be used because it specifically targets `techproducts`. Only users with the `other` role will be authorized. + +For a query to a collection called `collection1` on the other hand, the most specific permission present is permission 2, so _all_ roles are given access. + +== Authorization API + +=== Authorization API Endpoint + +`/admin/authorization`: takes a set of commands to create permissions, map permissions to roles, and map roles to users. + +=== Manage Permissions + +Three commands control managing permissions: + +* `set-permission`: create a new permission, overwrite an existing permission definition, or assign a pre-defined permission to a role. +* `update-permission`: update some attributes of an existing permission definition. +* `delete-permission`: remove a permission definition. -`role`:: -The name of the role(s) to give this permission. This name will be used to map user IDs to the role to grant these permissions. 
The value can be wildcard such as (`*`), which means that any user is OK, but no user is NOT OK. +Created properties can either be custom or predefined. In addition to the permission syntax discussed above, these commands also allow permissions to have a `before` property, whose value matches the index of the permission that this new permission should be placed before in `security.json`. The following creates a new permission named "collection-mgr" that is allowed to create and list collections. The permission will be placed before the "read" permission. Note also that we have defined "collection as `null`, this is because requests to the Collections API are never collection-specific. diff --git a/solr/solr-ref-guide/src/setting-up-an-external-zookeeper-ensemble.adoc b/solr/solr-ref-guide/src/setting-up-an-external-zookeeper-ensemble.adoc index 224dd00dd5ef..837c4d9f79db 100644 --- a/solr/solr-ref-guide/src/setting-up-an-external-zookeeper-ensemble.adoc +++ b/solr/solr-ref-guide/src/setting-up-an-external-zookeeper-ensemble.adoc @@ -201,7 +201,7 @@ And create the `myid` file in the `/var/lib/zookeeper` directory: Repeat this for servers 4 and 5 if you are creating a 5-node ensemble (a rare case). -=== ZooKeeper Environment Configuration +=== ZooKeeper Environment Configuration To ease troubleshooting in case of problems with the ensemble later, it's recommended to run ZooKeeper with logging enabled and with proper JVM garbage collection (GC) settings. @@ -295,9 +295,9 @@ For example, to point the Solr instance to the ZooKeeper you've started on port bin/solr start -e cloud -z zk1:2181,zk2:2181,zk3:2181/solr ---- -=== Updating Solr's Include Files +=== Updating Solr Include Files -If you update Solr's include file (`solr.in.sh` or `solr.in.cmd`), which overrides defaults used with `bin/solr`, you will not have to use the `-z` parameter with `bin/solr` commands. 
+If you update Solr include files (`solr.in.sh` or `solr.in.cmd`), which override defaults used with `bin/solr`, you will not have to use the `-z` parameter with `bin/solr` commands. [.dynamic-tabs] diff --git a/solr/solr-ref-guide/src/shard-management.adoc b/solr/solr-ref-guide/src/shard-management.adoc index 1a18d048c997..089df7dbf8cf 100644 --- a/solr/solr-ref-guide/src/shard-management.adoc +++ b/solr/solr-ref-guide/src/shard-management.adoc @@ -93,6 +93,35 @@ If `true` then each stage of processing will be timed and a `timing` section wil `async`:: Request ID to track this action which will be <> +`splitByPrefix`:: +If `true`, the split point will be selected by taking into account the distribution of compositeId values in the shard. +A compositeId has the form `!`, where all documents with the same prefix are colocated in the hash space. +If there are multiple prefixes in the shard being split, then the split point will be selected to divide up the prefixes into as equal sized shards as possible without splitting any prefix. +If there is only a single prefix in a shard, the range of the prefix will be divided in half. ++ +The id field is usually scanned to determine the number of documents with each prefix. +As an optimization, if an optional field called `id_prefix` exists and has the document prefix indexed (including the !) for each document, +then that will be used to generate the counts. ++ +One simple way to populate `id_prefix` is a copyField in the schema: +[source,xml] +---- + + + + + + + + +---- + +Current implementation details and limitations: + +* Prefix size is calculated using number of documents with the prefix. +* Only two level compositeIds are supported. +* The shard can only be split into two. + === SPLITSHARD Response The output will include the status of the request and the new shard names, which will use the original shard as their basis, adding an underscore and a number. For example, "shard1" will become "shard1_0" and "shard1_1".
If the status is anything other than "success", an error message will explain why the request failed. diff --git a/solr/solr-ref-guide/src/solr-control-script-reference.adoc b/solr/solr-ref-guide/src/solr-control-script-reference.adoc index e71b6684862f..d676545937a0 100644 --- a/solr/solr-ref-guide/src/solr-control-script-reference.adoc +++ b/solr/solr-ref-guide/src/solr-control-script-reference.adoc @@ -60,7 +60,7 @@ Start Solr in SolrCloud mode, which will also launch the embedded ZooKeeper inst + This option can be shortened to simply `-c`. + -If you are already running a ZooKeeper ensemble that you want to use instead of the embedded (single-node) ZooKeeper, you should also either specify `ZK_HOST` in `solr.in.sh`/`solr.in.cmd` (see <>) or pass the -z parameter. +If you are already running a ZooKeeper ensemble that you want to use instead of the embedded (single-node) ZooKeeper, you should also either specify `ZK_HOST` in `solr.in.sh`/`solr.in.cmd` (see <>) or pass the -z parameter. + For more details, see the section <> below. + @@ -172,7 +172,7 @@ The `-c` and `-cloud` options are equivalent: If you specify a ZooKeeper connection string, such as `-z 192.168.1.4:2181`, then Solr will connect to ZooKeeper and join the cluster. -NOTE: If you have defined `ZK_HOST` in `solr.in.sh`/`solr.in.cmd` (see <>) you can omit `-z ` from all `bin/solr` commands. +NOTE: If you have defined `ZK_HOST` in `solr.in.sh`/`solr.in.cmd` (see <>) you can omit `-z ` from all `bin/solr` commands. When starting Solr in SolrCloud mode, if you do not define `ZK_HOST` in `solr.in.sh`/`solr.in.cmd` nor specify the `-z` option, then Solr will launch an embedded ZooKeeper server listening on the Solr port + 1000, i.e., if Solr is running on port 8983, then the embedded ZooKeeper will be listening on port 9983. @@ -891,11 +891,11 @@ Export all documents from a collection `gettingstarted` to a file called `gettin *Arguments* -* `format` : `jsonl` (default) or `javabin`. 
`format=javabin` exports to a file with extension `.javabin` which is the native Solr format. This is compact & faster to import +* `format` : `jsonl` (default) or `javabin`. `format=javabin` exports to a file with extension `.javabin` which is the native Solr format. This is compact & faster to import. * `out` : export file name -* `query` : a custom query , default is *:* -* `fields`: a comma separated list of fields to be exported -* `limit` : no:of docs. default is 100 , send -1 to import all the docs +* `query` : a custom query, default is `*:*`. +* `fields`: a comma separated list of fields to be exported. +* `limit` : number of documents, default is 100, send `-1` to import all the documents. === Importing the data to a collection @@ -905,4 +905,4 @@ Export all documents from a collection `gettingstarted` to a file called `gettin *Example: importing the `javabin` files* -`curl -X POST --header "Content-Type: application/javabin" --data-binary @gettingstarted.javabin http://localhost:8983/solr/gettingstarted/update?commit=true` \ No newline at end of file +`curl -X POST --header "Content-Type: application/javabin" --data-binary @gettingstarted.javabin http://localhost:8983/solr/gettingstarted/update?commit=true` diff --git a/solr/solr-ref-guide/src/solr-system-requirements.adoc b/solr/solr-ref-guide/src/solr-system-requirements.adoc index 6a4bdd48ccae..6e4872620231 100644 --- a/solr/solr-ref-guide/src/solr-system-requirements.adoc +++ b/solr/solr-ref-guide/src/solr-system-requirements.adoc @@ -39,7 +39,7 @@ Java HotSpot(TM) 64-Bit Server VM (build 25.60-b23, mixed mode) The exact output will vary, but you need to make sure you meet the minimum version requirement. We also recommend choosing a version that is not end-of-life from its vendor. Oracle/OpenJDK are the most tested JREs and are preferred. It's also preferred to use the latest available official release. -Some versions of Java VM have bugs that may impact your implementation. 
To be sure, check the page https://wiki.apache.org/lucene-java/JavaBugs[Lucene Java Bugs]. +Some versions of Java VM have bugs that may impact your implementation. To be sure, check the page https://cwiki.apache.org/confluence/display/LUCENEJAVA/JavaBugs[Lucene Java Bugs]. === Sources for Java diff --git a/solr/solr-ref-guide/src/solr-tracing.adoc b/solr/solr-ref-guide/src/solr-tracing.adoc index c989c0df7feb..1a9b9c5a8e5e 100644 --- a/solr/solr-ref-guide/src/solr-tracing.adoc +++ b/solr/solr-ref-guide/src/solr-tracing.adoc @@ -49,7 +49,7 @@ in `io.opentracing.util.GlobalTracer`. By doing this some backend like DataDog i https://docs.datadoghq.com/tracing/setup/java/[datadog-java-agent] use Javaagent to register a `Tracer` in `io.opentracing.util.GlobalTracer`. -=== Configuring sample rate +=== Configuring Sample Rate By default only 0.1% of requests are sampled, this ensure that tracing activities does not affect performance of a node. diff --git a/solr/solr-ref-guide/src/solr-tutorial.adoc b/solr/solr-ref-guide/src/solr-tutorial.adoc index ca881341e70f..77b0deb1c9c7 100644 --- a/solr/solr-ref-guide/src/solr-tutorial.adoc +++ b/solr/solr-ref-guide/src/solr-tutorial.adoc @@ -513,7 +513,7 @@ This starts the first node. When it's done start the second node, and tell it ho `./bin/solr start -c -p 7574 -s example/cloud/node2/solr -z localhost:9983` -NOTE: If you have defined `ZK_HOST` in `solr.in.sh`/`solr.in.cmd` (see <>) you can omit `-z ` from the above command. +NOTE: If you have defined `ZK_HOST` in `solr.in.sh`/`solr.in.cmd` (see <>) you can omit `-z ` from the above command. === Create a New Collection diff --git a/solr/solr-ref-guide/src/solr-upgrade-notes.adoc b/solr/solr-ref-guide/src/solr-upgrade-notes.adoc index 760fba7cae62..567b6bcb0c22 100644 --- a/solr/solr-ref-guide/src/solr-upgrade-notes.adoc +++ b/solr/solr-ref-guide/src/solr-upgrade-notes.adoc @@ -31,6 +31,71 @@ Detailed steps for upgrading a Solr cluster are in the section <> below.
+=== Solr 8.3 + +See the 8.3 Release Notes for an overview of the main new features of Solr 8.3. + +When upgrading to 8.3.x users should be aware of the following major changes from 8.2. + +*JWT Authentication* + +JWT Authentication now supports multiple identity providers. +To allow this, the parameter `jwkUrl` has been deprecated and replaced with `jwksUrl`. +Implementations using `jwkUrl` will continue to work as normal, but users + should plan to transition their configurations to use `jwksUrl` instead as + soon as feasible. + +*Caches* + +* Solr has a new cache implementation, `CaffeineCache`, which is now recommended over other caches. This cache is expected to generally provide most users lower memory footprint, higher hit ratio, and better multi-threaded performance. ++ +Since caching has a direct impact on the performance of your Solr + implementation, before switching to any new cache implementation in + production, take care to test for your environment and traffic patterns so + you fully understand the ramifications of the change. + +* A new parameter, `maxIdleTime`, allows automatic eviction of cache items that have not been used in the defined amount of time. This allows the cache to release some memory and should aid those who want or need to fine-tune their caches. + +See the section <> for more details about these and other cache options and parameters. + +=== Solr 8.2 + +See the https://cwiki.apache.org/confluence/display/SOLR/ReleaseNote82[8.2 Release Notes] for an overview of the main new features of Solr 8.2. + +When upgrading to 8.2.x, users should be aware of the following major changes from v8.1. + +*ZooKeeper 3.5.5* + +Solr 8.2 updates the version of ZooKeeper included with Solr to v3.5.5. + +It is recommended that external ensembles set up to work with Solr also be updated to ZooKeeper 3.5.5. + +This ZooKeeper release includes many new security features. 
+In order for Solr's Admin UI to work with 3.5.5, the `zoo.cfg` file must allow access to ZooKeeper's "four-letter commands". +At a minimum, `ruok`, `conf`, and `mntr` must be enabled, but other commands can optionally be enabled if you choose. +See the section <> for details. + +[WARNING] +Until 8.3, https://issues.apache.org/jira/browse/SOLR-13672[SOLR-13672] causes the ZK Status screen in the Admin UI to not be able to report status. This only impacts the UI, ZooKeeper still operates correctly. + +*Routed Aliases* + +* Routed aliases now use collection properties to identify collections that belong to the alias; prior to 8.2, these aliases used core properties. ++ +This is backward-compatible and aliases created with prior versions will + continue to work. However, new collections will no longer add the + `routedAliasName` property to the `core.properties` file so any external code + depending on this location will need to be updated. + +// TODO: aliases.adoc still says this is per-core? + +* Time-routed aliases now include a `TRA` infix in the collection name, in the pattern `_TRA_`. + +Collections created with older versions will continue to work. + +*Distributed Tracing Support* + +This release adds support for tracing requests in Solr. Please review the section <> for details on how to configure this feature. + === Solr 8.1 See the https://wiki.apache.org/solr/ReleaseNote810[8.1 Release Notes] for an overview of the main new features of Solr 8.1. 
diff --git a/solr/solr-ref-guide/src/solrcloud-autoscaling-api.adoc b/solr/solr-ref-guide/src/solrcloud-autoscaling-api.adoc index ed51b1db9444..0ae519c56eb1 100644 --- a/solr/solr-ref-guide/src/solrcloud-autoscaling-api.adoc +++ b/solr/solr-ref-guide/src/solrcloud-autoscaling-api.adoc @@ -320,11 +320,11 @@ The suggested `operation` is an API call that can be invoked to remedy the curre The types of suggestions available are -* `violation` : Fixes a violation to one or more policy rules -* `repair` : Add missing replicas -* `improvement` : move replicas around so that the load is more evenly balanced according to the autoscaling preferences +* `violation`: Fixes a violation to one or more policy rules +* `repair`: Add missing replicas +* `improvement`: move replicas around so that the load is more evenly balanced according to the autoscaling preferences -By default, the suggestions API return all of the above , in that order. However it is possible to fetch only certain types by adding a request parameter `type`. e.g: `type=violation&type=repair` +By default, the suggestions API returns all of the above, in that order. However it is possible to fetch only certain types by adding a request parameter `type`. e.g: `type=violation&type=repair` === Inline Policy Configuration @@ -676,7 +676,7 @@ The `remove-policy` command accepts a policy name to be removed from Solr. The p If you attempt to remove a policy that is being used by a collection, this command will fail to delete the policy until the collection itself is deleted. -=== Create/Update Trigger +=== Create or Update a Trigger The `set-trigger` command can be used to create a new trigger or overwrite an existing one. @@ -732,7 +732,7 @@ The `remove-trigger` command can be used to remove a trigger. It accepts a singl } ---- -=== Create/Update Trigger Listener +=== Create or Update a Trigger Listener The `set-listener` command can be used to create or modify a listener for a trigger. 
diff --git a/solr/solr-ref-guide/src/solrcloud-autoscaling-policy-preferences.adoc b/solr/solr-ref-guide/src/solrcloud-autoscaling-policy-preferences.adoc index b2a0c0831e12..5721e0196e0a 100644 --- a/solr/solr-ref-guide/src/solrcloud-autoscaling-policy-preferences.adoc +++ b/solr/solr-ref-guide/src/solrcloud-autoscaling-policy-preferences.adoc @@ -126,7 +126,6 @@ examples [source,json] { "replica" : "<2", "node":"#ANY"} - [source,json] //place 3 replicas in the group of nodes node-name1, node-name2 { "replica" : "3", "nodeset":["node-name1","node-name2"]} @@ -134,7 +133,7 @@ examples [source,json] { "nodeset":{"":""}} -The property names can be one of `node` , `host` , `sysprop.*` , `freedisk` , `ip_*` , `nodeRole` , `heapUsage` , `metrics.*` +The property names can be one of: `node`, `host`, `sysprop.*`, `freedisk`, `ip_*`, `nodeRole`, `heapUsage`, `metrics.*`. when using the `nodeset` attribute, an optional attribute `put` can be used to specify how to distribute the replicas in that node set. diff --git a/solr/solr-ref-guide/src/solrcloud-autoscaling-trigger-actions.adoc b/solr/solr-ref-guide/src/solrcloud-autoscaling-trigger-actions.adoc index 5571377fbeb7..3ad37726b103 100644 --- a/solr/solr-ref-guide/src/solrcloud-autoscaling-trigger-actions.adoc +++ b/solr/solr-ref-guide/src/solrcloud-autoscaling-trigger-actions.adoc @@ -69,10 +69,19 @@ The `ExecutePlanAction` executes the Collection API commands emitted by the `Com the cluster using SolrJ. It executes the commands serially, waiting for each of them to succeed before continuing with the next one. -Currently, it has no configurable parameters. +Currently, it has the following configurable parameters: -If any one of the commands fail, then the complete chain of actions are -executed again at the next run of the trigger. If the Overseer node fails while `ExecutePlanAction` is running, +`taskTimeoutSeconds`:: +Default value of this parameter is 120 seconds. 
This value defines how long the action will wait for a +command to complete its execution. If a timeout is reached while the command is still running then +the command status is provisionally considered a success but a warning is logged, unless `taskTimeoutFail` +is set to true. + +`taskTimeoutFail`:: +Boolean with a default value of false. If this value is true then a timeout in command processing will be +marked as failure and an exception will be thrown. + +If the Overseer node fails while `ExecutePlanAction` is running, then the new Overseer node will run the chain of actions for the same event again after waiting for any running Collection API operations belonging to the event to complete. diff --git a/solr/solr-ref-guide/src/solrcloud-autoscaling-triggers.adoc b/solr/solr-ref-guide/src/solrcloud-autoscaling-triggers.adoc index 484f514a19b6..860b1f969bbe 100644 --- a/solr/solr-ref-guide/src/solrcloud-autoscaling-triggers.adoc +++ b/solr/solr-ref-guide/src/solrcloud-autoscaling-triggers.adoc @@ -55,7 +55,7 @@ generated, which may significantly differ due to the rate limits set by `waitFor indicates the nodes that were lost or added. == Trigger Configuration -Trigger configurations are managed using the <> with the commands `<>`, `<>`, +Trigger configurations are managed using the <> with the commands `<>`, `<>`, `suspend-trigger`, and `resume-trigger`. === Trigger Properties @@ -218,8 +218,8 @@ The metric trigger can be used to monitor any metric exposed by the <>, this trigger supports the following parameters: -`metric`:: -(string, required) The metric property name to be watched in the format metric:__group__:__prefix__, e.g., `metric:solr.node:CONTAINER.fs.coreRoot.usableSpace`. +`metrics`:: +(string, required) The metric property name to be watched in the format metrics:__group__:__prefix__, e.g., `metrics:solr.node:CONTAINER.fs.coreRoot.usableSpace`. `below`:: (double, optional) The lower threshold for the metric value. 
The trigger produces a metric breached event if the metric's value falls below this value. @@ -247,7 +247,7 @@ In addition to the parameters described at <>, this trigg "name": "metric_trigger", "event": "metric", "waitFor": "5s", - "metric": "metric:solr.node:CONTAINER.fs.coreRoot.usableSpace", + "metrics": "metrics:solr.node:CONTAINER.fs.coreRoot.usableSpace", "below": 107374182400, "collection": "mycollection" } @@ -522,6 +522,7 @@ request node deletion. } ---- +[[scheduledtrigger]] === Scheduled Trigger The Scheduled trigger generates events according to a fixed rate schedule. @@ -563,3 +564,46 @@ ever executing if a new scheduled event is ready as soon as the cooldown period Solr randomizes the order in which the triggers are resumed after the cooldown period to mitigate this problem. However, it is recommended that scheduled triggers are not used with low `every` values and an external scheduling process such as cron be used for such cases instead. ==== + +== Default Triggers +A fresh installation of SolrCloud always creates some default triggers. If these triggers are missing (e.g., they were +deleted) they are re-created on any autoscaling configuration change or Overseer restart. These triggers can be +suspended if their functionality somehow interferes with other configuration but they can't be permanently deleted. + +=== Auto-add Replicas Trigger +The default configuration and functionality of this trigger is described in detail in the +section titled <>. + +=== Scheduled Maintenance Trigger +This is a <> named `.scheduled_maintenance` and it's configured to run once per day. +It executes the following actions: + +==== `solr.InactiveShardPlanAction` +This action checks existing collections for any shards in `INACTIVE` state, which indicates that they +are the original parent shards remaining after a successful `SPLITSHARD` operation.
+ +These shards are not immediately deleted because shard splitting is a complex operation that may fail in +non-obvious ways, so keeping the original parent shard gives users a chance to recover from potential failures. + +However, keeping these shards indefinitely doesn't make sense either because they still use system +resources (their Solr cores are still being loaded, and their indexes still occupy disk space). +This scheduled action is responsible for removing such inactive parent shards after their +time-to-live expires. By default the TTL is set to 48 hours after the shard state was set to +`INACTIVE`. When this TTL elapses this scheduled action requests that the shard be deleted, which is then +executed by `solr.ExecutePlanAction` that is configured for this trigger. + +==== `solr.InactiveMarkersPlanAction` +When a node is lost or added an event is generated - but if the lost node was the one running +Overseer leader such event may not be properly processed by the triggers (which run in the Overseer leader context). +For this reason a special marker is created in ZooKeeper so that when the next Overseer leader is elected the +triggers will be able to learn about and process these past events. + +Triggers don't delete these markers once they are done processing (because several triggers may need them and eg. +scheduled triggers may run at arbitrary times with arbitrary delays) so Solr needs a mechanism to clean up +old markers for such events so that they don't accumulate over time. This trigger action performs the clean-up +- it deletes markers older than the configured time-to-live (by default it's 48 hours). + +=== `solr.ExecutePlanAction` +This action simply executes any collection admin requests generated by other +actions - in particular, in the default configuration it executes `DELETESHARD` requests produced by +`solr.InactiveShardPlanAction`, as described above. 
diff --git a/solr/solr-ref-guide/src/taking-solr-to-production.adoc b/solr/solr-ref-guide/src/taking-solr-to-production.adoc index 9e3595d492c9..7b11886ae791 100644 --- a/solr/solr-ref-guide/src/taking-solr-to-production.adoc +++ b/solr/solr-ref-guide/src/taking-solr-to-production.adoc @@ -241,7 +241,7 @@ Setting the hostname of the Solr server is recommended, especially when running === Environment banner in Admin UI -To guard against accidentally doing changes to the wrong cluster, you may configure a visual indication in the Admin UI of whether you currently work with a production environment or not. To do this, edit your `solr.in.sh` or `solr.in.cmd` file with a `-Dsolr.environment=prod` setting, or set the cluster property named `environment`. To specify label and/or color, use a comma delimited format as below. The `+` character can be used instead of space to avoid quoting. Colors may be valid CSS colors or numeric e.g. `#ff0000` for bright red. Examples of valid environment configs: +To guard against accidentally doing changes to the wrong cluster, you may configure a visual indication in the Admin UI of whether you currently work with a production environment or not. To do this, edit your `solr.in.sh` or `solr.in.cmd` file with a `-Dsolr.environment=prod` setting, or set the cluster property named `environment`. To specify label and/or color, use a comma delimited format as below. The `+` character can be used instead of space to avoid quoting. Colors may be valid CSS colors or numeric, e.g., `#ff0000` for bright red. Examples of valid environment configs: * `prod` * `test,label=Functional+test` @@ -273,26 +273,33 @@ The `bin/solr` script simply passes options starting with `-D` on to the JVM dur SOLR_OPTS="$SOLR_OPTS -Dsolr.autoSoftCommit.maxTime=10000" ---- -=== File Handles and Processes (ulimit settings) +=== Ulimit Settings (*nix Operating Systems) -Two common settings that result in errors on *nix systems are file handles and user processes. 
- -It is common for the default limits for number of processes and file handles to default to values that are too low for a large Solr installation. The required number of each of these will increase based on a combination of the number of replicas hosted per node and the number of segments in the index for each replica. - -The usual recommendation is to make processes and file handles at least 65,000 each, unlimited if possible. On most *nix systems, this command will show the currently-defined limits: +There are several settings that should be monitored and set as high as possible, "unlimited" by preference. On most "*nix" operating systems, you can see the current values by typing the following at a command prompt. [source,bash] ---- ulimit -a ---- -It is strongly recommended that file handle and process limits be permanently raised as above. The exact form of the command will vary per operating system, and some systems require editing configuration files and restarting your server. Consult your system administrators for guidance in your particular environment. +The following settings in particular are important to have set very high, unlimited if possible. + +* max processes (`ulimit -u`): 65,000 is the recommended _minimum_. +* file handles (`ulimit -n`): 65,000 is the recommended _minimum_. All the files used by all replicas have their file handles open at once so this can grow quite large. +* virtual memory (`ulimit -v`): Set to unlimited. This is used for memory-mapping the indexes. +* max memory size (`ulimit -m`): Also used by MMap, set to unlimited. +* If your system supports it, `sysctl vm.max_map_count` should be set to unlimited as well. + +We strongly recommend that these settings be permanently raised. The exact process to permanently raise them will vary per operating system. Some systems require editing configuration files and restarting your server. Consult your system administrators for guidance in your particular environment.
[WARNING] ==== -If these limits are exceeded, the problems reported by Solr vary depending on the specific operation responsible for exceeding the limit. Errors such as "too many open files", "connection error", and "max processes exceeded" have been reported, as well as SolrCloud recovery failures. +Check these limits every time you upgrade your kernel or operating system. These operations can reset these to their defaults. +==== -Since exceeding these limits can result in such varied symptoms it is _strongly_ recommended that these limits be permanently raised as recommended above. +[WARNING] +==== +If these limits are exceeded, the problems reported by Solr vary depending on the specific operation responsible for exceeding the limit. Errors such as "too many open files", "connection error", and "max processes exceeded" have been reported, as well as SolrCloud recovery failures. ==== == Running Multiple Solr Nodes per Host diff --git a/solr/solr-ref-guide/src/updating-parts-of-documents.adoc b/solr/solr-ref-guide/src/updating-parts-of-documents.adoc index d8a0da50dd1b..f40cecf98934 100644 --- a/solr/solr-ref-guide/src/updating-parts-of-documents.adoc +++ b/solr/solr-ref-guide/src/updating-parts-of-documents.adoc @@ -415,7 +415,7 @@ $ curl -X POST -H 'Content-Type: application/json' 'http://localhost:8983/solr/t "ccc",1632740949182382080]} ---- -In this example, we have added 2 documents "aaa" and "ccc". As we have specified the parameter `\_version_=-1` , this request should not add the document with the id `aaa` because it already exists. The request succeeds & does not throw any error because the `failOnVersionConflicts=false` parameter is specified. The response shows that only document `ccc` is added and `aaa` is silently ignored. +In this example, we have added 2 documents "aaa" and "ccc". As we have specified the parameter `\_version_=-1`, this request should not add the document with the id `aaa` because it already exists. 
The request succeeds & does not throw any error because the `failOnVersionConflicts=false` parameter is specified. The response shows that only document `ccc` is added and `aaa` is silently ignored. For more information, please also see Yonik Seeley's presentation on https://www.youtube.com/watch?v=WYVM6Wz-XTw[NoSQL features in Solr 4] from Apache Lucene EuroCon 2012. diff --git a/solr/solr-ref-guide/src/using-zookeeper-to-manage-configuration-files.adoc b/solr/solr-ref-guide/src/using-zookeeper-to-manage-configuration-files.adoc index 546ee99b197c..fe13460a5f0e 100644 --- a/solr/solr-ref-guide/src/using-zookeeper-to-manage-configuration-files.adoc +++ b/solr/solr-ref-guide/src/using-zookeeper-to-manage-configuration-files.adoc @@ -93,4 +93,4 @@ If you for example would like to keep your `solr.xml` in ZooKeeper to avoid havi bin/solr zk cp file:local/file/path/to/solr.xml zk:/solr.xml -z localhost:2181 ---- -NOTE: If you have defined `ZK_HOST` in `solr.in.sh`/`solr.in.cmd` (see <>) you can omit `-z ` from the above command. +NOTE: If you have defined `ZK_HOST` in `solr.in.sh`/`solr.in.cmd` (see <>) you can omit `-z ` from the above command. 
diff --git a/solr/solrj/ivy.xml b/solr/solrj/ivy.xml index 5182ae785b64..5312c4207a78 100644 --- a/solr/solrj/ivy.xml +++ b/solr/solrj/ivy.xml @@ -52,6 +52,15 @@ + + + + + + + + + diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java index 885edc9d95ce..0bbdc1a2ad36 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java @@ -958,6 +958,21 @@ public UpdateResponse deleteByQuery(String query, int commitWithinMs) throws Sol return deleteByQuery(null, query, commitWithinMs); } + /** + * Issues a ping request to check if the collection's replicas are alive + * + * @param collection collection to ping + * + * @return a {@link org.apache.solr.client.solrj.response.SolrPingResponse} containing the response + * from the server + * + * @throws IOException If there is a low-level I/O error. + * @throws SolrServerException if there is an error on the server + */ + public SolrPingResponse ping(String collection) throws SolrServerException, IOException { + return new SolrPing().process(this, collection); + } + /** * Issues a ping request to check if the server is alive * @@ -971,6 +986,7 @@ public SolrPingResponse ping() throws SolrServerException, IOException { return new SolrPing().process(this, null); } + /** * Performs a query to the Solr server * diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrResponse.java index 73eb86362354..ef52eb223931 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrResponse.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrResponse.java @@ -18,12 +18,10 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; -import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; 
-import org.apache.solr.common.MapWriter; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.util.NamedList; @@ -34,7 +32,7 @@ * * @since solr 1.3 */ -public abstract class SolrResponse implements Serializable, MapWriter { +public abstract class SolrResponse implements Serializable { /** Elapsed time in milliseconds for the request as seen from the client. */ public abstract long getElapsedTime(); @@ -45,11 +43,6 @@ public abstract class SolrResponse implements Serializable, MapWriter { public abstract NamedList getResponse(); - @Override - public void writeMap(EntryWriter ew) throws IOException { - getResponse().writeMap(ew); - } - public Exception getException() { NamedList exp = (NamedList) getResponse().get("exception"); if (exp == null) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaInfo.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaInfo.java index 19bd16188ea7..f9a83de12aa0 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaInfo.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/ReplicaInfo.java @@ -88,7 +88,7 @@ public ReplicaInfo(Map map) { } public Object clone() { - return new ReplicaInfo(name, core, collection, shard, type, node, variables); + return new ReplicaInfo(name, core, collection, shard, type, node, new HashMap<>(variables)); } @Override diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java index 0b087802f89d..24748caba5f9 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java @@ -253,7 +253,14 @@ public Builder() {} public Builder(List solrUrls) { this.solrUrls = solrUrls; } - + + /** + * Provide an already 
created {@link ClusterStateProvider} instance + */ + public Builder(ClusterStateProvider stateProvider) { + this.stateProvider = stateProvider; + } + /** * Provide a series of ZK hosts which will be used when configuring {@link CloudSolrClient} instances. * diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java index fb2af9423c5c..16ab919039fc 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java @@ -279,9 +279,7 @@ void sendUpdateStream() throws Exception { } finally { try { - if (rspBody != null) { - while (rspBody.read() != -1) {} - } + consumeFully(rspBody); } catch (Exception e) { log.error("Error consuming and closing http response stream.", e); } @@ -295,6 +293,27 @@ void sendUpdateStream() throws Exception { } } + private void consumeFully(InputStream is) { + if (is != null) { + try { + // make sure the stream is full read + is.skip(is.available()); + while (is.read() != -1) { + } + } catch (UnsupportedOperationException e) { + // nothing to do then + } catch (IOException e) { + // quiet + } finally { + try { + is.close(); + } catch (IOException e) { + // quiet + } + } + } + } + private void notifyQueueAndRunnersIfEmptyQueue() { if (queue.size() == 0) { synchronized (queue) { @@ -512,6 +531,7 @@ public void handleError(Throwable ex) { /** * Intended to be used as an extension point for doing post processing after a request completes. + * @param respBody the body of the response, subclasses must not close this stream. 
*/ public void onSuccess(Response resp, InputStream respBody) { // no-op by design, override to add functionality diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java index 8f1af8c7831a..c5273278d9f8 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java @@ -374,7 +374,7 @@ protected HttpRequestBase createMethod(SolrRequest request, String collection) t if (request instanceof V2Request) { if (System.getProperty("solr.v2RealPath") == null || ((V2Request) request).isForceV2()) { - basePath = changeV2RequestEndpoint(basePath); + basePath = baseUrl.replace("/solr", "/api"); } else { basePath = baseUrl + "/____v2"; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java index eed6b872a00a..bd3710fbfe2e 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java @@ -207,7 +207,7 @@ public static void register(StreamFactory streamFactory) { .withFunctionName("ttest", TTestEvaluator.class) .withFunctionName("pairedTtest", PairedTTestEvaluator.class) .withFunctionName("multiVariateNormalDistribution", MultiVariateNormalDistributionEvaluator.class) - .withFunctionName("integrate", IntegrateEvaluator.class) + .withFunctionName("integral", IntegrateEvaluator.class) .withFunctionName("density", DensityEvaluator.class) .withFunctionName("mannWhitney", MannWhitneyUEvaluator.class) .withFunctionName("sumSq", SumSqEvaluator.class) @@ -300,6 +300,8 @@ public static void register(StreamFactory streamFactory) { .withFunctionName("upper", UpperEvaluator.class) .withFunctionName("split", SplitEvaluator.class) .withFunctionName("trim", TrimEvaluator.class) + .withFunctionName("cosine", 
CosineDistanceEvaluator.class) + .withFunctionName("trunc", TruncEvaluator.class) // Boolean Stream Evaluators diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CorrelationEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CorrelationEvaluator.java index ac6f2e22aaa8..c8c72f414d00 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CorrelationEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CorrelationEvaluator.java @@ -17,6 +17,7 @@ package org.apache.solr.client.solrj.io.eval; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Locale; @@ -26,6 +27,7 @@ import org.apache.commons.math3.stat.correlation.KendallsCorrelation; import org.apache.commons.math3.stat.correlation.SpearmansCorrelation; +import org.apache.solr.client.solrj.io.stream.ZplotStream; import org.apache.solr.client.solrj.io.stream.expr.StreamExpression; import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParameter; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; @@ -109,6 +111,9 @@ public Object doWork(Object ... values) throws IOException{ double[][] corrMatrixData = corrMatrix.getData(); Matrix realMatrix = new Matrix(corrMatrixData); realMatrix.setAttribute("corr", pearsonsCorrelation); + List labels = getColumnLabels(matrix.getColumnLabels(), corrMatrixData.length); + realMatrix.setColumnLabels(labels); + realMatrix.setRowLabels(labels); return realMatrix; } else if (type.equals(CorrelationType.kendalls)) { KendallsCorrelation kendallsCorrelation = new KendallsCorrelation(data); @@ -116,6 +121,9 @@ public Object doWork(Object ... 
values) throws IOException{ double[][] corrMatrixData = corrMatrix.getData(); Matrix realMatrix = new Matrix(corrMatrixData); realMatrix.setAttribute("corr", kendallsCorrelation); + List labels = getColumnLabels(matrix.getColumnLabels(), corrMatrixData.length); + realMatrix.setColumnLabels(labels); + realMatrix.setRowLabels(labels); return realMatrix; } else if (type.equals(CorrelationType.spearmans)) { SpearmansCorrelation spearmansCorrelation = new SpearmansCorrelation(new Array2DRowRealMatrix(data, false)); @@ -123,6 +131,9 @@ public Object doWork(Object ... values) throws IOException{ double[][] corrMatrixData = corrMatrix.getData(); Matrix realMatrix = new Matrix(corrMatrixData); realMatrix.setAttribute("corr", spearmansCorrelation.getRankCorrelation()); + List labels = getColumnLabels(matrix.getColumnLabels(), corrMatrixData.length); + realMatrix.setColumnLabels(labels); + realMatrix.setRowLabels(labels); return realMatrix; } else { return null; @@ -134,4 +145,18 @@ public Object doWork(Object ... values) throws IOException{ throw new IOException("corr function operates on either two numeric arrays or a single matrix as parameters."); } } + + public static List getColumnLabels(List labels, int length) { + if(labels != null) { + return labels; + } else { + List l = new ArrayList(); + for(int i=0; i ignoredNamedParameters) throws IOException{ + super(expression, factory, ignoredNamedParameters); + } + + @Override + public Object evaluate(Tuple tuple) throws IOException { + return new CosineDistance(); + } + + @Override + public Object doWork(Object... 
values) throws IOException { + // Nothing to do here + throw new IOException("This call should never occur"); + } + + public static class CosineDistance implements DistanceMeasure { + + private static final long serialVersionUID = -9108154600539125566L; + + public double compute(double[] v1, double[] v2) throws DimensionMismatchException { + return Precision.round(1-Math.abs(CosineSimilarityEvaluator.cosineSimilarity(v1, v2)), 8); + } + } +} \ No newline at end of file diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CosineSimilarityEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CosineSimilarityEvaluator.java index 2b21ac8ff9ca..07823c055433 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CosineSimilarityEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CosineSimilarityEvaluator.java @@ -20,6 +20,7 @@ import java.util.List; import java.util.Locale; +import org.apache.commons.math3.util.Precision; import org.apache.solr.client.solrj.io.stream.expr.StreamExpression; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; @@ -51,7 +52,7 @@ public Object doWork(Object first, Object second) throws IOException{ return cosineSimilarity(d1, d2); } - private double cosineSimilarity(double[] vectorA, double[] vectorB) { + public static double cosineSimilarity(double[] vectorA, double[] vectorB) { double dotProduct = 0.0; double normA = 0.0; double normB = 0.0; @@ -60,7 +61,8 @@ private double cosineSimilarity(double[] vectorA, double[] vectorB) { normA += Math.pow(vectorA[i], 2); normB += Math.pow(vectorB[i], 2); } - return dotProduct / (Math.sqrt(normA) * Math.sqrt(normB)); + double d = dotProduct / (Math.sqrt(normA) * Math.sqrt(normB)); + return Precision.round(d, 8); } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CovarianceEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CovarianceEvaluator.java index 
8a2895124caa..3cb316138a00 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CovarianceEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CovarianceEvaluator.java @@ -49,7 +49,11 @@ public Object doWork(Object ... values) throws IOException{ Covariance covariance = new Covariance(data, true); RealMatrix coMatrix = covariance.getCovarianceMatrix(); double[][] coData = coMatrix.getData(); - return new Matrix(coData); + Matrix realMatrix = new Matrix(coData); + List labels = CorrelationEvaluator.getColumnLabels(matrix.getColumnLabels(), coData.length); + realMatrix.setColumnLabels(labels); + realMatrix.setRowLabels(labels); + return realMatrix; } else { throw new IOException("The cov function expects either two numeric arrays or a matrix as parameters."); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DerivativeEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DerivativeEvaluator.java index 183a47babf06..895d3b5544f5 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DerivativeEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DerivativeEvaluator.java @@ -21,6 +21,7 @@ import org.apache.commons.math3.analysis.DifferentiableUnivariateFunction; import org.apache.commons.math3.analysis.UnivariateFunction; +import org.apache.commons.math3.analysis.interpolation.AkimaSplineInterpolator; import org.apache.solr.client.solrj.io.stream.expr.StreamExpression; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; @@ -42,12 +43,17 @@ public Object doWork(Object value) throws IOException { } VectorFunction vectorFunction = (VectorFunction) value; + + DifferentiableUnivariateFunction func = null; + double[] x = (double[])vectorFunction.getFromContext("x"); + if(!(vectorFunction.getFunction() instanceof DifferentiableUnivariateFunction)) { - throw new IOException("Cannot evaluate derivative from parameter."); + double[] y = 
(double[])vectorFunction.getFromContext("y"); + func = new AkimaSplineInterpolator().interpolate(x, y); + } else { + func = (DifferentiableUnivariateFunction) vectorFunction.getFunction(); } - DifferentiableUnivariateFunction func = (DifferentiableUnivariateFunction)vectorFunction.getFunction(); - double[] x = (double[])vectorFunction.getFromContext("x"); UnivariateFunction derfunc = func.derivative(); double[] dvalues = new double[x.length]; for(int i=0; i labels = CorrelationEvaluator.getColumnLabels(matrix.getColumnLabels(), data.length); + m.setColumnLabels(labels); + m.setRowLabels(labels); + return m; } } \ No newline at end of file diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DoubleEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DoubleEvaluator.java index 7fce45f0f861..e2e699878757 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DoubleEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DoubleEvaluator.java @@ -44,7 +44,11 @@ else if(value instanceof List){ return ((List)value).stream().map(innerValue -> doWork(innerValue)).collect(Collectors.toList()); } else{ - return Double.valueOf(value.toString()); + if(value instanceof String) { + return Double.valueOf(value.toString()); + } else { + return ((Number) value).doubleValue(); + } } } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FuzzyKmeansEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FuzzyKmeansEvaluator.java index 62a3444ea302..fbd5561ed9c0 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FuzzyKmeansEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FuzzyKmeansEvaluator.java @@ -27,6 +27,7 @@ import org.apache.commons.math3.ml.clustering.CentroidCluster; import org.apache.commons.math3.ml.distance.EuclideanDistance; import org.apache.commons.math3.ml.clustering.FuzzyKMeansClusterer; +import 
org.apache.solr.client.solrj.io.stream.ZplotStream; import org.apache.solr.client.solrj.io.stream.expr.StreamExpression; import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParameter; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; @@ -100,6 +101,11 @@ public Object doWork(Object value1, Object value2) throws IOException { double[][] mmData = realMatrix.getData(); Matrix mmMatrix = new Matrix(mmData); mmMatrix.setRowLabels(matrix.getRowLabels()); + List clusterCols = new ArrayList(); + for(int i=0; i 3) { + throw new IOException("The integrate function requires at most 3 parameters"); } if (!(values[0] instanceof VectorFunction)) { @@ -43,28 +44,45 @@ public Object doWork(Object... values) throws IOException { } VectorFunction vectorFunction = (VectorFunction) values[0]; - if(!(vectorFunction.getFunction() instanceof UnivariateFunction)) { + if (!(vectorFunction.getFunction() instanceof UnivariateFunction)) { throw new IOException("Cannot evaluate integral from parameter."); } - Number min = null; - Number max = null; + UnivariateFunction func = (UnivariateFunction) vectorFunction.getFunction(); - if(values[1] instanceof Number) { - min = (Number) values[1]; - } else { - throw new IOException("The second parameter of the integrate function must be a number"); - } + if(values.length == 3) { + + + Number min = null; + Number max = null; + + if (values[1] instanceof Number) { + min = (Number) values[1]; + } else { + throw new IOException("The second parameter of the integrate function must be a number"); + } - if(values[2] instanceof Number ) { - max = (Number) values[2]; + if (values[2] instanceof Number) { + max = (Number) values[2]; + } else { + throw new IOException("The third parameter of the integrate function must be a number"); + } + + RombergIntegrator rombergIntegrator = new RombergIntegrator(); + return rombergIntegrator.integrate(5000, func, min.doubleValue(), max.doubleValue()); } else { - throw new IOException("The 
third parameter of the integrate function must be a number"); - } + RombergIntegrator integrator = new RombergIntegrator(); - UnivariateFunction func = (UnivariateFunction)vectorFunction.getFunction(); + double[] x = (double[])vectorFunction.getFromContext("x"); + double[] y = (double[])vectorFunction.getFromContext("y"); + ArrayList out = new ArrayList(); + out.add(0); + for(int i=1; i)value).stream().map(innerValue -> doWork(innerValue)).collect(Collectors.toList()); } else{ - return Long.valueOf(value.toString()); + if(value instanceof String) { + return Long.valueOf(value.toString()); + } else { + return ((Number) value).longValue(); + } } } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeEvaluator.java index 22cbfc90e828..1971c1510f66 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeEvaluator.java @@ -53,7 +53,10 @@ else if(value instanceof List){ double[] row = data[i]; standardized[i] = StatUtils.normalize(row); } - return new Matrix(standardized); + Matrix m = new Matrix(standardized); + m.setRowLabels(matrix.getRowLabels()); + m.setColumnLabels(matrix.getColumnLabels()); + return m; } else { return doWork(Arrays.asList((BigDecimal)value)); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeSumEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeSumEvaluator.java index d300f598cc22..6717909e9df3 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeSumEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeSumEvaluator.java @@ -62,7 +62,10 @@ public Object doWork(Object... 
values) throws IOException{ unitData[i] = unitRow; } - return new Matrix(unitData); + Matrix m = new Matrix(unitData); + m.setRowLabels(matrix.getRowLabels()); + m.setColumnLabels(matrix.getColumnLabels()); + return m; } else if(value instanceof List) { List vals = (List)value; double[] doubles = new double[vals.size()]; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RecursiveEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RecursiveEvaluator.java index a5fae7e6af0a..97224dd6018f 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RecursiveEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RecursiveEvaluator.java @@ -126,27 +126,16 @@ protected Object normalizeOutputType(Object value) { return value; } else if(value instanceof BigDecimal){ BigDecimal bd = (BigDecimal)value; - if(bd.signum() == 0 || bd.scale() <= 0 || bd.stripTrailingZeros().scale() <= 0){ - try{ - return bd.longValueExact(); - } - catch(ArithmeticException e){ - // value was too big for a long, so use a double which can handle scientific notation - } - } - return bd.doubleValue(); } + else if(value instanceof Long || value instanceof Integer) { + return ((Number) value).longValue(); + } else if(value instanceof Double){ - if(Double.isNaN((Double)value)){ - return value; - } - - // could be a long so recurse back in as a BigDecimal - return normalizeOutputType(new BigDecimal((Double)value)); + return value; } else if(value instanceof Number){ - return normalizeOutputType(new BigDecimal(((Number)value).toString())); + return ((Number) value).doubleValue(); } else if(value instanceof List){ // normalize each value in the list diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TopFeaturesEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TopFeaturesEvaluator.java index e2100b1fbde3..e2dddfb8ff7f 100644 --- 
a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TopFeaturesEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TopFeaturesEvaluator.java @@ -73,9 +73,11 @@ public Object doWork(Object value1, Object value2) throws IOException { private List getMaxIndexes(double[] values, int k) { TreeSet set = new TreeSet(); for(int i=0; i k) { - set.pollFirst(); + if(values[i] > 0){ + set.add(new Pair(i, values[i])); + if (set.size() > k) { + set.pollFirst(); + } } } @@ -89,16 +91,22 @@ private List getMaxIndexes(double[] values, int k) { public static class Pair implements Comparable { - private int index; + private Integer index; private Double value; - public Pair(int index, Number value) { - this.index = index; + public Pair(int _index, Number value) { + this.index = _index; this.value = value.doubleValue(); } public int compareTo(Pair pair) { - return value.compareTo(pair.value); + + int c = value.compareTo(pair.value); + if(c==0) { + return index.compareTo(pair.index); + } else { + return c; + } } public int getIndex() { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TruncEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TruncEvaluator.java new file mode 100644 index 000000000000..0e4ebaca4884 --- /dev/null +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TruncEvaluator.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.solrj.io.eval; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; +import java.util.stream.Collectors; + +import org.apache.solr.client.solrj.io.stream.expr.StreamExpression; +import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; + +public class TruncEvaluator extends RecursiveObjectEvaluator implements TwoValueWorker { + protected static final long serialVersionUID = 1L; + + public TruncEvaluator(StreamExpression expression, StreamFactory factory) throws IOException{ + super(expression, factory); + + if(2 != containedEvaluators.size()){ + throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting exactly 2 values but found %d",expression,containedEvaluators.size())); + } + } + + @Override + public Object doWork(Object value1, Object value2){ + if(null == value1){ + return null; + } + + int endIndex = ((Number)value2).intValue(); + + if(value1 instanceof List){ + return ((List)value1).stream().map(innerValue -> doWork(innerValue, endIndex)).collect(Collectors.toList()); + } + else { + return value1.toString().substring(0, endIndex); + } + } +} diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/UnitEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/UnitEvaluator.java index 16d72ae367f6..f6463cd9a470 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/UnitEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/UnitEvaluator.java @@ -55,7 +55,7 @@ public Object 
doWork(Object value) throws IOException{ Matrix m = new Matrix(unitData); m.setRowLabels(matrix.getRowLabels()); - m.setColumnLabels(matrix.getRowLabels()); + m.setColumnLabels(matrix.getColumnLabels()); return m; } else if(value instanceof List) { List values = (List)value; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/KnnStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/KnnStream.java index 64fcd027969c..c03db3820d37 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/KnnStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/KnnStream.java @@ -227,8 +227,8 @@ public Tuple read() throws IOException { if(documentIterator.hasNext()) { Map map = new HashMap(); SolrDocument doc = documentIterator.next(); - for(String key : doc.keySet()) { - map.put(key, doc.get(key)); + for(Entry entry : doc.entrySet()) { + map.put(entry.getKey(), entry.getValue()); } return new Tuple(map); } else { @@ -241,9 +241,9 @@ public Tuple read() throws IOException { private ModifiableSolrParams getParams(Map props) { ModifiableSolrParams params = new ModifiableSolrParams(); - for(String key : props.keySet()) { - String value = props.get(key); - params.add(key, value); + for(Entry entry : props.entrySet()) { + String value = entry.getValue(); + params.add(entry.getKey(), value); } return params; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RandomFacadeStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RandomFacadeStream.java index 5a343eb525ad..f9735cef9c4f 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RandomFacadeStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RandomFacadeStream.java @@ -101,8 +101,8 @@ else if(zkHostExpression.getParameter() instanceof StreamExpressionValue){ private SolrParams toSolrParams(Map props) { ModifiableSolrParams sp = new ModifiableSolrParams(); - for(String key : 
props.keySet()) { - sp.add(key, props.get(key)); + for(Map.Entry entry : props.entrySet()) { + sp.add(entry.getKey(), entry.getValue()); } return sp; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RandomStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RandomStream.java index 68988a0478c5..1a6083b8d9bd 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RandomStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RandomStream.java @@ -216,8 +216,8 @@ public Tuple read() throws IOException { if(documentIterator.hasNext()) { Map map = new HashMap(); SolrDocument doc = documentIterator.next(); - for(String key : doc.keySet()) { - map.put(key, doc.get(key)); + for(Entry entry : doc.entrySet()) { + map.put(entry.getKey(), entry.getValue()); } return new Tuple(map); } else { @@ -230,9 +230,9 @@ public Tuple read() throws IOException { private ModifiableSolrParams getParams(Map props) { ModifiableSolrParams params = new ModifiableSolrParams(); - for(String key : props.keySet()) { - String value = props.get(key); - params.add(key, value); + for(Entry entry : props.entrySet()) { + String value = entry.getValue(); + params.add(entry.getKey(), value); } return params; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SearchStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SearchStream.java index 3643969f23ce..24368a0a9821 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SearchStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SearchStream.java @@ -210,8 +210,8 @@ public Tuple read() throws IOException { if(documentIterator.hasNext()) { Map map = new HashMap(); SolrDocument doc = documentIterator.next(); - for(String key : doc.keySet()) { - map.put(key, doc.get(key)); + for(Entry entry : doc.entrySet()) { + map.put(entry.getKey(), entry.getValue()); } return new Tuple(map); } else { diff 
--git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java index ebf31210ab3f..66d9867b4140 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java @@ -36,6 +36,7 @@ import org.apache.solr.client.solrj.io.comp.StreamComparator; import org.apache.solr.client.solrj.io.eval.KmeansEvaluator; import org.apache.solr.client.solrj.io.eval.StreamEvaluator; +import org.apache.solr.client.solrj.io.eval.Matrix; import org.apache.solr.client.solrj.io.stream.expr.Explanation; import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType; import org.apache.solr.client.solrj.io.stream.expr.Expressible; @@ -129,6 +130,7 @@ public void open() throws IOException { boolean table = false; boolean distribution = false; boolean clusters = false; + boolean heat = false; for(Map.Entry entry : entries) { ++columns; @@ -139,6 +141,9 @@ public void open() throws IOException { distribution = true; } else if(name.equals("clusters")) { clusters = true; + } else if(name.equals("heat")) { + heat = true; + } Object o = entry.getValue(); @@ -176,6 +181,8 @@ public void open() throws IOException { evaluated.put(name, l); } else if(eval instanceof Tuple) { evaluated.put(name, eval); + } else if(eval instanceof Matrix) { + evaluated.put(name, eval); } } } @@ -186,7 +193,7 @@ public void open() throws IOException { //Load the values into tuples List outTuples = new ArrayList(); - if(!table && !distribution && !clusters) { + if(!table && !distribution && !clusters && !heat) { //Handle the vectors for (int i = 0; i < numTuples; i++) { Tuple tuple = new Tuple(new HashMap()); @@ -304,20 +311,96 @@ public void open() throws IOException { } } } - } else if(table){ + } else if(table) { //Handle the Tuple and List of Tuples Object o = evaluated.get("table"); - if(o instanceof List) 
{ - List tuples = (List)o; - outTuples.addAll(tuples); - } else if(o instanceof Tuple) { - outTuples.add((Tuple)o); + if (o instanceof Matrix) { + Matrix m = (Matrix) o; + List rowLabels = m.getRowLabels(); + List colLabels = m.getColumnLabels(); + double[][] data = m.getData(); + for (int i = 0; i < data.length; i++) { + String rowLabel = null; + if (rowLabels != null) { + rowLabel = rowLabels.get(i); + } else { + rowLabel = Integer.toString(i); + } + Tuple tuple = new Tuple(new HashMap()); + tuple.put("rowLabel", rowLabel); + double[] row = data[i]; + for (int j = 0; j < row.length; j++) { + String colLabel = null; + if (colLabels != null) { + colLabel = colLabels.get(j); + } else { + colLabel = "col" + Integer.toString(j); + } + + tuple.put(colLabel, data[i][j]); + } + outTuples.add(tuple); + } + } + } else if (heat) { + //Handle the Tuple and List of Tuples + Object o = evaluated.get("heat"); + if(o instanceof Matrix) { + Matrix m = (Matrix) o; + List rowLabels = m.getRowLabels(); + List colLabels = m.getColumnLabels(); + double[][] data = m.getData(); + for (int i = 0; i < data.length; i++) { + String rowLabel = null; + if (rowLabels != null) { + rowLabel = rowLabels.get(i); + } else { + rowLabel = "row"+pad(Integer.toString(i), data.length); + } + + double[] row = data[i]; + for (int j = 0; j < row.length; j++) { + Tuple tuple = new Tuple(new HashMap()); + tuple.put("y", rowLabel); + String colLabel = null; + if (colLabels != null) { + colLabel = colLabels.get(j); + } else { + colLabel = "col" + pad(Integer.toString(j), row.length); + } + tuple.put("x", colLabel); + tuple.put("z", data[i][j]); + outTuples.add(tuple); + } + } } } this.out = outTuples.iterator(); } + public static String pad(String v, int length) { + if(length < 11) { + return v; + } else if(length < 101) { + return prepend(v, 2); + } else if (length < 1001) { + return prepend(v, 3); + } else if(length < 10001){ + return prepend(v, 4); + } else { + return prepend(v, 5); + } + } + + private 
static String prepend(String v, int length) { + while(v.length() < length) { + v="0"+v; + } + + return v; + } + /** Return the stream sort - ie, the order in which records are returned */ public StreamComparator getStreamSort(){ return null; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionApiMapping.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionApiMapping.java index 43ba73732e88..74d0bbc175c7 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionApiMapping.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionApiMapping.java @@ -207,23 +207,6 @@ public String getParamSubstitute(String param) { POST, null, "set-obj-property", null), - - ADD_PACKAGE(CLUSTER_CMD, - POST,null, - "add-package",null ), - UPDATE_PACKAGE(CLUSTER_CMD, - POST,null, - "update-package",null ), - DELETE_RUNTIME_LIB(CLUSTER_CMD, - POST,null, - "delete-package",null ), - ADD_REQ_HANDLER(CLUSTER_CMD, - POST,null, - "add-requesthandler",null ), - DELETE_REQ_HANDLER(CLUSTER_CMD, - POST,null, - "delete-requesthandler",null ), - UTILIZE_NODE(CLUSTER_CMD, POST, UTILIZENODE, diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/TermsFacetMap.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/TermsFacetMap.java index ea7c2fd2c70f..c088d45b8e4a 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/TermsFacetMap.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/TermsFacetMap.java @@ -55,9 +55,6 @@ public TermsFacetMap setBucketOffset(int numToSkip) { * Defaults to 10 if not specified. */ public TermsFacetMap setLimit(int maximumBuckets) { - if (maximumBuckets < 0) { - throw new IllegalArgumentException("Parameter 'maximumBuckets' must be non-negative"); - } put("limit", maximumBuckets); return this; } @@ -147,8 +144,8 @@ public TermsFacetMap setOverRefine(int numExtraBuckets) { * Defaults to 1 if not specified. 
*/ public TermsFacetMap setMinCount(int minCount) { - if (minCount < 1) { - throw new IllegalArgumentException("Parameter 'minCount' must be a positive integer"); + if (minCount < 0) { + throw new IllegalArgumentException("Parameter 'minCount' must be a non-negative integer"); } put("mincount", minCount); return this; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/NestableJsonFacet.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/NestableJsonFacet.java index b700c2cab863..e0ee895c326d 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/NestableJsonFacet.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/NestableJsonFacet.java @@ -17,6 +17,7 @@ package org.apache.solr.client.solrj.response.json; +import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Map; @@ -36,6 +37,7 @@ public class NestableJsonFacet { private final Map queryFacetsByName; private final Map bucketBasedFacetByName; private final Map statFacetsByName; + private final Map statsByName; private final Map heatmapFacetsByName; public NestableJsonFacet(NamedList facetNL) { @@ -43,15 +45,19 @@ public NestableJsonFacet(NamedList facetNL) { bucketBasedFacetByName = new HashMap<>(); statFacetsByName = new HashMap<>(); heatmapFacetsByName = new HashMap<>(); + statsByName = new HashMap<>(); for (Map.Entry entry : facetNL) { final String key = entry.getKey(); if (getKeysToSkip().contains(key)) { continue; } else if ("count".equals(key)) { - domainCount = (int) entry.getValue(); + domainCount = ((Number) entry.getValue()).longValue(); } else if(entry.getValue() instanceof Number) { // Stat/agg facet value statFacetsByName.put(key, (Number)entry.getValue()); + statsByName.put(key, (Number) entry.getValue()); + } else if (entry.getValue() instanceof String || entry.getValue() instanceof Date) { + statsByName.put(key, entry.getValue()); } else if(entry.getValue() instanceof NamedList) 
{ // Either heatmap/query/range/terms facet final NamedList facet = (NamedList) entry.getValue(); final boolean isBucketBased = facet.get("buckets") != null; @@ -104,18 +110,34 @@ public Set getBucketBasedFacetNames() { /** * Retrieve the value for a stat or agg facet with the provided name + * @deprecated this method works only for numeric value stats, instead use {@link #getStatValue(String)} */ public Number getStatFacetValue(String name) { return statFacetsByName.get(name); } + /** + * Retrieve the value for a stat or agg with the provided name + */ + public Object getStatValue(String name) { + return statsByName.get(name); + } + /** * @return the names of any stat or agg facets that are direct descendants of this facet + * @deprecated this method returns only stats names with numeric value, instead use {@link #getStatNames()} */ public Set getStatFacetNames() { return statFacetsByName.keySet(); } + /** + * @return the names of any stat or agg that are direct descendants of this facet + */ + public Set getStatNames() { + return statsByName.keySet(); + } + /** * Retrieve a "heatmap" facet by its name */ diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java index fa35e8885770..96e53718f9c8 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java @@ -89,18 +89,14 @@ public T getClusterProperty(List key, T defaultValue) throws IOExcep return value; } - public Map getClusterProperties() throws IOException { - return getClusterProperties(new Stat()); - - } /** * Return the cluster properties * @throws IOException if there is an error reading properties from the cluster */ @SuppressWarnings("unchecked") - public Map getClusterProperties(Stat stat) throws IOException { + public Map getClusterProperties() throws IOException { try { - Map properties = (Map) 
Utils.fromJSON(client.getData(ZkStateReader.CLUSTER_PROPS, null, stat, true)); + Map properties = (Map) Utils.fromJSON(client.getData(ZkStateReader.CLUSTER_PROPS, null, new Stat(), true)); return convertCollectionDefaultsToNestedFormat(properties); } catch (KeeperException.NoNodeException e) { return Collections.emptyMap(); @@ -109,12 +105,6 @@ public Map getClusterProperties(Stat stat) throws IOException { } } - /**This applies the new map over the existing map. it's a merge operation, not an overwrite - * This applies the changes atomically over an existing object tree even if multiple nodes are - * trying to update this simultaneously - * - * @param properties The partial Object tree that needs to be applied - */ public void setClusterProperties(Map properties) throws IOException, KeeperException, InterruptedException { client.atomicUpdate(ZkStateReader.CLUSTER_PROPS, zkData -> { if (zkData == null) return Utils.toJSON(convertCollectionDefaultsToNestedFormat(properties)); diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java index 3c955f169d42..ecd33857933b 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java @@ -200,7 +200,6 @@ public class ZkStateReader implements SolrCloseable { private final ConcurrentHashMap collectionPropsWatchers = new ConcurrentHashMap<>(); private volatile SortedSet liveNodes = emptySortedSet(); - private volatile int clusterPropsVersion = -1; private volatile Map clusterProperties = Collections.emptyMap(); @@ -500,20 +499,40 @@ public Integer compareStateVersions(String coll, int version) { return collection.getZNodeVersion(); } - private final Watcher clusterPropertiesWatcher = event -> { - // session events are not change events, and do not remove the watcher - if (Watcher.Event.EventType.None.equals(event.getType())) { - return; + public 
synchronized void createClusterStateWatchersAndUpdate() throws KeeperException, + InterruptedException { + // We need to fetch the current cluster state and the set of live nodes + + log.debug("Updating cluster state from ZooKeeper... "); + + // Sanity check ZK structure. + if (!zkClient.exists(CLUSTER_STATE, true)) { + throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, + "Cannot connect to cluster at " + zkClient.getZkServerAddress() + ": cluster not found/not ready"); } + + // on reconnect of SolrZkClient force refresh and re-add watches. loadClusterProperties(); - }; + refreshLiveNodes(new LiveNodeWatcher()); + refreshLegacyClusterState(new LegacyClusterStateWatcher()); + refreshStateFormat2Collections(); + refreshCollectionList(new CollectionsChildWatcher()); + refreshAliases(aliasesManager); - public void forceRefreshClusterProps(int expectedVersion) { - log.debug("Expected version of clusterprops.json is {} , my version is {}", expectedVersion, clusterPropsVersion); - if (expectedVersion > clusterPropsVersion) { - log.info("reloading clusterprops.json"); - loadClusterProperties(); + if (securityNodeListener != null) { + addSecurityNodeWatcher(pair -> { + ConfigData cd = new ConfigData(); + cd.data = pair.first() == null || pair.first().length == 0 ? EMPTY_MAP : Utils.getDeepCopy((Map) fromJSON(pair.first()), 4, false); + cd.version = pair.second() == null ? 
-1 : pair.second().getVersion(); + securityData = cd; + securityNodeListener.run(); + }); + securityData = getSecurityProps(true); } + + collectionPropsObservers.forEach((k, v) -> { + collectionPropsWatchers.computeIfAbsent(k, PropsWatcher::new).refreshAndWatch(true); + }); } private void addSecurityNodeWatcher(final Callable> callback) @@ -1089,52 +1108,22 @@ public Map getClusterProperties() { return Collections.unmodifiableMap(clusterProperties); } - public synchronized void createClusterStateWatchersAndUpdate() throws KeeperException, - InterruptedException { - // We need to fetch the current cluster state and the set of live nodes - - log.debug("Updating cluster state from ZooKeeper... "); - - // Sanity check ZK structure. - if (!zkClient.exists(CLUSTER_STATE, true)) { - throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, - "Cannot connect to cluster at " + zkClient.getZkServerAddress() + ": cluster not found/not ready"); + private final Watcher clusterPropertiesWatcher = event -> { + // session events are not change events, and do not remove the watcher + if (Watcher.Event.EventType.None.equals(event.getType())) { + return; } - - // on reconnect of SolrZkClient force refresh and re-add watches. loadClusterProperties(); - refreshLiveNodes(new LiveNodeWatcher()); - refreshLegacyClusterState(new LegacyClusterStateWatcher()); - refreshStateFormat2Collections(); - refreshCollectionList(new CollectionsChildWatcher()); - refreshAliases(aliasesManager); - - if (securityNodeListener != null) { - addSecurityNodeWatcher(pair -> { - ConfigData cd = new ConfigData(); - cd.data = pair.first() == null || pair.first().length == 0 ? EMPTY_MAP : Utils.getDeepCopy((Map) fromJSON(pair.first()), 4, false); - cd.version = pair.second() == null ? 
-1 : pair.second().getVersion(); - securityData = cd; - securityNodeListener.run(); - }); - securityData = getSecurityProps(true); - } - - collectionPropsObservers.forEach((k, v) -> { - collectionPropsWatchers.computeIfAbsent(k, PropsWatcher::new).refreshAndWatch(true); - }); - } + }; @SuppressWarnings("unchecked") private void loadClusterProperties() { try { while (true) { try { - Stat stat = new Stat(); - byte[] data = zkClient.getData(ZkStateReader.CLUSTER_PROPS, clusterPropertiesWatcher, stat, true); + byte[] data = zkClient.getData(ZkStateReader.CLUSTER_PROPS, clusterPropertiesWatcher, new Stat(), true); this.clusterProperties = ClusterProperties.convertCollectionDefaultsToNestedFormat((Map) Utils.fromJSON(data)); - this.clusterPropsVersion = stat.getVersion(); - log.debug("Loaded cluster properties: {} to version {}", this.clusterProperties, clusterPropsVersion); + log.debug("Loaded cluster properties: {}", this.clusterProperties); for (ClusterPropertiesListener listener : clusterPropertiesListeners) { listener.onChange(getClusterProperties()); @@ -1142,7 +1131,6 @@ private void loadClusterProperties() { return; } catch (KeeperException.NoNodeException e) { this.clusterProperties = Collections.emptyMap(); - this.clusterPropsVersion = -1; log.debug("Loaded empty cluster properties"); // set an exists watch, and if the node has been created since the last call, // read the data again @@ -1155,10 +1143,6 @@ private void loadClusterProperties() { } } - public int getClusterPropsVersion() { - return clusterPropsVersion; - } - /** * Get collection properties for a given collection. If the collection is watched, simply return it from the cache, * otherwise fetch it directly from zookeeper. 
This is a convenience for {@code getCollectionProperties(collection,0)} diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java index 4ce1f5084e71..39a02428161a 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java @@ -296,7 +296,5 @@ public static EchoParamStyle get( String v ) { String JAVABIN_MIME = "application/javabin"; - String PACKAGE = "package"; - } diff --git a/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java b/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java index a2f1563725ed..088882a3f8a3 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java @@ -41,7 +41,10 @@ public interface ShardParams { /** The requested URL for this shard */ String SHARD_URL = "shard.url"; - + + /** The requested shard name */ + String SHARD_NAME = "shard.name"; + /** The Request Handler for shard requests */ String SHARDS_QT = "shards.qt"; diff --git a/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java b/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java index 3804f784dcd8..277324affda8 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java @@ -28,7 +28,6 @@ import java.util.Map; import java.util.Set; -import org.apache.solr.common.MapWriter; import org.apache.solr.common.SolrException; import org.noggit.JSONParser; import org.noggit.ObjectBuilder; @@ -39,7 +38,7 @@ import static org.apache.solr.common.util.StrUtils.formatString; import static org.apache.solr.common.util.Utils.toJSON; -public class CommandOperation implements MapWriter { +public class CommandOperation { public final String name; private Object 
commandData;//this is most often a map private List errors = new ArrayList<>(); @@ -387,10 +386,4 @@ public Integer getInt(String name) { if (o == null) return null; return getInt(name, null); } - - @Override - public void writeMap(EntryWriter ew) throws IOException { - ew.put(name, commandData); - ew.putIfNotNull("errors", errors); - } } diff --git a/solr/solrj/src/java/org/apache/solr/common/util/PathTrie.java b/solr/solrj/src/java/org/apache/solr/common/util/PathTrie.java index 1d64834f362e..742c59dadea2 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/PathTrie.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/PathTrie.java @@ -26,24 +26,25 @@ import static java.util.Collections.emptyList; -/**A utility class to efficiently parse/store/lookup hierarchical paths which are templatized +/** + * A utility class to efficiently parse/store/lookup hierarchical paths which are templatized * like /collections/{collection}/shards/{shard}/{replica} */ public class PathTrie { private final Set reserved = new HashSet<>(); Node root = new Node(emptyList(), null); - public PathTrie() { } + public PathTrie() { + } public PathTrie(Set reserved) { this.reserved.addAll(reserved); } - public void insert(String path, Map replacements, T o) { List parts = getPathSegments(path); - insert(parts,replacements, o); + insert(parts, replacements, o); } public void insert(List parts, Map replacements, T o) { @@ -122,6 +123,9 @@ class Node { private synchronized void insert(List path, T o) { String part = path.get(0); Node matchedChild = null; + if ("*".equals(name)) { + return; + } if (children == null) children = new ConcurrentHashMap<>(); String varName = templateName(part); @@ -169,9 +173,8 @@ public T lookup(List pieces, int i, Map templateValues) } /** - * - * @param pathSegments pieces in the url /a/b/c has pieces as 'a' , 'b' , 'c' - * @param index current index of the pieces that we are looking at in /a/b/c 0='a' and 1='b' + * @param pathSegments pieces in the 
url /a/b/c has pieces as 'a' , 'b' , 'c' + * @param index current index of the pieces that we are looking at in /a/b/c 0='a' and 1='b' * @param templateVariables The mapping of template variable to its value * @param availableSubPaths If not null , available sub paths will be returned in this set */ @@ -179,13 +182,36 @@ public T lookup(List pathSegments, int index, Map templa if (templateName != null) templateVariables.put(templateName, pathSegments.get(index - 1)); if (pathSegments.size() < index + 1) { findAvailableChildren("", availableSubPaths); + if (obj == null) {//this is not a leaf node + Node n = children.get("*"); + if (n != null) { + return n.obj; + } + + } return obj; } String piece = pathSegments.get(index); - if (children == null) return null; + if (children == null) { + return null; + } Node n = children.get(piece); if (n == null && !reserved.contains(piece)) n = children.get(""); - if (n == null) return null; + if (n == null) { + n = children.get("*"); + if (n != null) { + StringBuffer sb = new StringBuffer(); + for (int i = index; i < pathSegments.size(); i++) { + sb.append("/").append(pathSegments.get(i)); + } + templateVariables.put("*", sb.toString()); + return n.obj; + + } + } + if (n == null) { + return null; + } return n.lookup(pathSegments, index + 1, templateVariables, availableSubPaths); } } diff --git a/solr/solrj/src/java/org/apache/solr/common/util/StrUtils.java b/solr/solrj/src/java/org/apache/solr/common/util/StrUtils.java index 9a68c3bacefd..c0b19f57f7a6 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/StrUtils.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/StrUtils.java @@ -40,15 +40,6 @@ public static List splitSmart(String s, char separator) { } - static final String DELIM_CHARS = "/:;.,%#"; - public static List split(String s, char sep){ - if(DELIM_CHARS.indexOf(s.charAt(0)) >-1){ - sep = s.charAt(0); - } - return splitSmart(s,sep, true); - - } - public static List splitSmart(String s, char separator, 
boolean trimEmpty) { List l = splitSmart(s, separator); if(trimEmpty){ @@ -157,7 +148,7 @@ public static List splitSmart(String s, String separator, boolean decode */ public static List splitFileNames(String fileNames) { if (fileNames == null) - return Collections.emptyList(); + return Collections.emptyList(); List result = new ArrayList<>(); for (String file : fileNames.split("(? Collections.synchronizedList(new ArrayList<>()); public static final Function NEW_HASHSET_FUN = o -> new HashSet<>(); private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - + public static Map getDeepCopy(Map map, int maxDepth) { return getDeepCopy(map, maxDepth, true, false); } @@ -100,18 +100,21 @@ public static Map getDeepCopy(Map map, int maxDepth, boolean mutable) { return getDeepCopy(map, maxDepth, mutable, false); } - public static final Function MAPWRITEROBJBUILDER = jsonParser -> { - try { - return new ObjectBuilder(jsonParser) { - @Override - public Object newObject() { - return new LinkedHashMapWriter(); - } - }; - } catch (IOException e) { - throw new RuntimeException(e); + public static Map getDeepCopy(Map map, int maxDepth, boolean mutable, boolean sorted) { + if(map == null) return null; + if (maxDepth < 1) return map; + Map copy; + if (sorted) { + copy = new TreeMap(); + } else { + copy = map instanceof LinkedHashMap? new LinkedHashMap(map.size()): new HashMap(map.size()); } - }; + for (Object o : map.entrySet()) { + Map.Entry e = (Map.Entry) o; + copy.put(e.getKey(), makeDeepCopy(e.getValue(),maxDepth, mutable, sorted)); + } + return mutable ? 
copy : Collections.unmodifiableMap(copy); + } public static void forEachMapEntry(Object o, String path, BiConsumer fun) { Object val = Utils.getObjectByPath(o, false, path); @@ -141,40 +144,6 @@ public MapWriter.EntryWriter put(CharSequence k, Object v) { ((Map) o).forEach((k, v) -> fun.accept(k, v)); } } - public static final Function MAPOBJBUILDER = jsonParser -> { - try { - return new ObjectBuilder(jsonParser) { - @Override - public Object newObject() { - return new HashMap(); - } - }; - } catch (IOException e) { - throw new RuntimeException(e); - } - }; - public static final Pattern ARRAY_ELEMENT_INDEX = Pattern - .compile("(\\S*?)\\[([-]?\\d+)\\]"); - - public static Map getDeepCopy(Map map, int maxDepth, boolean mutable, boolean sorted) { - if (map == null) return null; - if (maxDepth < 1) return map; - Map copy; - if (sorted) { - copy = new TreeMap(); - } else { - copy = map instanceof LinkedHashMap ? new LinkedHashMap(map.size()) : new HashMap(map.size()); - } - for (Object o : map.entrySet()) { - Map.Entry e = (Map.Entry) o; - copy.put(e.getKey(), makeDeepCopy(e.getValue(), maxDepth, mutable, sorted)); - } - return mutable ? 
copy : Collections.unmodifiableMap(copy); - } - - public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable) { - return getDeepCopy(c, maxDepth, mutable, false); - } private static Object makeDeepCopy(Object v, int maxDepth, boolean mutable, boolean sorted) { if (v instanceof MapWriter && maxDepth > 1) { @@ -182,7 +151,7 @@ private static Object makeDeepCopy(Object v, int maxDepth, boolean mutable, bool } else if (v instanceof IteratorWriter && maxDepth > 1) { v = ((IteratorWriter) v).toList(new ArrayList<>()); if (sorted) { - Collections.sort((List) v); + Collections.sort((List)v); } } @@ -194,6 +163,29 @@ private static Object makeDeepCopy(Object v, int maxDepth, boolean mutable, bool return v; } + public static InputStream toJavabin(Object o) throws IOException { + try (final JavaBinCodec jbc = new JavaBinCodec()) { + BinaryRequestWriter.BAOS baos = new BinaryRequestWriter.BAOS(); + jbc.marshal(o,baos); + return new ByteBufferInputStream(ByteBuffer.wrap(baos.getbuf(),0,baos.size())); + } + } + + public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable) { + return getDeepCopy(c, maxDepth, mutable, false); + } + + public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable, boolean sorted) { + if (c == null || maxDepth < 1) return c; + Collection result = c instanceof Set ? + ( sorted? new TreeSet() : new HashSet()) : new ArrayList(); + for (Object o : c) result.add(makeDeepCopy(o, maxDepth, mutable, sorted)); + if (sorted && (result instanceof List)) { + Collections.sort((List)result); + } + return mutable ? result : result instanceof Set ? 
unmodifiableSet((Set) result) : unmodifiableList((List) result); + } + public static void writeJson(Object o, OutputStream os, boolean indent) throws IOException { writeJson(o, new OutputStreamWriter(os, UTF_8), indent) .flush(); @@ -207,14 +199,37 @@ public static Writer writeJson(Object o, Writer writer, boolean indent) throws I return writer; } - public static InputStream toJavabin(Object o) throws IOException { - try (final JavaBinCodec jbc = new JavaBinCodec()) { - BinaryRequestWriter.BAOS baos = new BinaryRequestWriter.BAOS(); - jbc.marshal(o, baos); - return new ByteBufferInputStream(ByteBuffer.wrap(baos.getbuf(), 0, baos.size())); + private static class MapWriterJSONWriter extends JSONWriter { + + public MapWriterJSONWriter(CharArr out, int indentSize) { + super(out, indentSize); + } + + @Override + public void handleUnknownClass(Object o) { + if (o instanceof MapWriter) { + Map m = ((MapWriter)o).toMap(new LinkedHashMap<>()); + write(m); + } else { + super.handleUnknownClass(o); + } } } + public static byte[] toJSON(Object o) { + if(o == null) return new byte[0]; + CharArr out = new CharArr(); + if (!(o instanceof List) && !(o instanceof Map)) { + if (o instanceof MapWriter) { + o = ((MapWriter)o).toMap(new LinkedHashMap<>()); + } else if(o instanceof IteratorWriter){ + o = ((IteratorWriter)o).toList(new ArrayList<>()); + } + } + new MapWriterJSONWriter(out, 2).write(o); // indentation by default + return toUTF8(out); + } + public static String toJSONString(Object o) { return new String(toJSON(o), StandardCharsets.UTF_8); } @@ -259,29 +274,15 @@ public static Map makeMap(boolean skipNulls, Object... keyVals) return propMap; } - public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable, boolean sorted) { - if (c == null || maxDepth < 1) return c; - Collection result = c instanceof Set ? - (sorted ? 
new TreeSet() : new HashSet()) : new ArrayList(); - for (Object o : c) result.add(makeDeepCopy(o, maxDepth, mutable, sorted)); - if (sorted && (result instanceof List)) { - Collections.sort((List) result); - } - return mutable ? result : result instanceof Set ? unmodifiableSet((Set) result) : unmodifiableList((List) result); + public static Object fromJSON(InputStream is){ + return fromJSON(new InputStreamReader(is, UTF_8)); } - - public static byte[] toJSON(Object o) { - if (o == null) return new byte[0]; - CharArr out = new CharArr(); - if (!(o instanceof List) && !(o instanceof Map)) { - if (o instanceof MapWriter) { - o = ((MapWriter) o).toMap(new LinkedHashMap<>()); - } else if (o instanceof IteratorWriter) { - o = ((IteratorWriter) o).toList(new ArrayList<>()); - } + public static Object fromJSON(Reader is){ + try { + return STANDARDOBJBUILDER.apply(getJSONParser(is)).getVal(); + } catch (IOException e) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error", e); } - new MapWriterJSONWriter(out, 2).write(o); // indentation by default - return toUTF8(out); } @@ -292,14 +293,35 @@ public static byte[] toJSON(Object o) { throw new RuntimeException(e); } }; + public static final Function MAPWRITEROBJBUILDER = jsonParser -> { + try { + return new ObjectBuilder(jsonParser){ + @Override + public Object newObject() { + return new LinkedHashMapWriter(); + } + }; + } catch (IOException e) { + throw new RuntimeException(e); + } + }; - public static Object fromJSON(InputStream is) { - return fromJSON(new InputStreamReader(is, UTF_8)); - } + public static final Function MAPOBJBUILDER = jsonParser -> { + try { + return new ObjectBuilder(jsonParser){ + @Override + public Object newObject() { + return new HashMap(); + } + }; + } catch (IOException e) { + throw new RuntimeException(e); + } + }; - public static Object fromJSON(Reader is) { + public static Object fromJSON(InputStream is, Function objBuilderProvider) { try { - return 
STANDARDOBJBUILDER.apply(getJSONParser(is)).getVal(); + return objBuilderProvider.apply(getJSONParser((new InputStreamReader(is, StandardCharsets.UTF_8)))).getVal(); } catch (IOException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error", e); } @@ -314,19 +336,10 @@ public static Object fromJSONResource(String resourceName) { return fromJSON(stream); } catch (IOException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, - "Resource error: " + e.getMessage(), e); + "Resource error: " + e.getMessage(), e); } } - - public static Object fromJSON(InputStream is, Function objBuilderProvider) { - try { - return objBuilderProvider.apply(getJSONParser((new InputStreamReader(is, StandardCharsets.UTF_8)))).getVal(); - } catch (IOException e) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error", e); - } - } - - public static JSONParser getJSONParser(Reader reader) { + public static JSONParser getJSONParser(Reader reader){ JSONParser parser = new JSONParser(reader); parser.setFlags(parser.getFlags() | JSONParser.ALLOW_MISSING_COLON_COMMA_BEFORE_OBJECT | @@ -334,11 +347,11 @@ public static JSONParser getJSONParser(Reader reader) { return parser; } - public static Object fromJSONString(String json) { + public static Object fromJSONString(String json) { try { return STANDARDOBJBUILDER.apply(getJSONParser(new StringReader(json))).getVal(); } catch (Exception e) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error : " + json, e); + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error : "+ json, e ); } } @@ -350,10 +363,10 @@ public static Object getObjectByPath(Object root, boolean onlyPrimitive, String public static boolean setObjectByPath(Object root, String hierarchy, Object value) { List parts = StrUtils.splitSmart(hierarchy, '/', true); - return setObjectByPath(root, parts, value, true); + return setObjectByPath(root, parts, value); } - public static boolean 
setObjectByPath(Object root, List hierarchy, Object value, boolean insertMissing) { + public static boolean setObjectByPath(Object root, List hierarchy, Object value) { if (root == null) return false; if (!isMapLike(root)) throw new RuntimeException("must be a Map or NamedList"); Object obj = root; @@ -369,10 +382,7 @@ public static boolean setObjectByPath(Object root, List hierarchy, Objec } if (i < hierarchy.size() - 1) { Object o = getVal(obj, s, -1); - if (o == null) { - if (insertMissing) insertItem(o = new LinkedHashMap<>(), obj, s); - else return false; - } + if (o == null) return false; if (idx > -1) { List l = (List) o; o = idx < l.size() ? l.get(idx) : null; @@ -381,7 +391,14 @@ public static boolean setObjectByPath(Object root, List hierarchy, Objec obj = o; } else { if (idx == -2) { - insertItem(value, obj, s); + if (obj instanceof NamedList) { + NamedList namedList = (NamedList) obj; + int location = namedList.indexOf(s, 0); + if (location == -1) namedList.add(s, value); + else namedList.setVal(location, value); + } else if (obj instanceof Map) { + ((Map) obj).put(s, value); + } return true; } else { Object v = getVal(obj, s, -1); @@ -405,20 +422,10 @@ public static boolean setObjectByPath(Object root, List hierarchy, Objec } - private static void insertItem(Object value, Object container, String name) { - if (container instanceof NamedList) { - NamedList namedList = (NamedList) container; - int location = namedList.indexOf(name, 0); - if (location == -1) namedList.add(name, value); - else namedList.setVal(location, value); - } else if (container instanceof Map) { - ((Map) container).put(name, value); - } - } public static Object getObjectByPath(Object root, boolean onlyPrimitive, List hierarchy) { - if (root == null) return null; - if (!isMapLike(root)) return null; + if(root == null) return null; + if(!isMapLike(root)) return null; Object obj = root; for (int i = 0; i < hierarchy.size(); i++) { int idx = -1; @@ -511,7 +518,6 @@ private static Object 
getVal(Object obj, String key, int idx) { try { ((MapWriter) obj).writeMap(new MapWriter.EntryWriter() { int count = -1; - @Override public MapWriter.EntryWriter put(CharSequence k, Object v) { if (result[0] != null) return this; @@ -527,14 +533,15 @@ public MapWriter.EntryWriter put(CharSequence k, Object v) { throw new RuntimeException(e); } return result[0]; - } else if (obj instanceof Map) return ((Map) obj).get(key); + } + else if (obj instanceof Map) return ((Map) obj).get(key); else throw new RuntimeException("must be a NamedList or Map"); } /** * If the passed entity has content, make sure it is fully * read and closed. - * + * * @param entity to consume or null */ public static void consumeFully(HttpEntity entity) { @@ -555,21 +562,31 @@ public static void consumeFully(HttpEntity entity) { /** * Make sure the InputStream is fully read. - * + * * @param is to read * @throws IOException on problem with IO */ private static void readFully(InputStream is) throws IOException { is.skip(is.available()); - while (is.read() != -1) { + while (is.read() != -1) {} + } + + public static Map getJson(DistribStateManager distribStateManager, String path) throws InterruptedException, IOException, KeeperException { + VersionedData data = null; + try { + data = distribStateManager.getData(path); + } catch (KeeperException.NoNodeException | NoSuchElementException e) { + return Collections.emptyMap(); } + if (data == null || data.getData() == null || data.getData().length == 0) return Collections.emptyMap(); + return (Map) Utils.fromJSON(data.getData()); } /** * Assumes data in ZooKeeper is a JSON string, deserializes it and returns as a Map * - * @param zkClient the zookeeper client - * @param path the path to the znode being read + * @param zkClient the zookeeper client + * @param path the path to the znode being read * @param retryOnConnLoss whether to retry the operation automatically on connection loss, see {@link 
org.apache.solr.common.cloud.ZkCmdExecutor#retryOperation(ZkOperation)} * @return a Map if the node exists and contains valid JSON or an empty map if znode does not exist or has a null data */ @@ -585,23 +602,39 @@ public static Map getJson(SolrZkClient zkClient, String path, bo return Collections.emptyMap(); } - public static Map getJson(DistribStateManager distribStateManager, String path) throws InterruptedException, IOException, KeeperException { - VersionedData data = null; - try { - data = distribStateManager.getData(path); - } catch (KeeperException.NoNodeException | NoSuchElementException e) { - return Collections.emptyMap(); + public static final Pattern ARRAY_ELEMENT_INDEX = Pattern + .compile("(\\S*?)\\[([-]?\\d+)\\]"); + + public static SpecProvider getSpec(final String name) { + return () -> { + return ValidatingJsonMap.parse(CommonParams.APISPEC_LOCATION + name + ".json", CommonParams.APISPEC_LOCATION); + }; + } + + public static String parseMetricsReplicaName(String collectionName, String coreName) { + if (collectionName == null || !coreName.startsWith(collectionName)) { + return null; + } else { + // split "collection1_shard1_1_replica1" into parts + if (coreName.length() > collectionName.length()) { + String str = coreName.substring(collectionName.length() + 1); + int pos = str.lastIndexOf("_replica"); + if (pos == -1) { // ?? no _replicaN part ?? + return str; + } else { + return str.substring(pos + 1); + } + } else { + return null; + } } - if (data == null || data.getData() == null || data.getData().length == 0) return Collections.emptyMap(); - return (Map) Utils.fromJSON(data.getData()); } - /** - * Applies one json over other. The 'input' is applied over the sink - * The values in input are applied over the values in 'sink' . If a value is 'null' + /**Applies one json over other. The 'input' is applied over the sink + * The values in input isapplied over the values in 'sink' . 
If a value is 'null' * that value is removed from sink * - * @param sink the original json object to start with. Ensure that this Map is mutable + * @param sink the original json object to start with. Ensure that this Map is mutable * @param input the json with new values * @return whether there was any change made to sink or not. */ @@ -639,62 +672,20 @@ public static boolean mergeJson(Map sink, Map in return isModified; } - public static SpecProvider getSpec(final String name) { - return () -> { - return ValidatingJsonMap.parse(CommonParams.APISPEC_LOCATION + name + ".json", CommonParams.APISPEC_LOCATION); - }; - } - - public static String parseMetricsReplicaName(String collectionName, String coreName) { - if (collectionName == null || !coreName.startsWith(collectionName)) { - return null; - } else { - // split "collection1_shard1_1_replica1" into parts - if (coreName.length() > collectionName.length()) { - String str = coreName.substring(collectionName.length() + 1); - int pos = str.lastIndexOf("_replica"); - if (pos == -1) { // ?? no _replicaN part ?? - return str; - } else { - return str.substring(pos + 1); - } - } else { - return null; - } - } - } - public static String getBaseUrlForNodeName(final String nodeName, String urlScheme) { final int _offset = nodeName.indexOf("_"); if (_offset < 0) { throw new IllegalArgumentException("nodeName does not contain expected '_' separator: " + nodeName); } - final String hostAndPort = nodeName.substring(0, _offset); + final String hostAndPort = nodeName.substring(0,_offset); try { - final String path = URLDecoder.decode(nodeName.substring(1 + _offset), "UTF-8"); + final String path = URLDecoder.decode(nodeName.substring(1+_offset), "UTF-8"); return urlScheme + "://" + hostAndPort + (path.isEmpty() ? 
"" : ("/" + path)); } catch (UnsupportedEncodingException e) { throw new IllegalStateException("JVM Does not seem to support UTF-8", e); } } - private static class MapWriterJSONWriter extends JSONWriter { - - public MapWriterJSONWriter(CharArr out, int indentSize) { - super(out, indentSize); - } - - @Override - public void handleUnknownClass(Object o) { - if (o instanceof MapWriter) { - Map m = ((MapWriter) o).toMap(new LinkedHashMap<>()); - write(m); - } else { - super.handleUnknownClass(o); - } - } - } - public static long time(TimeSource timeSource, TimeUnit unit) { return unit.convert(timeSource.getTimeNs(), TimeUnit.NANOSECONDS); } diff --git a/solr/solrj/src/java/org/apache/solr/common/util/ValidatingJsonMap.java b/solr/solrj/src/java/org/apache/solr/common/util/ValidatingJsonMap.java index 28c001935df0..b5375365fead 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/ValidatingJsonMap.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/ValidatingJsonMap.java @@ -31,6 +31,7 @@ import java.util.Map; import java.util.Set; +import org.apache.solr.common.NavigableObject; import org.apache.solr.common.SolrException; import org.noggit.JSONParser; import org.noggit.ObjectBuilder; @@ -39,7 +40,7 @@ import static java.util.Collections.unmodifiableList; import static java.util.Collections.unmodifiableSet; -public class ValidatingJsonMap implements Map { +public class ValidatingJsonMap implements Map, NavigableObject { private static final String INCLUDE = "#include"; private static final String RESOURCE_EXTENSION = ".json"; diff --git a/solr/solrj/src/resources/apispec/cluster.Commands.json b/solr/solrj/src/resources/apispec/cluster.Commands.json index af7c19da2722..069cd1d3e8d6 100644 --- a/solr/solrj/src/resources/apispec/cluster.Commands.json +++ b/solr/solrj/src/resources/apispec/cluster.Commands.json @@ -169,47 +169,6 @@ "required": [ "name" ] - }, - "add-package": { - "documentation": "", - "description" : "Add a package to the classpath", - 
"#include": "cluster.Commands.runtimelib.properties" - }, - "update-package": { - "documentation": "", - "description" : "Update the jar details", - "#include": "cluster.Commands.runtimelib.properties" - }, - "delete-package": { - "documentation": "", - "description" : "delete a lib", - "type": "string" - }, - "add-requesthandler": { - "type": "object", - "documentation": "", - "description" : "Create a node level request handler", - "properties": { - "name": { - "type": "string", - "description": "Name of the request handler. This is the path" - }, - "class": { - "type": "string", - "description": "The class name" - }, - "package" : { - "type": "string", - "description": " The package from where the plugin can be loaded from" - } - }, - "required": ["name", "class"], - "additionalProperties": true - }, - "delete-requesthandler" : { - "description" : "delete a requesthandler", - "type": "string" } - } } diff --git a/solr/solrj/src/resources/apispec/cluster.Commands.runtimelib.properties.json b/solr/solrj/src/resources/apispec/cluster.Commands.runtimelib.properties.json deleted file mode 100644 index ab334b56c548..000000000000 --- a/solr/solrj/src/resources/apispec/cluster.Commands.runtimelib.properties.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "type": "object", - "properties": { - "name": { - "type": "string", - "description": "A name for the library" - }, - "url": { - "type": "string", - "description": "The remote url" - }, - "sha256": { - "type": "string", - "description": "The sha256 hash of the jar" - }, - "sig": { - "type": "string", - "description": "the signature of the jar" - } - }, - "required" : ["name","url","sha256"] - -} \ No newline at end of file diff --git a/solr/solrj/src/resources/apispec/core.config.Commands.addRequestHandler.properties.json b/solr/solrj/src/resources/apispec/core.config.Commands.addRequestHandler.properties.json index 6ee14981593c..731c3d857914 100644 --- 
a/solr/solrj/src/resources/apispec/core.config.Commands.addRequestHandler.properties.json +++ b/solr/solrj/src/resources/apispec/core.config.Commands.addRequestHandler.properties.json @@ -10,7 +10,7 @@ "description": "The request handler class. Class names do not need to be fully qualified if they are included with Solr, so you can abbreviate the name as 'solr.SearchHandler'. Custom or third-party class names may need to be fully qualified, however." }, "runtimeLib": { - "type": "string", + "type": "boolean", "description": "An optional parameter to use a custom .jar file that has been uploaded to Solr's blobstore. This additionally requires that the .jar has also been registered with the 'add-runtimelib' command, which is one of the available commands for the Config API." }, "startup": { diff --git a/solr/solrj/src/resources/apispec/core.config.Commands.generic.json b/solr/solrj/src/resources/apispec/core.config.Commands.generic.json index 2ebfdf83e23a..9d2b01d03a45 100644 --- a/solr/solrj/src/resources/apispec/core.config.Commands.generic.json +++ b/solr/solrj/src/resources/apispec/core.config.Commands.generic.json @@ -10,7 +10,7 @@ "description": "The configuration item class. Class names do not need to be fully qualified if they are included with Solr, so you can abbreviate the name as 'solr.SearchHandler'. Custom or third-party class names may need to be fully qualified, however." }, "runtimeLib": { - "type": "string", + "type": "boolean", "description": "An optional parameter to use a custom .jar file that has been uploaded to Solr's blobstore. This additionally requires that the .jar has also been registered with the 'add-runtimelib' command, which is one of the available commands for the Config API." 
} }, diff --git a/solr/solrj/src/resources/apispec/core.config.json b/solr/solrj/src/resources/apispec/core.config.json index 2324821e1954..81e7d54d6518 100644 --- a/solr/solrj/src/resources/apispec/core.config.json +++ b/solr/solrj/src/resources/apispec/core.config.json @@ -12,8 +12,7 @@ "/config/jmx", "/config/requestDispatcher", "/config/znodeVersion", - "/config/{plugin}", - "/config/{plugin}/{pluginName}" + "/config/{plugin}" ] } } diff --git a/solr/solrj/src/resources/apispec/node.blob.GET.json b/solr/solrj/src/resources/apispec/node.blob.GET.json deleted file mode 100644 index 273333e8414c..000000000000 --- a/solr/solrj/src/resources/apispec/node.blob.GET.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "methods": [ - "GET" - ], - "url": { - "paths": [ - "/node/blob", - "/node/blob/{sha256}" - ] - } -} diff --git a/solr/solrj/src/resources/apispec/node.ext.json b/solr/solrj/src/resources/apispec/node.ext.json deleted file mode 100644 index 161b2aa66130..000000000000 --- a/solr/solrj/src/resources/apispec/node.ext.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "methods": [ - "POST", - "GET", - "DELETE" - ], - "url": { - "paths": [ - "/node/ext/{handlerName}", - "/node/ext" - ] - } -} diff --git a/solr/solrj/src/test/org/apache/solr/client/ref_guide_examples/JsonRequestApiTest.java b/solr/solrj/src/test/org/apache/solr/client/ref_guide_examples/JsonRequestApiTest.java index 5a64c3f0f6e9..5d78d069a31e 100644 --- a/solr/solrj/src/test/org/apache/solr/client/ref_guide_examples/JsonRequestApiTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/ref_guide_examples/JsonRequestApiTest.java @@ -20,6 +20,7 @@ import java.io.File; import java.util.ArrayList; import java.util.Collection; +import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -34,10 +35,10 @@ import org.apache.solr.client.solrj.request.json.QueryFacetMap; import org.apache.solr.client.solrj.request.json.RangeFacetMap; import 
org.apache.solr.client.solrj.request.json.TermsFacetMap; -import org.apache.solr.client.solrj.response.json.BucketJsonFacet; -import org.apache.solr.client.solrj.response.json.NestableJsonFacet; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.response.UpdateResponse; +import org.apache.solr.client.solrj.response.json.BucketJsonFacet; +import org.apache.solr.client.solrj.response.json.NestableJsonFacet; import org.apache.solr.cloud.SolrCloudTestCase; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.params.ModifiableSolrParams; @@ -455,6 +456,7 @@ public void testStatFacet1() throws Exception { .setQuery("memory") .withFilter("inStock:true") .withStatFacet("avg_price", "avg(price)") + .withStatFacet("min_manufacturedate_dt", "min(manufacturedate_dt)") .withStatFacet("num_suppliers", "unique(manu_exact)") .withStatFacet("median_weight", "percentile(weight,50)"); QueryResponse queryResponse = request.process(solrClient, COLLECTION_NAME); @@ -464,9 +466,13 @@ public void testStatFacet1() throws Exception { assertEquals(4, queryResponse.getResults().getNumFound()); assertEquals(4, queryResponse.getResults().size()); final NestableJsonFacet topLevelFacetingData = queryResponse.getJsonFacetingResponse(); - assertEquals(146.66, (double) topLevelFacetingData.getStatFacetValue("avg_price"), 0.5); - assertEquals(3, topLevelFacetingData.getStatFacetValue("num_suppliers")); - assertEquals(352.0, (double) topLevelFacetingData.getStatFacetValue("median_weight"), 0.5); + assertEquals(146.66, (double) topLevelFacetingData.getStatValue("avg_price"), 0.5); + assertEquals(3, topLevelFacetingData.getStatValue("num_suppliers")); + assertEquals(352.0, (double) topLevelFacetingData.getStatValue("median_weight"), 0.5); + + Object val = topLevelFacetingData.getStatValue("min_manufacturedate_dt"); + assertTrue(val instanceof Date); + assertEquals("2006-02-13T15:26:37Z", ((Date)val).toInstant().toString()); } @Test @@ 
-478,6 +484,7 @@ public void testStatFacetSimple() throws Exception { .setQuery("*:*") .withFilter("price:[1.0 TO *]") .withFilter("popularity:[0 TO 10]") + .withStatFacet("min_manu_id_s", "min(manu_id_s)") .withStatFacet("avg_value", "avg(div(popularity,price))"); QueryResponse queryResponse = request.process(solrClient, COLLECTION_NAME); //end::solrj-json-metrics-facet-simple[] @@ -486,7 +493,10 @@ public void testStatFacetSimple() throws Exception { assertEquals(13, queryResponse.getResults().getNumFound()); assertEquals(10, queryResponse.getResults().size()); final NestableJsonFacet topLevelFacetingData = queryResponse.getJsonFacetingResponse(); - assertEquals(0.036, (double) topLevelFacetingData.getStatFacetValue("avg_value"), 0.1); + assertEquals(0.036, (double) topLevelFacetingData.getStatValue("avg_value"), 0.1); + Object val = topLevelFacetingData.getStatValue("min_manu_id_s"); + assertTrue(val instanceof String); + assertEquals("apple", val.toString()); } @Test @@ -511,7 +521,7 @@ public void testStatFacetExpanded() throws Exception { assertEquals(13, queryResponse.getResults().getNumFound()); assertEquals(10, queryResponse.getResults().size()); final NestableJsonFacet topLevelFacetingData = queryResponse.getJsonFacetingResponse(); - assertEquals(0.108, (double) topLevelFacetingData.getStatFacetValue("avg_value"), 0.1); + assertEquals(0.108, (double) topLevelFacetingData.getStatValue("avg_value"), 0.1); } @Test @@ -551,7 +561,7 @@ public void testQueryFacetExpanded() throws Exception { assertEquals(10, queryResponse.getResults().size()); final NestableJsonFacet topLevelFacetingData = queryResponse.getJsonFacetingResponse(); assertEquals(2, topLevelFacetingData.getQueryFacet("high_popularity").getCount()); - assertEquals(199.5, topLevelFacetingData.getQueryFacet("high_popularity").getStatFacetValue("average_price")); + assertEquals(199.5, topLevelFacetingData.getQueryFacet("high_popularity").getStatValue("average_price")); } @Test diff --git 
a/solr/solrj/src/test/org/apache/solr/client/solrj/GetByIdTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/GetByIdTest.java index 6085a08c5dd5..3078a0a0560c 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/GetByIdTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/GetByIdTest.java @@ -18,7 +18,7 @@ import java.util.Arrays; -import org.apache.solr.SolrJettyTestBase; +import org.apache.solr.EmbeddedSolrServerTestBase; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.params.CommonParams; @@ -27,13 +27,13 @@ import org.junit.BeforeClass; import org.junit.Test; -public class GetByIdTest extends SolrJettyTestBase { - +public class GetByIdTest extends EmbeddedSolrServerTestBase { + @BeforeClass public static void beforeClass() throws Exception { initCore(); } - + @Before @Override public void setUp() throws Exception { @@ -43,39 +43,39 @@ public void setUp() throws Exception { sdoc("id", "1", "term_s", "Microsoft", "term2_s", "MSFT"), sdoc("id", "2", "term_s", "Apple", "term2_s", "AAPL"), sdoc("id", "3", "term_s", "Yahoo", "term2_s", "YHOO"))); - + getSolrClient().commit(true, true); } - + @Test public void testGetId() throws Exception { SolrDocument rsp = getSolrClient().getById("0"); assertNull(rsp); - + rsp = getSolrClient().getById("1"); assertEquals("1", rsp.get("id")); assertEquals("Microsoft", rsp.get("term_s")); assertEquals("MSFT", rsp.get("term2_s")); - rsp = getSolrClient().getById("2"); + rsp = getSolrClient().getById("2"); assertEquals("2", rsp.get("id")); assertEquals("Apple", rsp.get("term_s")); assertEquals("AAPL", rsp.get("term2_s")); } - + @Test public void testGetIdWithParams() throws Exception { final SolrParams ID_FL_ONLY = params(CommonParams.FL, "id"); - + SolrDocument rsp = getSolrClient().getById("0", ID_FL_ONLY); assertNull(rsp); - + rsp = getSolrClient().getById("1", ID_FL_ONLY); assertEquals("1", rsp.get("id")); assertNull("This field 
should have been removed from the response.", rsp.get("term_s")); assertNull("This field should have been removed from the response.", rsp.get("term2_s")); - rsp = getSolrClient().getById("2", ID_FL_ONLY); + rsp = getSolrClient().getById("2", ID_FL_ONLY); assertEquals("2", rsp.get("id")); assertNull("This field should have been removed from the response.", rsp.get("term_s")); assertNull("This field should have been removed from the response.", rsp.get("term2_s")); @@ -88,25 +88,25 @@ public void testGetIds() throws Exception { assertEquals("1", rsp.get(0).get("id")); assertEquals("Microsoft", rsp.get(0).get("term_s")); assertEquals("MSFT", rsp.get(0).get("term2_s")); - + assertEquals("2", rsp.get(1).get("id")); assertEquals("Apple", rsp.get(1).get("term_s")); assertEquals("AAPL", rsp.get(1).get("term2_s")); - + assertEquals("3", rsp.get(2).get("id")); assertEquals("Yahoo", rsp.get(2).get("term_s")); assertEquals("YHOO", rsp.get(2).get("term2_s")); } - + @Test public void testGetIdsWithParams() throws Exception { SolrDocumentList rsp = getSolrClient().getById(Arrays.asList("0", "1", "2"), params(CommonParams.FL, "id")); assertEquals(2, rsp.getNumFound()); - + assertEquals("1", rsp.get(0).get("id")); assertNull("This field should have been removed from the response.", rsp.get(0).get("term_s")); assertNull("This field should have been removed from the response.", rsp.get(0).get("term2_s")); - + assertEquals("2", rsp.get(1).get("id")); assertNull("This field should have been removed from the response.", rsp.get(1).get("term_s")); assertNull("This field should have been removed from the response.", rsp.get(1).get("term2_s")); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/LargeVolumeTestBase.java b/solr/solrj/src/test/org/apache/solr/client/solrj/LargeVolumeTestBase.java index 8f43c3314c5f..eb1dbc53c124 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/LargeVolumeTestBase.java +++ 
b/solr/solrj/src/test/org/apache/solr/client/solrj/LargeVolumeTestBase.java @@ -16,7 +16,12 @@ */ package org.apache.solr.client.solrj; -import org.apache.solr.SolrJettyTestBase; +import java.io.IOException; +import java.lang.invoke.MethodHandles; +import java.util.ArrayList; +import java.util.List; + +import org.apache.solr.EmbeddedSolrServerTestBase; import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.response.UpdateResponse; @@ -25,16 +30,11 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.lang.invoke.MethodHandles; -import java.util.ArrayList; -import java.util.List; - /** * * @since solr 1.3 */ -public abstract class LargeVolumeTestBase extends SolrJettyTestBase +public abstract class LargeVolumeTestBase extends EmbeddedSolrServerTestBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/LargeVolumeBinaryJettyTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/LargeVolumeBinaryJettyTest.java index 5b5bd111b691..3323dd0958f2 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/LargeVolumeBinaryJettyTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/LargeVolumeBinaryJettyTest.java @@ -27,6 +27,6 @@ public class LargeVolumeBinaryJettyTest extends LargeVolumeTestBase { @BeforeClass public static void beforeTest() throws Exception { - createAndStartJetty(legacyExampleCollection1SolrHome()); + initCore(); } } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/LargeVolumeJettyTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/LargeVolumeJettyTest.java index e7cb58f4d1f4..9c172da0a619 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/LargeVolumeJettyTest.java +++ 
b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/LargeVolumeJettyTest.java @@ -24,6 +24,6 @@ public class LargeVolumeJettyTest extends LargeVolumeTestBase { @BeforeClass public static void beforeTest() throws Exception { - createAndStartJetty(legacyExampleCollection1SolrHome()); + initCore(); } } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/MergeIndexesEmbeddedTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/MergeIndexesEmbeddedTest.java index 6a1ab9bd516e..85279d7c8d31 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/MergeIndexesEmbeddedTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/MergeIndexesEmbeddedTest.java @@ -52,7 +52,7 @@ protected SolrClient getSolrCore(String name) { @Override protected SolrClient getSolrAdmin() { - return new EmbeddedSolrServer(cores, "core0"); + return new EmbeddedSolrServer(cores, null); } @Override diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/SolrExampleEmbeddedTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/SolrExampleEmbeddedTest.java index b4d89d4f015b..05d871710a6a 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/SolrExampleEmbeddedTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/SolrExampleEmbeddedTest.java @@ -29,6 +29,6 @@ public class SolrExampleEmbeddedTest extends SolrExampleTests { @BeforeClass public static void beforeTest() throws Exception { - initCore(); + createAndStartJetty(legacyExampleCollection1SolrHome()); } } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestSolrProperties.java b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestSolrProperties.java index 6972d96949af..30ddfc96ea4a 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestSolrProperties.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestSolrProperties.java @@ -47,13 +47,9 
@@ public class TestSolrProperties extends AbstractEmbeddedSolrServerTestCase { RuleChain.outerRule(new SystemPropertiesRestoreRule()); protected SolrClient getSolrAdmin() { - return new EmbeddedSolrServer(cores, "core0"); + return new EmbeddedSolrServer(cores, null); } - protected SolrClient getRenamedSolrAdmin() { - return new EmbeddedSolrServer(cores, "renamed_core"); - } - @Test public void testProperties() throws Exception { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java index 1c9ba04a5d17..a505799c6e3c 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java @@ -49,6 +49,7 @@ import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.response.RequestStatusState; import org.apache.solr.client.solrj.response.UpdateResponse; +import org.apache.solr.client.solrj.response.SolrPingResponse; import org.apache.solr.cloud.AbstractDistribZkTestBase; import org.apache.solr.cloud.SolrCloudTestCase; import org.apache.solr.common.SolrDocument; @@ -951,4 +952,16 @@ private void queryWithPreferReplicaTypes(CloudSolrClient cloudClient, log.info("Shards giving the response: " + Arrays.toString(shardAddresses.toArray())); } + @Test + public void testPing() throws Exception { + final String testCollection = "ping_test"; + CollectionAdminRequest.createCollection(testCollection, "conf", 2, 1).process(cluster.getSolrClient()); + cluster.waitForActiveCollection(testCollection, 2, 2); + final SolrClient clientUnderTest = getRandomClient(); + + final SolrPingResponse response = clientUnderTest.ping(testCollection); + + assertEquals("This should be OK", 0, response.getStatus()); + } + } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java 
b/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java index 0435ed55f76c..2e427002c9d1 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java @@ -57,7 +57,7 @@ public class TestLang extends SolrTestCase { "triangularDistribution", "precision", "minMaxScale", "markovChain", "grandSum", "scalarAdd", "scalarSubtract", "scalarMultiply", "scalarDivide", "sumRows", "sumColumns", "diff", "corrPValues", "normalizeSum", "geometricDistribution", "olsRegress", - "derivative", "spline", "ttest", "pairedTtest", "multiVariateNormalDistribution", "integrate", + "derivative", "spline", "ttest", "pairedTtest", "multiVariateNormalDistribution", "integral", "density", "mannWhitney", "sumSq", "akima", "lerp", "chiSquareDataSet", "gtestDataSet", "termVectors", "getColumnLabels", "getRowLabels", "getAttribute", "kmeans", "getCentroids", "getCluster", "topFeatures", "featureSelect", "rowAt", "colAt", "setColumnLabels", @@ -77,7 +77,7 @@ public class TestLang extends SolrTestCase { "getSupportPoints", "pairSort", "log10", "plist", "recip", "pivot", "ltrim", "rtrim", "export", "zplot", "natural", "repeat", "movingMAD", "hashRollup", "noop", "var", "stddev", "recNum", "isNull", "notNull", "matches", "projectToBorder", "double", "long", "parseCSV", "parseTSV", "dateTime", - "split", "upper", "trim", "lower"}; + "split", "upper", "trim", "lower", "trunc", "cosine"}; @Test public void testLang() { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java index 890d0d33b33e..bc76c4ef5b29 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java @@ -229,6 +229,27 @@ public void testConcat() throws Exception { assertEquals(s2, "c-d-hello"); } 
+ + @Test + public void testTrunc() throws Exception { + String expr = " select(list(tuple(field1=\"abcde\", field2=\"012345\")), trunc(field1, 2) as field3, trunc(field2, 4) as field4)"; + ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); + paramsLoc.set("expr", expr); + paramsLoc.set("qt", "/stream"); + + String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/" + COLLECTIONORALIAS; + TupleStream solrStream = new SolrStream(url, paramsLoc); + + StreamContext context = new StreamContext(); + solrStream.setStreamContext(context); + List tuples = getTuples(solrStream); + assertEquals(tuples.size(), 1); + String s1 = tuples.get(0).getString("field3"); + assertEquals(s1, "ab"); + String s2 = tuples.get(0).getString("field4"); + assertEquals(s2, "0123"); + } + @Test public void testUpperLowerSingle() throws Exception { String expr = " select(list(tuple(field1=\"a\", field2=\"C\")), upper(field1) as field3, lower(field2) as field4)"; @@ -249,6 +270,28 @@ public void testUpperLowerSingle() throws Exception { assertEquals(s2, "c"); } + + @Test + public void testTruncArray() throws Exception { + String expr = " select(list(tuple(field1=array(\"aaaa\",\"bbbb\",\"cccc\"))), trunc(field1, 3) as field2)"; + ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); + paramsLoc.set("expr", expr); + paramsLoc.set("qt", "/stream"); + + String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/" + COLLECTIONORALIAS; + TupleStream solrStream = new SolrStream(url, paramsLoc); + + StreamContext context = new StreamContext(); + solrStream.setStreamContext(context); + List tuples = getTuples(solrStream); + assertEquals(tuples.size(), 1); + List l1 = (List)tuples.get(0).get("field2"); + assertEquals(l1.get(0), "aaa"); + assertEquals(l1.get(1), "bbb"); + assertEquals(l1.get(2), "ccc"); + + } + @Test public void testUpperLowerArray() throws Exception { String expr = " select(list(tuple(field1=array(\"a\",\"b\",\"c\"), 
field2=array(\"X\",\"Y\",\"Z\"))), upper(field1) as field3, lower(field2) as field4)"; @@ -722,6 +765,27 @@ public void testCovariance() throws Exception { assertTrue(tuples.get(0).getDouble("cov").equals(-625.0D)); } + @Test + public void testCosineDistance() throws Exception { + String cexpr = "let(echo=true, " + + "a=array(1,2,3,4)," + + "b=array(10, 20, 30, 45), " + + "c=distance(a, b, cosine()), " + + ")"; + + ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); + paramsLoc.set("expr", cexpr); + paramsLoc.set("qt", "/stream"); + String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/" + COLLECTIONORALIAS; + TupleStream solrStream = new SolrStream(url, paramsLoc); + StreamContext context = new StreamContext(); + solrStream.setStreamContext(context); + List tuples = getTuples(solrStream); + assertTrue(tuples.size() == 1); + Number d = (Number) tuples.get(0).get("c"); + assertEquals(d.doubleValue(), 0.0017046159, 0.0001); + } + @Test public void testDistance() throws Exception { String cexpr = "let(echo=true, " + @@ -1139,7 +1203,7 @@ public void testBinomialCoefficient() throws Exception { List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); Tuple tuple = tuples.get(0); - long binomialCoefficient = (long) tuple.get("return-value"); + long binomialCoefficient = tuple.getLong("return-value"); assertEquals(binomialCoefficient, 56); } @@ -1498,58 +1562,24 @@ public void testMatrix() throws Exception { @Test public void testZplot() throws Exception { - String cexpr = "let(c=tuple(a=add(1,2), b=add(2,3))," + - " zplot(table=c))"; - ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); - paramsLoc.set("expr", cexpr); - paramsLoc.set("qt", "/stream"); String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+COLLECTIONORALIAS; - TupleStream solrStream = new SolrStream(url, paramsLoc); - StreamContext context = new StreamContext(); - solrStream.setStreamContext(context); - List tuples = 
getTuples(solrStream); - assertTrue(tuples.size() == 1); - Tuple out = tuples.get(0); - - assertEquals(out.getDouble("a").doubleValue(), 3.0, 0.0); - assertEquals(out.getDouble("b").doubleValue(), 5.0, 0.0); - - cexpr = "let(c=list(tuple(a=add(1,2), b=add(2,3)), tuple(a=add(1,3), b=add(2,4)))," + - " zplot(table=c))"; - - paramsLoc = new ModifiableSolrParams(); - paramsLoc.set("expr", cexpr); - paramsLoc.set("qt", "/stream"); - solrStream = new SolrStream(url, paramsLoc); - context = new StreamContext(); - solrStream.setStreamContext(context); - tuples = getTuples(solrStream); - assertTrue(tuples.size() == 2); - out = tuples.get(0); - assertEquals(out.getDouble("a").doubleValue(), 3.0, 0.0); - assertEquals(out.getDouble("b").doubleValue(), 5.0, 0.0); - out = tuples.get(1); - - assertEquals(out.getDouble("a").doubleValue(), 4.0, 0.0); - assertEquals(out.getDouble("b").doubleValue(), 6.0, 0.0); - - cexpr = "let(a=array(1,2,3,4)," + + String cexpr = "let(a=array(1,2,3,4)," + " b=array(10,11,12,13),"+ " zplot(x=a, y=b))"; - paramsLoc = new ModifiableSolrParams(); + ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); paramsLoc.set("expr", cexpr); paramsLoc.set("qt", "/stream"); - solrStream = new SolrStream(url, paramsLoc); - context = new StreamContext(); + TupleStream solrStream = new SolrStream(url, paramsLoc); + StreamContext context = new StreamContext(); solrStream.setStreamContext(context); - tuples = getTuples(solrStream); + List tuples = getTuples(solrStream); assertTrue(tuples.size() == 4); - out = tuples.get(0); + Tuple out = tuples.get(0); assertEquals(out.getDouble("x").doubleValue(), 1.0, 0.0); assertEquals(out.getDouble("y").doubleValue(), 10.0, 0.0); @@ -1680,6 +1710,152 @@ public void testZplot() throws Exception { assertTrue(clusters.contains("cluster3")); assertTrue(clusters.contains("cluster4")); assertTrue(clusters.contains("cluster5")); + + cexpr = "let(a=matrix(array(0,1,2,3,4,5,6,7,8,9,10,11), 
array(10,11,12,13,14,15,16,17,18,19,20,21))," + + " zplot(heat=a))"; + + paramsLoc = new ModifiableSolrParams(); + paramsLoc.set("expr", cexpr); + paramsLoc.set("qt", "/stream"); + solrStream = new SolrStream(url, paramsLoc); + context = new StreamContext(); + solrStream.setStreamContext(context); + tuples = getTuples(solrStream); + assertTrue(tuples.size() == 24); + Tuple tuple = tuples.get(0); + String xLabel = tuple.getString("x"); + String yLabel = tuple.getString("y"); + Number z = tuple.getLong("z"); + + assertEquals(xLabel, "col00"); + assertEquals(yLabel, "row0"); + assertEquals(z.longValue(), 0L); + + tuple = tuples.get(1); + xLabel = tuple.getString("x"); + yLabel = tuple.getString("y"); + z = tuple.getLong("z"); + + assertEquals(xLabel, "col01"); + assertEquals(yLabel, "row0"); + assertEquals(z.longValue(), 1L); + + tuple = tuples.get(2); + xLabel = tuple.getString("x"); + yLabel = tuple.getString("y"); + z = tuple.getLong("z"); + + assertEquals(xLabel, "col02"); + assertEquals(yLabel, "row0"); + assertEquals(z.longValue(), 2L); + + tuple = tuples.get(12); + xLabel = tuple.getString("x"); + yLabel = tuple.getString("y"); + z = tuple.getLong("z"); + + assertEquals(xLabel, "col00"); + assertEquals(yLabel, "row1"); + assertEquals(z.longValue(), 10L); + + + cexpr = "let(a=transpose(matrix(array(0, 1, 2, 3, 4, 5, 6, 7,8,9,10,11), " + + " array(10,11,12,13,14,15,16,17,18,19,20,21)))," + + " zplot(heat=a))"; + + paramsLoc = new ModifiableSolrParams(); + paramsLoc.set("expr", cexpr); + paramsLoc.set("qt", "/stream"); + solrStream = new SolrStream(url, paramsLoc); + context = new StreamContext(); + solrStream.setStreamContext(context); + tuples = getTuples(solrStream); + assertTrue(tuples.size() == 24); + tuple = tuples.get(0); + xLabel = tuple.getString("x"); + yLabel = tuple.getString("y"); + z = tuple.getLong("z"); + + assertEquals(xLabel, "col0"); + assertEquals(yLabel, "row00"); + assertEquals(z.longValue(), 0L); + + tuple = tuples.get(1); + xLabel = 
tuple.getString("x"); + yLabel = tuple.getString("y"); + z = tuple.getLong("z"); + + assertEquals(xLabel, "col1"); + assertEquals(yLabel, "row00"); + assertEquals(z.longValue(), 10L); + + tuple = tuples.get(2); + xLabel = tuple.getString("x"); + yLabel = tuple.getString("y"); + z = tuple.getLong("z"); + + assertEquals(xLabel, "col0"); + assertEquals(yLabel, "row01"); + assertEquals(z.longValue(), 1L); + + tuple = tuples.get(12); + xLabel = tuple.getString("x"); + yLabel = tuple.getString("y"); + z = tuple.getLong("z"); + + assertEquals(xLabel, "col0"); + assertEquals(yLabel, "row06"); + assertEquals(z.longValue(), 6L); + + cexpr = "let(a=matrix(array(0, 1, 2, 3, 4, 5, 6, 7,8,9,10,11), " + + " array(10,11,12,13,14,15,16,17,18,19,20,21))," + + " b=setRowLabels(a, array(\"blah1\", \"blah2\")),"+ + " c=setColumnLabels(b, array(\"rah1\", \"rah2\", \"rah3\", \"rah4\", \"rah5\", \"rah6\", \"rah7\", \"rah8\", \"rah9\", \"rah10\", \"rah11\", \"rah12\")),"+ + " zplot(heat=c))"; + + paramsLoc = new ModifiableSolrParams(); + paramsLoc.set("expr", cexpr); + paramsLoc.set("qt", "/stream"); + solrStream = new SolrStream(url, paramsLoc); + context = new StreamContext(); + solrStream.setStreamContext(context); + tuples = getTuples(solrStream); + assertTrue(tuples.size() == 24); + tuple = tuples.get(0); + xLabel = tuple.getString("x"); + yLabel = tuple.getString("y"); + z = tuple.getLong("z"); + + assertEquals(xLabel, "rah1"); + assertEquals(yLabel, "blah1"); + assertEquals(z.longValue(), 0L); + + tuple = tuples.get(1); + xLabel = tuple.getString("x"); + yLabel = tuple.getString("y"); + z = tuple.getLong("z"); + + assertEquals(xLabel, "rah2"); + assertEquals(yLabel, "blah1"); + assertEquals(z.longValue(), 1L); + + tuple = tuples.get(2); + xLabel = tuple.getString("x"); + yLabel = tuple.getString("y"); + z = tuple.getLong("z"); + + assertEquals(xLabel, "rah3"); + assertEquals(yLabel, "blah1"); + assertEquals(z.longValue(), 2L); + + tuple = tuples.get(12); + xLabel = 
tuple.getString("x"); + yLabel = tuple.getString("y"); + z = tuple.getLong("z"); + + assertEquals(xLabel, "rah1"); + assertEquals(yLabel, "blah2"); + assertEquals(z.longValue(), 10L); } @@ -3343,9 +3519,27 @@ public void testCosineSimilarity() throws Exception { List tuples = getTuples(solrStream); assertTrue(tuples.size() == 1); Number cs = (Number)tuples.get(0).get("return-value"); - assertTrue(cs.doubleValue() == 0.9838197164968291); + assertEquals(cs.doubleValue(),0.9838197164968291, .00000001); } + @Test + public void testCosineSimilaritySort() throws Exception { + String cexpr = "sort(select(list(tuple(id=\"1\", f=array(1,2,3,4)), tuple(id=\"2\",f=array(10,2,3,4)))," + + " cosineSimilarity(f, array(1,2,3,4)) as sim, id)," + + " by=\"sim desc\")"; + ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); + paramsLoc.set("expr", cexpr); + paramsLoc.set("qt", "/stream"); + String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+COLLECTIONORALIAS; + TupleStream solrStream = new SolrStream(url, paramsLoc); + StreamContext context = new StreamContext(); + solrStream.setStreamContext(context); + List tuples = getTuples(solrStream); + assertEquals(tuples.size(), 2); + assertEquals(tuples.get(0).getString("id"), "1"); + } + + @Test public void testPoissonDistribution() throws Exception { String cexpr = "let(a=poissonDistribution(100)," + @@ -4085,9 +4279,10 @@ public void testIntegrate() throws Exception { String cexpr = "let(echo=true, " + "a=sequence(50, 1, 0), " + "b=spline(a), " + - "c=integrate(b, 0, 49), " + - "d=integrate(b, 0, 20), " + - "e=integrate(b, 20, 49))"; + "c=integral(b, 0, 49), " + + "d=integral(b, 0, 20), " + + "e=integral(b, 20, 49)," + + "f=integral(b))"; ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); paramsLoc.set("expr", cexpr); paramsLoc.set("qt", "/stream"); @@ -4103,6 +4298,9 @@ public void testIntegrate() throws Exception { assertEquals(integral.doubleValue(), 20, 0.0); integral = 
(Number)tuples.get(0).get("e"); assertEquals(integral.doubleValue(), 29, 0.0); + List integrals = (List)tuples.get(0).get("f"); + assertEquals(integrals.size(), 50); + assertEquals(integrals.get(49).intValue(), 49); } @Test @@ -4313,7 +4511,8 @@ public void testOutliers() throws Exception { } - @Test + + @Test public void testLerp() throws Exception { String cexpr = "let(echo=true," + " a=array(0,1,2,3,4,5,6,7), " + @@ -5068,7 +5267,9 @@ public void testCorrMatrix() throws Exception { "f=corr(d), " + "g=corr(d, type=kendalls), " + "h=corr(d, type=spearmans)," + - "i=corrPValues(f))"; + "i=corrPValues(f)," + + " j=getRowLabels(f)," + + " k=getColumnLabels(f))"; ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); paramsLoc.set("expr", cexpr); paramsLoc.set("qt", "/stream"); @@ -5157,6 +5358,20 @@ public void testCorrMatrix() throws Exception { assertEquals(row3.get(0).doubleValue(), 0.28548201004998375, 0); assertEquals(row3.get(1).doubleValue(), 0.28548201004998375, 0); assertEquals(row3.get(2).doubleValue(), 0, 0); + + List rowLabels = (List)tuples.get(0).get("j"); + assertEquals(rowLabels.size(), 3); + assertEquals(rowLabels.get(0), "col0"); + assertEquals(rowLabels.get(1), "col1"); + assertEquals(rowLabels.get(2), "col2"); + + List colLabels = (List)tuples.get(0).get("k"); + assertEquals(colLabels.size(), 3); + assertEquals(colLabels.get(0), "col0"); + assertEquals(colLabels.get(1), "col1"); + assertEquals(colLabels.get(2), "col2"); + + } @Test @@ -5394,13 +5609,13 @@ public void testConvolution() throws Exception { assertTrue(tuples.size() == 1); List convolution = (List)(tuples.get(0)).get("conv"); assertTrue(convolution.size() == 7); - assertTrue(convolution.get(0).equals(20000L)); - assertTrue(convolution.get(1).equals(20000L)); - assertTrue(convolution.get(2).equals(25000L)); - assertTrue(convolution.get(3).equals(30000L)); - assertTrue(convolution.get(4).equals(15000L)); - assertTrue(convolution.get(5).equals(10000L)); - 
assertTrue(convolution.get(6).equals(5000L)); + assertTrue(convolution.get(0).equals(20000D)); + assertTrue(convolution.get(1).equals(20000D)); + assertTrue(convolution.get(2).equals(25000D)); + assertTrue(convolution.get(3).equals(30000D)); + assertTrue(convolution.get(4).equals(15000D)); + assertTrue(convolution.get(5).equals(10000D)); + assertTrue(convolution.get(6).equals(5000D)); } @Test @@ -5451,7 +5666,7 @@ public void testRegressAndPredict() throws Exception { double prediction = tuple.getDouble("p"); assertTrue(prediction == 600.0D); List predictions = (List)tuple.get("pl"); - assertList(predictions, 200L, 400L, 600L, 200L, 400L, 800L, 1200L); + assertList(predictions, 200D, 400D, 600D, 200D, 400D, 800D, 1200D); } @Test diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java index ee63a2cc5edf..9c6e345fc9c0 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java @@ -3085,6 +3085,46 @@ public void testCatStreamSingleFile() throws Exception { } } + @Test + public void testCatStreamEmptyFile() throws Exception { + final String catStream = "cat(\"topLevel-empty.txt\")"; + ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); + paramsLoc.set("expr", catStream); + paramsLoc.set("qt", "/stream"); + String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+FILESTREAM_COLLECTION; + + SolrStream solrStream = new SolrStream(url, paramsLoc); + + StreamContext context = new StreamContext(); + solrStream.setStreamContext(context); + List tuples = getTuples(solrStream); + + assertEquals(0, tuples.size()); + } + + @Test + public void testCatStreamMultipleFilesOneEmpty() throws Exception { + final String catStream = "cat(\"topLevel1.txt,topLevel-empty.txt\")"; + ModifiableSolrParams 
paramsLoc = new ModifiableSolrParams(); + paramsLoc.set("expr", catStream); + paramsLoc.set("qt", "/stream"); + String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+FILESTREAM_COLLECTION; + + SolrStream solrStream = new SolrStream(url, paramsLoc); + + StreamContext context = new StreamContext(); + solrStream.setStreamContext(context); + List tuples = getTuples(solrStream); + + assertEquals(4, tuples.size()); + + for (int i = 0; i < 4; i++) { + Tuple t = tuples.get(i); + assertEquals("topLevel1.txt line " + String.valueOf(i+1), t.get("line")); + assertEquals("topLevel1.txt", t.get("file")); + } + } + @Test public void testCatStreamMaxLines() throws Exception { final String catStream = "cat(\"topLevel1.txt\", maxLines=2)"; @@ -3199,6 +3239,7 @@ private static String findUserFilesDataDir() { * dataDir * |- topLevel1.txt * |- topLevel2.txt + * |- topLevel-empty.txt * |- directory1 * |- secondLevel1.txt * |- secondLevel2.txt @@ -3213,10 +3254,12 @@ private static void populateFileStreamData(String dataDir) throws Exception { final File topLevel1 = new File(Paths.get(dataDir, "topLevel1.txt").toString()); final File topLevel2 = new File(Paths.get(dataDir, "topLevel2.txt").toString()); + final File topLevelEmpty = new File(Paths.get(dataDir, "topLevel-empty.txt").toString()); final File secondLevel1 = new File(Paths.get(dataDir, "directory1", "secondLevel1.txt").toString()); final File secondLevel2 = new File(Paths.get(dataDir, "directory1", "secondLevel2.txt").toString()); populateFileWithData(topLevel1); populateFileWithData(topLevel2); + topLevelEmpty.createNewFile(); populateFileWithData(secondLevel1); populateFileWithData(secondLevel2); } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AbsoluteValueEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AbsoluteValueEvaluatorTest.java index 7ce58adf1ada..c35ed293e0d4 100644 --- 
a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AbsoluteValueEvaluatorTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AbsoluteValueEvaluatorTest.java @@ -53,8 +53,7 @@ public void absoluteValueOneField() throws Exception{ values.clear(); values.put("a", 1); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(1L, result); + Assert.assertEquals(1D, result); values.clear(); values.put("a", 1.1); @@ -78,8 +77,7 @@ public void absoluteValueFromContext() throws Exception{ context.getLets().put("a", 1); result = evaluator.evaluate(new Tuple()); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(1L, result); + Assert.assertEquals(1D, result); context.getLets().put("a", 1.1); result = evaluator.evaluate(new Tuple()); @@ -93,8 +91,7 @@ public void absoluteValueFromContext() throws Exception{ context.getLets().put("a", factory.constructEvaluator("add(4,-6,34,-56)")); result = evaluator.evaluate(new Tuple()); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(24L, result); + Assert.assertEquals(24D, result); } @Test(expected = IOException.class) diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AddEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AddEvaluatorTest.java index 3251fd789996..ca094d597112 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AddEvaluatorTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AddEvaluatorTest.java @@ -50,8 +50,7 @@ public void addTwoFieldsWithValues() throws Exception{ values.put("a", 1); values.put("b", 2); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(3L, result); + Assert.assertEquals(3D, result); values.clear(); values.put("a", 1.1); @@ -134,8 +133,7 @@ public void addManyFieldsWithValues() throws Exception{ 
values.put("c", 3); values.put("d", 4); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(10L, result); + Assert.assertEquals(10D, result); values.clear(); values.put("a", 1.1); @@ -167,8 +165,7 @@ public void addManyFieldsWithSubAdds() throws Exception{ values.put("c", 3); values.put("d", 4); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(10L, result); + Assert.assertEquals(10D, result); values.clear(); values.put("a", 1.1); @@ -203,8 +200,7 @@ public void addManyFieldsWithSubAdds() throws Exception{ values.put("c", 123456789123456789L); values.put("d", 123456789123456789L); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(4 * 123456789123456789L, result); + Assert.assertEquals(4 * 123456789123456789D, result); } @Test @@ -218,8 +214,7 @@ public void addManyFieldsWithManySubAdds() throws Exception{ values.put("c", 3); values.put("d", 4); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(14L, result); + Assert.assertEquals(14D, result); values.clear(); values.put("a", 1.1); @@ -254,8 +249,7 @@ public void addManyFieldsWithManySubAdds() throws Exception{ values.put("c", 123456789123456789L); values.put("d", 123456789123456789L); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(6 * 123456789123456789L, result); + Assert.assertEquals(6 * 123456789123456789D, result); values.clear(); values.put("a", 4.12345678); @@ -278,8 +272,7 @@ public void addManyFieldsWithManySubAddsWithNegative() throws Exception{ values.put("c", 3); values.put("d", 4); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(10L, result); + Assert.assertEquals(10D, result); values.clear(); values.put("a", 1.1); @@ 
-314,8 +307,7 @@ public void addManyFieldsWithManySubAddsWithNegative() throws Exception{ values.put("c", 123456789123456789L); values.put("d", 123456789123456789L); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(4 * 123456789123456789L, result); + Assert.assertEquals(4 * 123456789123456789D, result); values.clear(); values.put("a", -4.12345678); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AppendEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AppendEvaluatorTest.java index a34d5cca34c7..f85ed3c94303 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AppendEvaluatorTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AppendEvaluatorTest.java @@ -49,11 +49,11 @@ public void multiField() throws Exception{ Object result; values.clear(); - values.put("a", 1L); + values.put("a", 1); values.put("b", Arrays.asList("foo","bar","baz")); result = evaluator.evaluate(new Tuple(values)); Assert.assertTrue(result instanceof List); - Assert.assertEquals(1L, ((List)result).get(0)); + Assert.assertEquals(1D, ((List)result).get(0)); Assert.assertEquals("foo", ((List)result).get(1)); Assert.assertEquals("bar", ((List)result).get(2)); Assert.assertEquals("baz", ((List)result).get(3)); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ArrayEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ArrayEvaluatorTest.java index 8b2b35f64a94..7d14d56df78a 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ArrayEvaluatorTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ArrayEvaluatorTest.java @@ -60,9 +60,9 @@ public void arrayLongSortAscTest() throws IOException{ Assert.assertTrue(result instanceof List); Assert.assertEquals(3, ((List)result).size()); - Assert.assertEquals(1L, 
((List)result).get(0)); - Assert.assertEquals(2L, ((List)result).get(1)); - Assert.assertEquals(3L, ((List)result).get(2)); + Assert.assertEquals(1D, ((List)result).get(0)); + Assert.assertEquals(2D, ((List)result).get(1)); + Assert.assertEquals(3D, ((List)result).get(2)); } @Test @@ -81,9 +81,9 @@ public void arrayLongSortDescTest() throws IOException{ Assert.assertTrue(result instanceof List); Assert.assertEquals(3, ((List)result).size()); - Assert.assertEquals(3L, ((List)result).get(0)); - Assert.assertEquals(2L, ((List)result).get(1)); - Assert.assertEquals(1L, ((List)result).get(2)); + Assert.assertEquals(3D, ((List)result).get(0)); + Assert.assertEquals(2D, ((List)result).get(1)); + Assert.assertEquals(1D, ((List)result).get(2)); } @Test diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AscEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AscEvaluatorTest.java index bb64248ec44c..8029712a4b1f 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AscEvaluatorTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AscEvaluatorTest.java @@ -53,7 +53,7 @@ public void integerSortTest() throws Exception{ result = evaluator.evaluate(new Tuple(values)); Assert.assertTrue(result instanceof List); Assert.assertEquals(7, ((List)result).size()); - checkOrder(Arrays.asList(1L,2L,3L,4L,5L,7L,8L), (List)result); + checkOrder(Arrays.asList(1D,2D,3D,4D,5D,7D,8D), (List)result); } @Test @@ -79,7 +79,7 @@ public void doubleWithIntegersSortTest() throws Exception{ result = evaluator.evaluate(new Tuple(values)); Assert.assertTrue(result instanceof List); Assert.assertEquals(7, ((List)result).size()); - checkOrder(Arrays.asList(2L, 2.1, 2.3, 2.5, 2.6, 2.7, 3L), (List)result); + checkOrder(Arrays.asList(2D, 2.1, 2.3, 2.5, 2.6, 2.7, 3D), (List)result); } @Test diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/CeilingEvaluatorTest.java 
b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/CeilingEvaluatorTest.java index a4cf9d230dbb..50e8b8ea26c2 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/CeilingEvaluatorTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/CeilingEvaluatorTest.java @@ -50,20 +50,17 @@ public void ceilingOneField() throws Exception{ values.clear(); values.put("a", 1); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(1L, result); + Assert.assertEquals(1D, result); values.clear(); values.put("a", 1.1); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(2L, result); + Assert.assertEquals(2D, result); values.clear(); values.put("a", -1.1); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(-1L, result); + Assert.assertEquals(-1D, result); } @Test(expected = IOException.class) diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/CoalesceEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/CoalesceEvaluatorTest.java index e0569d14b824..1d9c6a4b8b87 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/CoalesceEvaluatorTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/CoalesceEvaluatorTest.java @@ -107,6 +107,6 @@ public void manyFieldsWithSubcoalesces() throws Exception{ values.put("c", null); values.put("d", 4); result = evaluator.evaluate(new Tuple(values)); - Assert.assertEquals(1L, result); + Assert.assertEquals(1D, result); } } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/DivideEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/DivideEvaluatorTest.java index 60d93ac93b0e..4ef136ce7d69 100644 --- 
a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/DivideEvaluatorTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/DivideEvaluatorTest.java @@ -158,7 +158,6 @@ public void divZeroByValue() throws Exception{ values.put("a", 0); values.put("b", 2); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(0L, result); + Assert.assertEquals(0D, result); } } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/FloorEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/FloorEvaluatorTest.java index b1d83ab7b2c3..3ac678cfdc97 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/FloorEvaluatorTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/FloorEvaluatorTest.java @@ -50,20 +50,17 @@ public void floorOneField() throws Exception{ values.clear(); values.put("a", 1); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(1L, result); + Assert.assertEquals(1D, result); values.clear(); values.put("a", 1.1); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(1L, result); + Assert.assertEquals(1D, result); values.clear(); values.put("a", -1.1); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(-2L, result); + Assert.assertEquals(-2D, result); } @Test(expected = IOException.class) diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ModuloEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ModuloEvaluatorTest.java index d72b55f66c75..949c85a1fdbe 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ModuloEvaluatorTest.java +++ 
b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ModuloEvaluatorTest.java @@ -51,21 +51,18 @@ public void modTwoFieldsWithValues() throws Exception{ values.put("a", 1); values.put("b", 2); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(Long.valueOf(1 % 2), result); + Assert.assertEquals(1 % 2, ((Number)result).doubleValue(), 0.0); values.clear(); values.put("a", 1.1); values.put("b", 2); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Double); Assert.assertEquals(1.1 % 2, result); values.clear(); values.put("a", 1.1); values.put("b", 2.1); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Double); Assert.assertEquals(1.1 % 2.1, result); } @@ -135,8 +132,7 @@ public void modManyFieldsWithSubmods() throws Exception{ values.put("b", 2); values.put("c", 9); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(Long.valueOf(1 % (2 % 9)), result); + Assert.assertEquals(1 % (2 % 9), ((Number)result).doubleValue(), 0.0); } @Test(expected = IOException.class) @@ -158,7 +154,6 @@ public void modZeroByValue() throws Exception{ values.put("a", 0); values.put("b", 2); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(0L, result); + Assert.assertEquals(0D, result); } } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/MultiplyEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/MultiplyEvaluatorTest.java index 1c0663d7fc11..2d6fe5a08273 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/MultiplyEvaluatorTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/MultiplyEvaluatorTest.java @@ -50,8 +50,7 @@ public void multTwoFieldsWithValues() throws Exception{ values.put("a", 1); values.put("b", 
2); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(2L, result); + Assert.assertEquals(2D, result); values.clear(); values.put("a", 1.1); @@ -76,8 +75,7 @@ public void multOneField() throws Exception{ values.clear(); values.put("a", 6); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(6L, result); + Assert.assertEquals(6D, result); values.clear(); values.put("a", 6.5); @@ -152,8 +150,7 @@ public void multManyFieldsWithValues() throws Exception{ values.put("c", 3); values.put("d", 4); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(24L, result); + Assert.assertEquals(24D, result); values.clear(); values.put("a", 1.1); @@ -185,7 +182,6 @@ public void multManyFieldsWithSubmults() throws Exception{ values.put("c", 3); values.put("d", 4); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(24L, result); + Assert.assertEquals(24D, result); } } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/RecursiveEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/RecursiveEvaluatorTest.java index 7cc0ebc01b99..4e13acf5784b 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/RecursiveEvaluatorTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/RecursiveEvaluatorTest.java @@ -67,8 +67,7 @@ public void compoundTest() throws Exception{ values.put("f", 2); values.put("g", 5); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(-16L, result); + Assert.assertEquals(-16D, result); values.clear(); values.put("a", .1); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/SubtractEvaluatorTest.java 
b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/SubtractEvaluatorTest.java index 684534076656..2012770cf29d 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/SubtractEvaluatorTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/SubtractEvaluatorTest.java @@ -51,8 +51,7 @@ public void subTwoFieldsWithValues() throws Exception{ values.put("a", 1); values.put("b", 2); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(-1L, result); + Assert.assertEquals(-1D, result); values.clear(); values.put("a", 1.1); @@ -65,8 +64,7 @@ public void subTwoFieldsWithValues() throws Exception{ values.put("a", 1.1); values.put("b", 2.1); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(-1L, result); + Assert.assertEquals(-1D, result); } @Test(expected = IOException.class) @@ -140,8 +138,7 @@ public void subManyFieldsWithValues() throws Exception{ values.put("c", 3); values.put("d", 4); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(-8L, result); + Assert.assertEquals(-8D, result); values.clear(); values.put("a", 1.1); @@ -173,8 +170,7 @@ public void subManyFieldsWithSubsubs() throws Exception{ values.put("c", 3); values.put("d", 4); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(0L, result); + Assert.assertEquals(0D, result); values.clear(); values.put("a", 123456789123456789L); @@ -182,7 +178,6 @@ public void subManyFieldsWithSubsubs() throws Exception{ values.put("c", 123456789123456789L); values.put("d", 123456789123456789L); result = evaluator.evaluate(new Tuple(values)); - Assert.assertTrue(result instanceof Long); - Assert.assertEquals(0L, result); + Assert.assertEquals(0D, result); } } diff --git 
a/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java index 76ce4ab01b0d..09235bcd480a 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java @@ -16,6 +16,10 @@ */ package org.apache.solr.client.solrj.request; +import static org.hamcrest.CoreMatchers.anyOf; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; + import java.io.File; import java.util.ArrayList; import java.util.Arrays; @@ -47,10 +51,6 @@ import org.junit.Test; import org.restlet.ext.servlet.ServerServlet; -import static org.hamcrest.CoreMatchers.anyOf; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; - /** * Test the functionality (accuracy and failure) of the methods exposed by the classes * {@link SchemaRequest} and {@link SchemaResponse}. 
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/SolrPingTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/SolrPingTest.java index e65049b84396..388cc78d6b4f 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/SolrPingTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/SolrPingTest.java @@ -16,10 +16,10 @@ */ package org.apache.solr.client.solrj.request; -import junit.framework.Assert; +import java.io.File; import org.apache.commons.io.FileUtils; -import org.apache.solr.SolrJettyTestBase; +import org.apache.solr.EmbeddedSolrServerTestBase; import org.apache.solr.client.solrj.response.SolrPingResponse; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; @@ -27,12 +27,12 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.io.File; +import junit.framework.Assert; /** * Test SolrPing in Solrj */ -public class SolrPingTest extends SolrJettyTestBase { +public class SolrPingTest extends EmbeddedSolrServerTestBase { @BeforeClass public static void beforeClass() throws Exception { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java index 0cf59b71001d..44247a700310 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java @@ -82,7 +82,7 @@ protected File getSolrXml() throws Exception { */ protected SolrClient getSolrAdmin() { - return new EmbeddedSolrServer(cores, "core0"); + return new EmbeddedSolrServer(cores, null); } @Test diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/DirectJsonQueryRequestFacetingEmbeddedTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/DirectJsonQueryRequestFacetingEmbeddedTest.java new file mode 100644 index 000000000000..606debb56d48 --- 
/dev/null +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/DirectJsonQueryRequestFacetingEmbeddedTest.java @@ -0,0 +1,592 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.client.solrj.request.json; + +import java.io.File; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.util.List; +import java.util.Properties; + +import org.apache.commons.io.FileUtils; +import org.apache.lucene.util.LuceneTestCase; +import org.apache.solr.EmbeddedSolrServerTestBase; +import org.apache.solr.SolrTestCaseJ4.SuppressSSL; +import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer; +import org.apache.solr.client.solrj.request.AbstractUpdateRequest; +import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.client.solrj.response.UpdateResponse; +import org.apache.solr.client.solrj.response.json.BucketJsonFacet; +import org.apache.solr.client.solrj.response.json.NestableJsonFacet; +import org.apache.solr.common.SolrDocumentList; +import org.apache.solr.util.ExternalPaths; +import org.junit.BeforeClass; +import org.junit.Test; + +@SuppressSSL +public class 
DirectJsonQueryRequestFacetingEmbeddedTest extends EmbeddedSolrServerTestBase { + + private static final String COLLECTION_NAME = "techproducts"; + private static final int NUM_TECHPRODUCTS_DOCS = 32; + private static final int NUM_IN_STOCK = 17; + private static final int NUM_ELECTRONICS = 12; + private static final int NUM_CURRENCY = 4; + private static final int NUM_MEMORY = 3; + private static final int NUM_CORSAIR = 3; + private static final int NUM_BELKIN = 2; + private static final int NUM_CANON = 2; + + @BeforeClass + public static void beforeClass() throws Exception { + final String sourceHome = ExternalPaths.SOURCE_HOME; + + final File tempSolrHome = LuceneTestCase.createTempDir().toFile(); + FileUtils.copyFileToDirectory(new File(sourceHome, "server/solr/solr.xml"), tempSolrHome); + final File collectionDir = new File(tempSolrHome, COLLECTION_NAME); + FileUtils.forceMkdir(collectionDir); + final File configSetDir = new File(sourceHome, "server/solr/configsets/sample_techproducts_configs/conf"); + FileUtils.copyDirectoryToDirectory(configSetDir, collectionDir); + + final Properties props = new Properties(); + props.setProperty("name", COLLECTION_NAME); + + try (Writer writer = new OutputStreamWriter(FileUtils.openOutputStream(new File(collectionDir, "core.properties")), + "UTF-8");) { + props.store(writer, null); + } + + final String config = tempSolrHome.getAbsolutePath() + "/" + COLLECTION_NAME + "/conf/solrconfig.xml"; + final String schema = tempSolrHome.getAbsolutePath() + "/" + COLLECTION_NAME + "/conf/managed-schema"; + initCore(config, schema, tempSolrHome.getAbsolutePath(), COLLECTION_NAME); + + client = new EmbeddedSolrServer(h.getCoreContainer(), COLLECTION_NAME) { + @Override + public void close() { + // do not close core container + } + }; + + ContentStreamUpdateRequest up = new ContentStreamUpdateRequest("/update"); + up.setParam("collection", COLLECTION_NAME); + up.addFile(getFile("solrj/techproducts.xml"), "application/xml"); + 
up.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); + UpdateResponse updateResponse = up.process(client); + assertEquals(0, updateResponse.getStatus()); + } + + @Test + public void testSingleTermsFacet() throws Exception { + final String jsonBody = String.join("\n", "{", + " 'query': '*:*',", + " 'facet': {", + " 'top_cats': {", + " 'type': 'terms',", + " 'field': 'cat',", + " 'limit': 3", + " }", + " }", + "}"); + final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); + + QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME); + + assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); + final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); + assertEquals(NUM_TECHPRODUCTS_DOCS, topLevelFacetData.getCount()); + assertHasFacetWithBucketValues(topLevelFacetData, "top_cats", + new FacetBucket("electronics", NUM_ELECTRONICS), + new FacetBucket("currency", NUM_CURRENCY), + new FacetBucket("memory", NUM_MEMORY)); + } + + @Test + public void testMultiTermsFacet() throws Exception { + final String jsonBody = String.join("\n", "{", + " 'query': '*:*',", + " 'facet': {", + " 'top_cats': {", + " 'type': 'terms',", + " 'field': 'cat',", + " 'limit': 3", + " },", + " 'top_manufacturers': {", + " 'type': 'terms',", + " 'field': 'manu_id_s',", + " 'limit': 3", + " }", + " }", + "}"); + final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); + + QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME); + + assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); + final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); + assertEquals(NUM_TECHPRODUCTS_DOCS, topLevelFacetData.getCount()); + assertHasFacetWithBucketValues(topLevelFacetData, "top_cats", + new FacetBucket("electronics", NUM_ELECTRONICS), + new FacetBucket("currency", NUM_CURRENCY), + new FacetBucket("memory", NUM_MEMORY)); + 
assertHasFacetWithBucketValues(topLevelFacetData, "top_manufacturers", + new FacetBucket("corsair", NUM_CORSAIR), + new FacetBucket("belkin", NUM_BELKIN), + new FacetBucket("canon", NUM_CANON)); + } + + @Test + public void testSingleRangeFacet() throws Exception { + final String jsonBody = String.join("\n", "{", + " 'query': '*:*',", + " 'facet': {", + " 'prices': {", + " 'type': 'range',", + " 'field': 'price',", + " 'start': 0,", + " 'end': 100,", + " 'gap': 20", + " }", + " }", + "}"); + final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); + + QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME); + + assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); + final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); + assertEquals(NUM_TECHPRODUCTS_DOCS, topLevelFacetData.getCount()); + assertHasFacetWithBucketValues(topLevelFacetData, "prices", + new FacetBucket(0.0f, 5), + new FacetBucket(20.0f, 0), + new FacetBucket(40.0f, 0), + new FacetBucket(60.0f, 1), + new FacetBucket(80.0f, 1)); + } + + @Test + public void testMultiRangeFacet() throws Exception { + final String jsonBody = String.join("\n", "{", + " 'query': '*:*',", + " 'facet': {", + " 'prices': {", + " 'type': 'range',", + " 'field': 'price',", + " 'start': 0,", + " 'end': 100,", + " 'gap': 20", + " },", + " 'shipping_weights': {", + " 'type': 'range',", + " 'field': 'weight',", + " 'start': 0,", + " 'end': 200,", + " 'gap': 50", + " }", + " }", + "}"); + final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); + + QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME); + + assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); + final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); + assertEquals(NUM_TECHPRODUCTS_DOCS, topLevelFacetData.getCount()); + assertHasFacetWithBucketValues(topLevelFacetData, "prices", + new 
FacetBucket(0.0f, 5), + new FacetBucket(20.0f, 0), + new FacetBucket(40.0f, 0), + new FacetBucket(60.0f, 1), + new FacetBucket(80.0f, 1)); + assertHasFacetWithBucketValues(topLevelFacetData, "shipping_weights", + new FacetBucket(0.0f, 6), + new FacetBucket(50.0f, 0), + new FacetBucket(100.0f, 0), + new FacetBucket(150.0f, 1)); + } + + @Test + public void testSingleStatFacet() throws Exception { + final String jsonBody = String.join("\n", "{", + " 'query': '*:*',", + " 'facet': {", + " 'sum_price': 'sum(price)'", + " }", + "}"); + final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); + + QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME); + + assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); + final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); + assertHasStatFacetWithValue(topLevelFacetData, "sum_price", 5251.270030975342); + } + + @Test + public void testMultiStatFacet() throws Exception { + final String jsonBody = String.join("\n", "{", + " 'query': '*:*',", + " 'facet': {", + " 'sum_price': 'sum(price)',", + " 'avg_price': 'avg(price)'", + " }", + "}"); + final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); + + QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME); + + assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); + final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); + assertHasStatFacetWithValue(topLevelFacetData, "sum_price", 5251.270030975342); + assertHasStatFacetWithValue(topLevelFacetData, "avg_price", 328.20437693595886); + } + + @Test + public void testMultiFacetsMixedTypes() throws Exception { + final String jsonBody = String.join("\n", "{", + " 'query': '*:*',", + " 'facet': {", + " 'avg_price': 'avg(price)',", + " 'top_cats': {", + " 'type': 'terms',", + " 'field': 'cat',", + " 'limit': 3", + " }", + " }", + "}"); + final DirectJsonQueryRequest 
request = new DirectJsonQueryRequest(jsonBody); + + QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME); + + assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); + final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); + assertHasStatFacetWithValue(topLevelFacetData, "avg_price", 328.20437693595886); + assertHasFacetWithBucketValues(topLevelFacetData, "top_cats", + new FacetBucket("electronics", NUM_ELECTRONICS), + new FacetBucket("currency", NUM_CURRENCY), + new FacetBucket("memory", NUM_MEMORY)); + } + + @Test + public void testNestedTermsFacet() throws Exception { + final String subfacetName = "top_manufacturers_for_cat"; + final String jsonBody = String.join("\n", "{", + " 'query': '*:*',", + " 'facet': {", + " 'top_cats': {", + " 'type': 'terms',", + " 'field': 'cat',", + " 'limit': 3", + " 'facet': {", + " 'top_manufacturers_for_cat': {", + " 'type': 'terms',", + " 'field': 'manu_id_s',", + " 'limit': 1", + " }", + " }", + " }", + " }", + "}"); + final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); + + QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME); + + assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); + final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); + // Test top level facets + assertHasFacetWithBucketValues(topLevelFacetData, "top_cats", + new FacetBucket("electronics", NUM_ELECTRONICS), + new FacetBucket("currency", NUM_CURRENCY), + new FacetBucket("memory", NUM_MEMORY)); + // Test subfacet values for each top-level facet bucket + final List catBuckets = topLevelFacetData.getBucketBasedFacets("top_cats").getBuckets(); + assertHasFacetWithBucketValues(catBuckets.get(0), subfacetName, new FacetBucket("corsair", 3)); + assertHasFacetWithBucketValues(catBuckets.get(1), subfacetName, new FacetBucket("boa", 1)); + assertHasFacetWithBucketValues(catBuckets.get(2), subfacetName, 
new FacetBucket("corsair", 3)); + } + + @Test + public void testNestedFacetsOfMixedTypes() throws Exception { + final String subfacetName = "avg_price_for_cat"; + final String jsonBody = String.join("\n", "{", + " 'query': '*:*',", + " 'facet': {", + " 'top_cats': {", + " 'type': 'terms',", + " 'field': 'cat',", + " 'limit': 3", + " 'facet': {", + " 'avg_price_for_cat': 'avg(price)'", + " }", + " }", + " }", + "}"); + final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); + + QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME); + + assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); + final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); + // Test top level facets + assertHasFacetWithBucketValues(topLevelFacetData, "top_cats", + new FacetBucket("electronics", NUM_ELECTRONICS), + new FacetBucket("currency", NUM_CURRENCY), + new FacetBucket("memory", NUM_MEMORY)); + // Test subfacet values for each top-level facet bucket + final List catBuckets = topLevelFacetData.getBucketBasedFacets("top_cats").getBuckets(); + assertHasStatFacetWithValue(catBuckets.get(0), subfacetName, 252.02909261530095); // electronics + assertHasStatFacetWithValue(catBuckets.get(1), subfacetName, 0.0); // currency + assertHasStatFacetWithValue(catBuckets.get(2), subfacetName, 129.99499893188477); // memory + } + + @Test + public void testFacetWithDomainFilteredBySimpleQueryString() throws Exception { + final String jsonBody = String.join("\n", "{", + " 'query': '*:*',", + " 'facet': {", + " 'top_popular_cats': {", + " 'type': 'terms',", + " 'field': 'cat',", + " 'limit': 3", + " 'domain': {", + " 'filter': 'popularity:[5 TO 10]'", + " }", + " }", + " }", + "}"); + final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); + + QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME); + + assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); 
+ final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); + assertHasFacetWithBucketValues(topLevelFacetData, "top_popular_cats", + new FacetBucket("electronics", 9), + new FacetBucket("graphics card", 2), + new FacetBucket("hard drive", 2)); + } + + @Test + public void testFacetWithDomainFilteredByLocalParamsQueryString() throws Exception { + final String jsonBody = String.join("\n", "{", + " 'query': '*:*',", + " 'facet': {", + " 'top_popular_cats': {", + " 'type': 'terms',", + " 'field': 'cat',", + " 'limit': 3", + " 'domain': {", + " 'filter': '{!lucene df=\"popularity\" v=\"[5 TO 10]\"}'", + " }", + " }", + " }", + "}"); + final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); + QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME); + + assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); + final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); + assertHasFacetWithBucketValues(topLevelFacetData, "top_popular_cats", + new FacetBucket("electronics", 9), + new FacetBucket("graphics card", 2), + new FacetBucket("hard drive", 2)); + } + + @Test + public void testFacetWithArbitraryDomainFromQueryString() throws Exception { + final String jsonBody = String.join("\n", "{", + " 'query': 'cat:electronics',", + " 'facet': {", + " 'top_cats': {", + " 'type': 'terms',", + " 'field': 'cat',", + " 'limit': 3", + " 'domain': {", + " 'query': '*:*'", + " }", + " }", + " }", + "}"); + final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); + + QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME); + + assertExpectedDocumentsFoundAndReturned(response, NUM_ELECTRONICS, 10); + final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); + assertHasFacetWithBucketValues(topLevelFacetData, "top_cats", + new FacetBucket("electronics", NUM_ELECTRONICS), + new FacetBucket("currency", NUM_CURRENCY), + new 
FacetBucket("memory", NUM_MEMORY)); + } + + @Test + public void testFacetWithArbitraryDomainFromLocalParamsQuery() throws Exception { + final String jsonBody = String.join("\n", "{", + " 'query': 'cat:electronics',", + " 'facet': {", + " 'largest_search_cats': {", + " 'type': 'terms',", + " 'field': 'cat',", + " 'domain': {", + " 'query': '{!lucene df=\"cat\" v=\"search\"}'", + " }", + " }", + " }", + "}"); + final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); + + QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME); + + assertExpectedDocumentsFoundAndReturned(response, NUM_ELECTRONICS, 10); + final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); + assertHasFacetWithBucketValues(topLevelFacetData, "largest_search_cats", + new FacetBucket("search", 2), + new FacetBucket("software", 2)); + } + + @Test + public void testFacetWithMultipleSimpleQueryClausesInArbitraryDomain() throws Exception { + final String jsonBody = String.join("\n", "{", + " 'query': 'cat:electronics',", + " 'facet': {", + " 'cats_matching_solr': {", + " 'type': 'terms',", + " 'field': 'cat',", + " 'domain': {", + " 'query': ['cat:search', 'name:Solr']", + " }", + " }", + " }", + "}"); + final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); + + QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME); + + assertExpectedDocumentsFoundAndReturned(response, NUM_ELECTRONICS, 10); + final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); + assertHasFacetWithBucketValues(topLevelFacetData, "cats_matching_solr", + new FacetBucket("search", 1), + new FacetBucket("software", 1)); + } + + @Test + public void testFacetWithMultipleLocalParamsQueryClausesInArbitraryDomain() throws Exception { + final String jsonBody = String.join("\n", "{", + " 'query': 'cat:electronics',", + " 'facet': {", + " 'cats_matching_solr': {", + " 'type': 'terms',", + " 'field': 'cat',", + " 
'domain': {", + " 'query': ['{!lucene df=\"cat\" v=\"search\"}', '{!lucene df=\"name\" v=\"Solr\"}']", + " }", + " }", + " }", + "}"); + final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); + + QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME); + + assertExpectedDocumentsFoundAndReturned(response, NUM_ELECTRONICS, 10); + final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); + assertHasFacetWithBucketValues(topLevelFacetData, "cats_matching_solr", + new FacetBucket("search", 1), + new FacetBucket("software", 1)); + } + + @Test + public void testFacetWithDomainWidenedUsingExcludeTagsToIgnoreFilters() throws Exception { + final String jsonBody = String.join("\n", "{", + " 'query': '*:*',", + " 'filter': {'#on_shelf': 'inStock:true'},", + " 'facet': {", + " 'in_stock_only': {", + " 'type': 'terms',", + " 'field': 'cat',", + " 'limit': 2", + " }", + " 'all': {", + " 'type': 'terms',", + " 'field': 'cat',", + " 'limit': 2,", + " 'domain': {", + " 'excludeTags': 'on_shelf'", + " }", + " }", + " }", + "}"); + final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); + + QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME); + + assertExpectedDocumentsFoundAndReturned(response, NUM_IN_STOCK, 10); + final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); + assertHasFacetWithBucketValues(topLevelFacetData, "in_stock_only", + new FacetBucket("electronics", 8), + new FacetBucket("currency", 4)); + assertHasFacetWithBucketValues(topLevelFacetData, "all", + new FacetBucket("electronics", 12), + new FacetBucket("currency", 4)); + } + + private class FacetBucket { + private final Object val; + private final int count; + + FacetBucket(Object val, int count) { + this.val = val; + this.count = count; + } + + public Object getVal() { + return val; + } + + public int getCount() { + return count; + } + } + + private void 
assertHasFacetWithBucketValues(NestableJsonFacet response, String expectedFacetName, + FacetBucket... expectedBuckets) { + assertTrue("Expected response to have facet with name " + expectedFacetName, + response.getBucketBasedFacets(expectedFacetName) != null); + final List buckets = response.getBucketBasedFacets(expectedFacetName).getBuckets(); + assertEquals(expectedBuckets.length, buckets.size()); + for (int i = 0; i < expectedBuckets.length; i++) { + final FacetBucket expectedBucket = expectedBuckets[i]; + final BucketJsonFacet actualBucket = buckets.get(i); + assertEquals(expectedBucket.getVal(), actualBucket.getVal()); + assertEquals(expectedBucket.getCount(), actualBucket.getCount()); + } + } + + private void assertHasStatFacetWithValue(NestableJsonFacet response, String expectedFacetName, + Double expectedStatValue) { + assertTrue("Expected response to have stat facet named '" + expectedFacetName + "'", + response.getStatValue(expectedFacetName) != null); + assertEquals(expectedStatValue, response.getStatValue(expectedFacetName)); + } + + private void assertExpectedDocumentsFoundAndReturned(QueryResponse response, int expectedNumFound, + int expectedReturned) { + assertEquals(0, response.getStatus()); + final SolrDocumentList documents = response.getResults(); + assertEquals(expectedNumFound, documents.getNumFound()); + assertEquals(expectedReturned, documents.size()); + } +} diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/DirectJsonQueryRequestFacetingIntegrationTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/DirectJsonQueryRequestFacetingIntegrationTest.java index d515368d896d..48f13c2135e7 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/DirectJsonQueryRequestFacetingIntegrationTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/DirectJsonQueryRequestFacetingIntegrationTest.java @@ -545,8 +545,8 @@ private void 
assertHasFacetWithBucketValues(NestableJsonFacet response, String e private void assertHasStatFacetWithValue(NestableJsonFacet response, String expectedFacetName, Double expectedStatValue) { assertTrue("Expected response to have stat facet named '" + expectedFacetName + "'", - response.getStatFacetValue(expectedFacetName) != null); - assertEquals(expectedStatValue, response.getStatFacetValue(expectedFacetName)); + response.getStatValue(expectedFacetName) != null); + assertEquals(expectedStatValue, response.getStatValue(expectedFacetName)); } private void assertExpectedDocumentsFoundAndReturned(QueryResponse response, int expectedNumFound, int expectedReturned) { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/JsonQueryRequestFacetingIntegrationTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/JsonQueryRequestFacetingIntegrationTest.java index 757a0eb16708..f4406c17345f 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/JsonQueryRequestFacetingIntegrationTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/JsonQueryRequestFacetingIntegrationTest.java @@ -571,8 +571,8 @@ private void assertHasFacetWithBucketValues(NestableJsonFacet response, String e private void assertHasStatFacetWithValue(NestableJsonFacet response, String expectedFacetName, Double expectedStatValue) { assertTrue("Expected response to have stat facet named '" + expectedFacetName + "'", - response.getStatFacetValue(expectedFacetName) != null); - assertEquals(expectedStatValue, response.getStatFacetValue(expectedFacetName)); + response.getStatValue(expectedFacetName) != null); + assertEquals(expectedStatValue, response.getStatValue(expectedFacetName)); } private void assertExpectedDocumentsFoundAndReturned(QueryResponse response, int expectedNumFound, int expectedReturned) { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/TermsFacetMapTest.java 
b/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/TermsFacetMapTest.java index 0d06e590e6ff..7028326918c1 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/TermsFacetMapTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/TermsFacetMapTest.java @@ -54,15 +54,6 @@ public void testStoresBucketOffsetWithCorrectKey() { } - @Test - public void testRejectsNegativeBucketLimit() { - final Throwable thrown = expectThrows(IllegalArgumentException.class, () -> { - final TermsFacetMap termsFacet = new TermsFacetMap(ANY_FIELD_NAME) - .setLimit(-1); - }); - assertThat(thrown.getMessage(), containsString("must be non-negative")); - } - @Test public void testStoresBucketLimitWithCorrectKey() { final TermsFacetMap termsFacet = new TermsFacetMap(ANY_FIELD_NAME) @@ -129,9 +120,9 @@ public void testStoresOverRefineBucketsWithCorrectKey() { public void testRejectInvalidMinCount() { final Throwable thrown = expectThrows(IllegalArgumentException.class, () -> { final TermsFacetMap termsFacet = new TermsFacetMap(ANY_FIELD_NAME) - .setMinCount(0); + .setMinCount(-1); }); - assertThat(thrown.getMessage(), containsString("must be a positive integer")); + assertThat(thrown.getMessage(), containsString("must be a non-negative integer")); } @Test @@ -139,6 +130,8 @@ public void testStoresMinCountWithCorrectKey() { final TermsFacetMap termsFacet = new TermsFacetMap(ANY_FIELD_NAME) .setMinCount(6); assertEquals(6, termsFacet.get("mincount")); + termsFacet.setMinCount(0); + assertEquals(0, termsFacet.get("mincount")); } @Test diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/NestableJsonFacetTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/NestableJsonFacetTest.java new file mode 100644 index 000000000000..3b40726d802c --- /dev/null +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/NestableJsonFacetTest.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) 
under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.client.solrj.response; + + +import java.util.Collections; + +import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.client.solrj.response.json.NestableJsonFacet; +import org.apache.solr.common.util.NamedList; +import org.junit.Test; + +public class NestableJsonFacetTest extends SolrTestCaseJ4 { + + @Test + public void testParsing() { + NamedList list = new NamedList<>(); + list.add("count", 12); + NamedList buckets = new NamedList() {{ + add("val", "Nike"); + }}; + NamedList vals = new NamedList() {{ + add("numBuckets", 10); + add("allBuckets", new NamedList(){{ + add("count", 12); + }}); + add("before", new NamedList(){{ + add("count", 1); + }}); + add("after", new NamedList(){{ + add("count", 2); + }}); + add("between", new NamedList(){{ + add("count", 9); + }}); + }}; + vals.add("buckets", Collections.singletonList(buckets)); + list.add("test", vals); + NestableJsonFacet facet = new NestableJsonFacet(list); + + assertEquals(12L, facet.getCount()); + assertEquals(9L, facet.getBucketBasedFacets("test").getBetween()); + list.clear(); + + list.add("count", 12L); + buckets = new NamedList() {{ + add("val", "Nike"); + }}; + vals = new NamedList() {{ + add("numBuckets", 10L); + 
add("allBuckets", new NamedList(){{ + add("count", 12L); + }}); + add("before", new NamedList(){{ + add("count", 1L); + }}); + add("after", new NamedList(){{ + add("count", 2L); + }}); + add("between", new NamedList(){{ + add("count", 9L); + }}); + }}; + vals.add("buckets", Collections.singletonList(buckets)); + list.add("test", vals); + facet = new NestableJsonFacet(list); + assertEquals(12L, facet.getCount()); + assertEquals(2L, facet.getBucketBasedFacets("test").getAfter()); + } +} diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/TermsResponseTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/TermsResponseTest.java index 681588949a31..57d6a73df2b0 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/response/TermsResponseTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/TermsResponseTest.java @@ -15,31 +15,33 @@ * limitations under the License. */ package org.apache.solr.client.solrj.response; + import java.util.List; -import junit.framework.Assert; -import org.apache.solr.SolrJettyTestBase; +import org.apache.solr.EmbeddedSolrServerTestBase; import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.common.SolrInputDocument; import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.client.solrj.response.TermsResponse.Term; +import org.apache.solr.common.SolrInputDocument; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; +import junit.framework.Assert; + /** * Test for TermComponent's response in Solrj */ -public class TermsResponseTest extends SolrJettyTestBase { - +public class TermsResponseTest extends EmbeddedSolrServerTestBase { + @BeforeClass - public static void beforeTest() throws Exception { + public static void beforeClass() throws Exception { initCore(); } - + @Before @Override - public void setUp() throws Exception{ + public void setUp() throws Exception { super.setUp(); clearIndex(); assertU(commit()); @@ -62,7 
+64,7 @@ public void testTermsResponse() throws Exception { query.setTermsPrefix("s"); query.addTermsField("terms_s"); query.setTermsMinCount(1); - + QueryRequest request = new QueryRequest(query); List terms = request.process(getSolrClient()).getTermsResponse().getTerms("terms_s"); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestSpellCheckResponse.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestSpellCheckResponse.java index 443091b58499..8ffdefef34eb 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestSpellCheckResponse.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestSpellCheckResponse.java @@ -15,8 +15,10 @@ * limitations under the License. */ package org.apache.solr.client.solrj.response; -import junit.framework.Assert; -import org.apache.solr.SolrJettyTestBase; + +import java.util.List; + +import org.apache.solr.EmbeddedSolrServerTestBase; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.client.solrj.response.SpellCheckResponse.Collation; @@ -27,7 +29,7 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.util.List; +import junit.framework.Assert; /** * Test for SpellCheckComponent's response in Solrj @@ -35,12 +37,13 @@ * * @since solr 1.3 */ -public class TestSpellCheckResponse extends SolrJettyTestBase { +public class TestSpellCheckResponse extends EmbeddedSolrServerTestBase { + @BeforeClass - public static void beforeTest() throws Exception { + public static void beforeClass() throws Exception { initCore(); } - + static String field = "name"; @Test @@ -101,7 +104,7 @@ public void testSpellCheckResponse_Extended() throws Exception { // Hmmm... 
the API for SpellCheckResponse could be nicer: response.getSuggestions().get(0).getAlternatives().get(0); } - + @Test public void testSpellCheckCollationResponse() throws Exception { getSolrClient(); @@ -128,7 +131,7 @@ public void testSpellCheckCollationResponse() throws Exception { doc.setField("name", "fat of homer"); client.add(doc); client.commit(true, true); - + //Test Backwards Compatibility SolrQuery query = new SolrQuery("name:(+fauth +home +loane)"); query.set(CommonParams.QT, "/spell"); @@ -139,15 +142,15 @@ public void testSpellCheckCollationResponse() throws Exception { SpellCheckResponse response = request.process(client).getSpellCheckResponse(); response = request.process(client).getSpellCheckResponse(); assertTrue("name:(+faith +hope +loaves)".equals(response.getCollatedResult())); - + //Test Expanded Collation Results query.set(SpellingParams.SPELLCHECK_COLLATE_EXTENDED_RESULTS, true); query.set(SpellingParams.SPELLCHECK_MAX_COLLATION_TRIES, 10); - query.set(SpellingParams.SPELLCHECK_MAX_COLLATIONS, 2); + query.set(SpellingParams.SPELLCHECK_MAX_COLLATIONS, 2); request = new QueryRequest(query); response = request.process(client).getSpellCheckResponse(); assertTrue("name:(+faith +hope +love)".equals(response.getCollatedResult()) || "name:(+faith +hope +loaves)".equals(response.getCollatedResult())); - + List collations = response.getCollatedResults(); assertEquals(2, collations.size()); for(Collation collation : collations) @@ -174,7 +177,7 @@ public void testSpellCheckCollationResponse() throws Exception { } } } - + query.set(SpellingParams.SPELLCHECK_COLLATE_EXTENDED_RESULTS, false); response = request.process(client).getSpellCheckResponse(); { @@ -182,12 +185,12 @@ public void testSpellCheckCollationResponse() throws Exception { assertEquals(2, collations.size()); String collation1 = collations.get(0).getCollationQueryString(); String collation2 = collations.get(1).getCollationQueryString(); - assertFalse(collation1 + " equals " + collation2, + 
assertFalse(collation1 + " equals " + collation2, collation1.equals(collation2)); for(Collation collation : collations) { assertTrue("name:(+faith +hope +love)".equals(collation.getCollationQueryString()) || "name:(+faith +hope +loaves)".equals(collation.getCollationQueryString())); - } + } } - + } } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestSuggesterResponse.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestSuggesterResponse.java index 0b3cf2ce1bd4..5eb28ec58666 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestSuggesterResponse.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestSuggesterResponse.java @@ -15,11 +15,12 @@ * limitations under the License. */ package org.apache.solr.client.solrj.response; + import java.io.IOException; import java.util.List; import java.util.Map; -import org.apache.solr.SolrJettyTestBase; +import org.apache.solr.EmbeddedSolrServerTestBase; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.request.QueryRequest; @@ -32,9 +33,10 @@ * Test for SuggesterComponent's response in Solrj * */ -public class TestSuggesterResponse extends SolrJettyTestBase { +public class TestSuggesterResponse extends EmbeddedSolrServerTestBase { + @BeforeClass - public static void beforeTest() throws Exception { + public static void beforeClass() throws Exception { initCore(); } diff --git a/solr/solrj/src/test/org/apache/solr/common/util/TestPathTrie.java b/solr/solrj/src/test/org/apache/solr/common/util/TestPathTrie.java index 147535af031c..52a661ff6341 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/TestPathTrie.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/TestPathTrie.java @@ -55,6 +55,19 @@ public void testPathTrie() { pathTrie.lookup("/aa",templateValues, subPaths); assertEquals(3, subPaths.size()); + pathTrie = new 
PathTrie<>(ImmutableSet.of("_introspect")); + pathTrie.insert("/aa/bb/{cc}/tt/*", emptyMap(), "W"); + templateValues.clear(); + assertEquals("W" ,pathTrie.lookup("/aa/bb/somepart/tt/hello", templateValues)); + assertEquals(templateValues.get("*"), "/hello"); + + templateValues.clear(); + assertEquals("W" ,pathTrie.lookup("/aa/bb/somepart/tt", templateValues)); + assertEquals(templateValues.get("*"), null); + + templateValues.clear(); + assertEquals("W" ,pathTrie.lookup("/aa/bb/somepart/tt/hello/world/from/solr", templateValues)); + assertEquals(templateValues.get("*"), "/hello/world/from/solr"); } } diff --git a/solr/test-framework/src/java/org/apache/solr/EmbeddedSolrServerTestBase.java b/solr/test-framework/src/java/org/apache/solr/EmbeddedSolrServerTestBase.java new file mode 100644 index 000000000000..8df8dea8ebf6 --- /dev/null +++ b/solr/test-framework/src/java/org/apache/solr/EmbeddedSolrServerTestBase.java @@ -0,0 +1,160 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr; + +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Properties; + +import org.apache.commons.io.FileUtils; +import org.apache.lucene.util.LuceneTestCase; +import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer; +import org.apache.solr.common.util.ContentStream; +import org.apache.solr.common.util.ContentStreamBase; +import org.apache.solr.common.util.ContentStreamBase.ByteArrayStream; +import org.apache.solr.util.ExternalPaths; +import org.junit.After; +import org.junit.AfterClass; + +import com.google.common.io.ByteStreams; + +abstract public class EmbeddedSolrServerTestBase extends SolrTestCaseJ4 { + + protected static final String DEFAULT_CORE_NAME = "collection1"; + + public static EmbeddedSolrServer client = null; + + @After + public synchronized void afterClass() throws Exception { + if (client != null) client.close(); + client = null; + } + + @AfterClass + public static void afterEmbeddedSolrServerTestBase() throws Exception { + + } + + public synchronized EmbeddedSolrServer getSolrClient() { + if (client == null) { + client = createNewSolrClient(); + } + return client; + } + + /** + * Create a new solr client. Subclasses should override for other options. + */ + public EmbeddedSolrServer createNewSolrClient() { + return new EmbeddedSolrServer(h.getCoreContainer(), DEFAULT_CORE_NAME) { + @Override + public void close() { + // do not close core container + } + }; + } + + public void upload(final String collection, final ContentStream... 
contents) { + final Path base = Paths.get(getSolrClient().getCoreContainer().getSolrHome(), collection); + writeTo(base, contents); + } + + private void writeTo(final Path base, final ContentStream... contents) { + try { + if (!Files.exists(base)) { + Files.createDirectories(base); + } + + for (final ContentStream content : contents) { + final File file = new File(base.toFile(), content.getName()); + file.getParentFile().mkdirs(); + + try (OutputStream os = new FileOutputStream(file)) { + ByteStreams.copy(content.getStream(), os); + } + } + } catch (final IOException e) { + throw new RuntimeException(e); + } + } + + public Collection download(final String collection, final String... names) { + final Path base = Paths.get(getSolrClient().getCoreContainer().getSolrHome(), collection); + final List result = new ArrayList<>(); + + if (Files.exists(base)) { + for (final String name : names) { + final File file = new File(base.toFile(), name); + if (file.exists() && file.canRead()) { + try { + final ByteArrayOutputStream os = new ByteArrayOutputStream(); + ByteStreams.copy(new FileInputStream(file), os); + final ByteArrayStream stream = new ContentStreamBase.ByteArrayStream(os.toByteArray(), name); + result.add(stream); + } catch (final IOException e) { + throw new RuntimeException(e); + } + } + } + } + + return result; + } + + public static void initCore() throws Exception { + final String home = legacyExampleCollection1SolrHome(); + final String config = home + "/" + DEFAULT_CORE_NAME + "/conf/solrconfig.xml"; + final String schema = home + "/" + DEFAULT_CORE_NAME + "/conf/schema.xml"; + initCore(config, schema, home); + } + + public static String legacyExampleCollection1SolrHome() throws IOException { + final String sourceHome = ExternalPaths.SOURCE_HOME; + if (sourceHome == null) + throw new IllegalStateException("No source home! 
Cannot create the legacy example solr home directory."); + + final File tempSolrHome = LuceneTestCase.createTempDir().toFile(); + FileUtils.copyFileToDirectory(new File(sourceHome, "server/solr/solr.xml"), tempSolrHome); + final File collectionDir = new File(tempSolrHome, DEFAULT_CORE_NAME); + FileUtils.forceMkdir(collectionDir); + final File configSetDir = new File(sourceHome, "server/solr/configsets/sample_techproducts_configs/conf"); + FileUtils.copyDirectoryToDirectory(configSetDir, collectionDir); + + final Properties props = new Properties(); + props.setProperty("name", DEFAULT_CORE_NAME); + + try (Writer writer = new OutputStreamWriter(FileUtils.openOutputStream(new File(collectionDir, "core.properties")), + "UTF-8");) { + props.store(writer, null); + } + + return tempSolrHome.getAbsolutePath(); + } + +} diff --git a/solr/test-framework/src/java/org/apache/solr/SolrJettyTestBase.java b/solr/test-framework/src/java/org/apache/solr/SolrJettyTestBase.java index db415a20e298..6dcccb4c099e 100644 --- a/solr/test-framework/src/java/org/apache/solr/SolrJettyTestBase.java +++ b/solr/test-framework/src/java/org/apache/solr/SolrJettyTestBase.java @@ -16,10 +16,16 @@ */ package org.apache.solr; +import java.io.File; +import java.io.OutputStreamWriter; +import java.lang.invoke.MethodHandles; +import java.nio.file.Path; +import java.util.Properties; +import java.util.SortedMap; + import org.apache.commons.io.FileUtils; import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.client.solrj.SolrClient; -import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer; import org.apache.solr.client.solrj.embedded.JettyConfig; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.impl.HttpSolrClient; @@ -31,16 +37,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.File; -import java.io.OutputStreamWriter; -import java.lang.invoke.MethodHandles; import java.nio.charset.StandardCharsets; -import 
java.nio.file.Path; -import java.util.Properties; -import java.util.SortedMap; - -abstract public class SolrJettyTestBase extends SolrTestCaseJ4 +abstract public class SolrJettyTestBase extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -55,8 +54,8 @@ public static void beforeSolrJettyTestBase() throws Exception { public static String context; public static JettySolrRunner createAndStartJetty(String solrHome, String configFile, String schemaFile, String context, - boolean stopAtShutdown, SortedMap extraServlets) - throws Exception { + boolean stopAtShutdown, SortedMap extraServlets) + throws Exception { // creates the data dir context = context==null ? "/solr" : context; @@ -132,7 +131,6 @@ public static void afterSolrJettyTestBase() throws Exception { } } - public synchronized SolrClient getSolrClient() { if (client == null) { client = createNewSolrClient(); @@ -147,23 +145,13 @@ public synchronized SolrClient getSolrClient() { * Subclasses should override for other options. */ public SolrClient createNewSolrClient() { - if (jetty != null) { - try { - // setup the client... - String url = jetty.getBaseUrl().toString() + "/" + "collection1"; - HttpSolrClient client = getHttpSolrClient(url, DEFAULT_CONNECTION_TIMEOUT); - return client; - } - catch( Exception ex ) { - throw new RuntimeException( ex ); - } - } else { - return new EmbeddedSolrServer( h.getCoreContainer(), "collection1" ) { - @Override - public void close() { - // do not close core container - } - }; + try { + // setup the client... 
+ final String url = jetty.getBaseUrl().toString() + "/" + "collection1"; + final HttpSolrClient client = getHttpSolrClient(url, DEFAULT_CONNECTION_TIMEOUT); + return client; + } catch (final Exception ex) { + throw new RuntimeException(ex); } } @@ -179,13 +167,6 @@ public static void cleanUpJettyHome(File solrHome) throws Exception { } } - public static void initCore() throws Exception { - String exampleHome = legacyExampleCollection1SolrHome(); - String exampleConfig = exampleHome+"/collection1/conf/solrconfig.xml"; - String exampleSchema = exampleHome+"/collection1/conf/schema.xml"; - initCore(exampleConfig, exampleSchema, exampleHome); - } - public static String legacyExampleCollection1SolrHome() { String sourceHome = ExternalPaths.SOURCE_HOME; if (sourceHome == null) @@ -226,5 +207,4 @@ public static String legacyExampleCollection1SolrHome() { return legacyExampleSolrHome; } - } diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java index b48f2ef2b844..958cb6b7aa6a 100644 --- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java +++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java @@ -121,6 +121,7 @@ import org.apache.solr.common.util.ObjectReleaseTracker; import org.apache.solr.common.util.SolrjNamedThreadFactory; import org.apache.solr.common.util.SuppressForbidden; +import org.apache.solr.common.util.TimeSource; import org.apache.solr.common.util.Utils; import org.apache.solr.common.util.XML; import org.apache.solr.core.CoreContainer; @@ -152,6 +153,7 @@ import org.apache.solr.util.StartupLoggingUtils; import org.apache.solr.util.TestHarness; import org.apache.solr.util.TestInjection; +import org.apache.solr.util.TimeOut; import org.apache.zookeeper.KeeperException; import org.junit.After; import org.junit.AfterClass; @@ -1268,7 +1270,16 @@ public static XmlDoc doc(String... 
fieldsAndValues) { return d; } + /** + * Generates the correct SolrParams from an even list of strings. + * A string in an even position will represent the name of a parameter, while the following string + * at position (i+1) will be the assigned value. + * + * @param params an even list of strings + * @return the ModifiableSolrParams generated from the given list of strings. + */ public static ModifiableSolrParams params(String... params) { + if (params.length % 2 != 0) throw new RuntimeException("Params length should be even"); ModifiableSolrParams msp = new ModifiableSolrParams(); for (int i=0; i @@ -91,12 +88,9 @@ public class SolrCloudTestCase extends SolrTestCaseJ4 { private static class Config { final String name; final Path path; - final Map extraConfig; - - private Config(String name, Path path, Map extraConfig) { + private Config(String name, Path path) { this.name = name; this.path = path; - this.extraConfig = extraConfig; } } @@ -188,12 +182,7 @@ public Builder withSecurityJson(String securityJson) { * @param configPath the path to the config files */ public Builder addConfig(String configName, Path configPath) { - this.configs.add(new Config(configName, configPath, null)); - return this; - } - - public Builder addConfig(String configName, Path configPath, Map extraConfig) { - this.configs.add(new Config(configName, configPath, extraConfig)); + this.configs.add(new Config(configName, configPath)); return this; } @@ -218,8 +207,8 @@ public Builder withMetrics(boolean trackJettyMetrics) { * * @throws Exception if an error occurs on startup */ - public MiniSolrCloudCluster configure() throws Exception { - return cluster = build(); + public void configure() throws Exception { + cluster = build(); } /** @@ -233,15 +222,7 @@ public MiniSolrCloudCluster build() throws Exception { null, securityJson, trackJettyMetrics); CloudSolrClient client = cluster.getSolrClient(); for (Config config : configs) { - ((ZkClientClusterStateProvider) 
client.getClusterStateProvider()).uploadConfig(config.path, config.name); - if (config.extraConfig != null) { - for (Map.Entry e : config.extraConfig.entrySet()) { - ((ZkClientClusterStateProvider) client.getClusterStateProvider()).getZkStateReader().getZkClient() - .create(CONFIGS_ZKNODE + "/" + config.name + "/" + e.getKey(), e.getValue(), CreateMode.PERSISTENT, true); - - } - - } + ((ZkClientClusterStateProvider)client.getClusterStateProvider()).uploadConfig(config.path, config.name); } if (clusterProperties.size() > 0) { @@ -509,7 +490,7 @@ public static void ensureRunningJettys(int nodeCount, int timeoutSeconds) throws } cluster.waitForAllNodes(timeoutSeconds); } - + /** * Gets core container by node name. From here, core can be accessed to make updates, etc. * @param nodeName String diff --git a/solr/test-framework/src/test/org/apache/solr/util/TestSSLTestConfig.java b/solr/test-framework/src/test/org/apache/solr/util/TestSSLTestConfig.java index 4e3995469c02..ed2345977c9c 100644 --- a/solr/test-framework/src/test/org/apache/solr/util/TestSSLTestConfig.java +++ b/solr/test-framework/src/test/org/apache/solr/util/TestSSLTestConfig.java @@ -20,6 +20,8 @@ import java.util.Arrays; import java.util.List; +import org.apache.lucene.util.Constants; + import org.apache.solr.SolrTestCase; public class TestSSLTestConfig extends SolrTestCase { @@ -84,4 +86,19 @@ public void testIsOpenJdkJvmVersionKnownToHaveProblems() { } + public void testFailIfUserRunsTestsWithJVMThatHasKnownSSLBugs() { + // NOTE: If there is some future JVM version, where all available "ea" builds are known to be buggy, + // but we still want to be able to use for running tests (ie: via jenkins) to look for *other* bugs, + // then those -ea versions can be "white listed" here... + + try { + SSLTestConfig.assumeSslIsSafeToTest(); + } catch (org.junit.AssumptionViolatedException ave) { + fail("Current JVM (" + Constants.JVM_NAME + " / " + Constants.JVM_VERSION + + ") is known to have SSL Bugs. 
Other tests that (explicitly or via randomization) " + + " use SSL will be SKIPed"); + } + } + + }